Merge ~dannf/canonical-server-hwe-utils:redirect-to-pe-repo into canonical-server-hwe-utils:main

Proposed by dann frazier
Status: Needs review
Proposed branch: ~dannf/canonical-server-hwe-utils:redirect-to-pe-repo
Merge into: canonical-server-hwe-utils:main
Diff against target: 3165 lines (+2/-145)
2 files modified
README.md (+2/-31)
dev/null (+0/-114)
Reviewer: CE Hyperscale (status: Pending)
Review via email: mp+459263@code.launchpad.net
Taihsiang Ho (tai271828) wrote:

I couldn't find the README update pointing to the new repository. Is there a typo or something?

dann frazier (dannf) wrote:

Nice attention to detail @Tai!

Now added.

Unmerged commits

f8ab178... by dann frazier

Clear contents and update README.md to point to new repository

Preview Diff

1diff --git a/README.md b/README.md
2index 9291131..b24b096 100644
3--- a/README.md
4+++ b/README.md
5@@ -1,32 +1,3 @@
6-canonical-server-hwe-utils
7-==========================
8+# This repository has moved #
9
10-This package brings a few utilities used internally by the Canonical HWE
11-Team; most of them are used directly from the repository working copy, but
12-a few can also be installed in the user's home directory or in a virtual
13-environment, so they are always available in the current path. It will also
14-install the required dependencies automatically.
15-
16-
17-
18-Installation
19-------------
20-
21-To install, run a `pip install .` from the repository root, optionally
22-using an already created and activated virtual environment. After that, the
23-following scripts will be installed in the corresponding `bin` directory:
24-
25-- labkey: A script tool to manipulate machines in the lab.
26-- weekly-rota-init.py: scans a Jenkins server looking for tests which most
27- recent run failed and creates Jira (sub-)tasks accordingly.
28-- sbsh: Obtain a jenkins user shell within a ScaleBot deployment.
29-
30-
31-
32-
33-Upgrade and uninstall
34----------------------
35-
36-To upgrade an already installed copy of the scripts, run `pip install -U .`
37-from the repository root; to uninstall the scripts, run
38-`pip uninstall canonical-server-hwe-utils`.
39+Now located at: [lp:canonical-partner-eng-utils](https://code.launchpad.net/~canonical-partner-eng/canonical-partner-eng-utils/+git/canonical-partner-eng-utils)
40diff --git a/installer-test-matrix/config.yaml b/installer-test-matrix/config.yaml
41deleted file mode 100644
42index af38628..0000000
43--- a/installer-test-matrix/config.yaml
44+++ /dev/null
45@@ -1,33 +0,0 @@
46-releases:
47- # The HWE option needs to be added at the 2nd point release
48- jammy:
49- boot-options: [ GA, HWE ]
50- impish:
51- boot-options: [ GA ]
52- hirsute:
53- boot-options: [ GA ]
54- focal:
55- boot-options: [ GA, HWE ]
56- bionic:
57- boot-options: [ GA, HWE ]
58-
59-# 'coverage: full' is intended for platforms that are covered by an
60-# active maintenance engagement. 'coverage: spot' is for certified
61-# platforms that are no longer covered by an engagement.
62-platforms:
63- crb1s:
64- coverage: spot
65- crb2s:
66- coverage: spot
67- d05:
68- coverage: full
69- mtjade:
70- coverage: full
71- unsupported:
72- releases:
73- bionic:
74- boot-options: GA
75- saber:
76- coverage: spot
77- taishan2280v2:
78- coverage: full
79diff --git a/installer-test-matrix/generate-matrix.py b/installer-test-matrix/generate-matrix.py
80deleted file mode 100755
81index 8c1f161..0000000
82--- a/installer-test-matrix/generate-matrix.py
83+++ /dev/null
84@@ -1,76 +0,0 @@
85-#!/usr/bin/env python3
86-
87-import argparse
88-import random
89-import sys
90-import yaml
91-
92-
93-def is_supported(platform_config, release, boot_option):
94- if "unsupported" not in platform_config.keys():
95- return True
96- if "releases" not in platform_config["unsupported"]:
97- return True
98- if release not in platform_config["unsupported"]["releases"]:
99- return True
100- if "boot-options" not in platform_config["unsupported"]["releases"][release].keys():
101- return False
102- if (
103- boot_option
104- in platform_config["unsupported"]["releases"][release]["boot-options"]
105- ):
106- return False
107- return True
108-
109-
110-if __name__ == "__main__":
111- desc = """
112-Generates a random test matrix of platform/kernel/boot method combinations,
113-per a policy supplied in a configuration file.
114- """
115- parser = argparse.ArgumentParser(description=desc)
116- parser.add_argument("--config", "-c", default="./config.yaml")
117- parser.add_argument("--release", "-r", required=True)
118- args = parser.parse_args()
119-
120- with open(args.config, "r") as f:
121- y = yaml.safe_load(f)
122- release = y["releases"][args.release]
123-
124- # Create shuffled lists of the various install
125- # config variables. We'll round-robin through them.
126- boot_options = release["boot-options"][:]
127- random.shuffle(boot_options)
128- media_options = ["iso", "pxe"]
129- random.shuffle(media_options)
130- cd_hdd = ["cd", "usb-hdd"]
131- random.shuffle(cd_hdd)
132-
133- for platform in y["platforms"]:
134- p = y["platforms"][platform]
135- done = False
136- for m in range(len(media_options)):
137- if done:
138- break
139- # pop/append pattern creates a circular queue
140- media_type = media_options.pop(0)
141- media_options.append(media_type)
142- for b in range(len(boot_options)):
143- boot_type = boot_options.pop(0)
144- boot_options.append(boot_type)
145- if not is_supported(p, args.release, boot_type):
146- continue
147- # Alternate between booting the ISO as a CD
148- # and as a USB stick. Both create a nearly identical
149- # install environment, so we don't consider them
150- # separate media types.
151- if media_type == "iso":
152- media_string = cd_hdd.pop(0)
153- cd_hdd.append(media_string)
154- else:
155- media_string = media_type
156- sys.stdout.write("%s %s %s\n" % (platform, media_string, boot_type))
157- if p["coverage"] == "spot":
158- # We only need 1 case for spot testing
159- done = True
160- break
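
The pop/append pattern this script leans on is easy to miss: popping the head of a shuffled list and re-queuing it at the back makes a circular queue, so each platform picks up the rotation where the previous one left off and coverage spreads across the options. A minimal standalone sketch of the idea, using platform names from the config above:

    import random

    # Sketch of generate-matrix.py's circular queue: pop(0) takes the
    # next option in rotation and append() re-queues it, so each
    # platform gets the option after the one its predecessor got.
    media_options = ["iso", "pxe"]
    random.shuffle(media_options)

    for platform in ["d05", "mtjade", "taishan2280v2"]:
        media = media_options.pop(0)
        media_options.append(media)
        print(platform, media)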
161diff --git a/jenkins-scripts/sbsh b/jenkins-scripts/sbsh
162deleted file mode 100755
163index 5a13ad9..0000000
164--- a/jenkins-scripts/sbsh
165+++ /dev/null
166@@ -1,55 +0,0 @@
167-#!/usr/bin/env python3
168-
169-import argparse
170-import subprocess
171-import sys
172-import yaml
173-
174-LabHosts = {"1ss": "10.229.0.101", "tremont": "10.228.0.2"}
175-
176-if __name__ == "__main__":
177- try:
178- idx = sys.argv.index("--")
179- argv = sys.argv[1:idx]
180- cmd = sys.argv[idx + 1 :]
181- except ValueError:
182- argv = sys.argv[1:]
183- cmd = []
184-
185- parser = argparse.ArgumentParser(
186- description="Obtain a jenkins user shell within a ScaleBot deployment"
187- )
188- parser.add_argument("-l", "--lab", choices=LabHosts.keys(), required=True)
189- args = parser.parse_args(args=argv)
190-
191- juju_proc = subprocess.run(
192- [
193- "ssh",
194- "-t",
195- f"ubuntu@{LabHosts[args.lab]}",
196- "juju",
197- "status",
198- "--format=yaml",
199- ],
200- check=True,
201- capture_output=True,
202- )
203-
204- status = yaml.safe_load(juju_proc.stdout)
205- unit_name = list(status["applications"]["jenkins"]["units"].keys())[0]
206- juju_cmd = [
207- "ssh",
208- "-t",
209- f"ubuntu@{LabHosts[args.lab]}",
210- "juju",
211- "ssh",
212- unit_name,
213- "sudo",
214- "su",
215- "-",
216- "jenkins",
217- ]
218- if cmd:
219- juju_cmd.extend(["-c", f"\"'{' '.join(cmd)}'\""])
220-
221- subprocess.run(juju_cmd)
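
One detail worth calling out in sbsh is the argument handling: everything after a literal `--` is passed through as the remote command rather than parsed as an sbsh option, so an invocation like `sbsh -l tremont -- uptime` (a hypothetical example; the lab names come from the LabHosts map) would run `uptime` in the jenkins user's shell. The split itself is a small reusable idiom:

    import sys

    # Split argv at the first "--": options for this script on the left,
    # a pass-through command on the right (empty if no "--" is given).
    try:
        idx = sys.argv.index("--")
        argv, cmd = sys.argv[1:idx], sys.argv[idx + 1:]
    except ValueError:
        argv, cmd = sys.argv[1:], []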
222diff --git a/jenkins-scripts/weekly-rota-init.py b/jenkins-scripts/weekly-rota-init.py
223deleted file mode 100755
224index 7c2d663..0000000
225--- a/jenkins-scripts/weekly-rota-init.py
226+++ /dev/null
227@@ -1,294 +0,0 @@
228-#!/usr/bin/env python3
229-
230-#
231-# This script scans a Jenkins server instance for jobs that did not pass in
232-# their most recent runs. It then generates a primary Jira task for that
233-# lab/week, and attaches to it a set of sub-tasks (one per unsuccessful test)
234-# that can be used for tracking the analysis of those potential failures. The
235-# created jobs are automatically assigned to the owner of the API token used to
236-# authenticate to Jira.
237-#
238-# Jira authentication requires an API user field and an API token. This
239-# script expects to find them in the JIRA_API_USER and JIRA_API_TOKEN
240-# environment variables, respectively.
241-#
242-# The Jenkins endpoint and credentials are supplied via command line options.
243-#
244-
245-import argparse
246-import datetime
247-import jenkins
248-import logging
249-import os
250-import re
251-import requests
252-import tempfile
253-from jira import JIRA
254-
255-JIRA_URL = "https://warthogs.atlassian.net"
256-# The lane to which new cards will be moved
257-JIRA_INITIAL_TRANSITION = "In Progress"
258-
259-Subnet2LabNameMap = {
260- "10.228": "Tremont",
261- "10.229": "Needham",
262-}
263-
264-
265-def jenkins_url_to_lab_name(url):
266- r = re.compile(r"http://(?P<subnet>[0-9]+\.[0-9]+)\.")
267- m = r.match(url)
268- try:
269- return Subnet2LabNameMap[m.group("subnet")]
270- except IndexError:
271- return "Unknown Lab Subnet"
272-
273-
274-if __name__ == "__main__":
275- desc = """
276- Parse a Jenkins Atom feed and create Jira cards for
277- any unexpected failures.
278- """
279- logging.basicConfig(level=logging.INFO)
280- parser = argparse.ArgumentParser(description=desc)
281- parser.add_argument(
282- "-j",
283- "--jenkins-url",
284- help="Jenkins URL, e.g. http://localhost:8080",
285- required=True,
286- dest="jenkins_url",
287- )
288- parser.add_argument(
289- "-u",
290- "--jenkins-user",
291- help="Jenkins username",
292- required=True,
293- dest="jenkins_user",
294- )
295- parser.add_argument(
296- "-p",
297- "--jenkins-password",
298- help="Jenkins password",
299- required=True,
300- dest="jenkins_password",
301- )
302- parser.add_argument(
303- "-k",
304- "--jira-project-key",
305- help="JIRA project key to use. This argument is useful when you need to test the script result in JIRA without messing up the default project. (Default: SHENG)",
306- required=False,
307- default="SHENG",
308- dest="jira_project_key",
309- )
310- parser.add_argument(
311- "--dry-run",
312- help="Don't make any updates to Jira, just report what it would do.",
313- default=False,
314- action="store_true",
315- )
316- args = parser.parse_args()
317-
318- labname = jenkins_url_to_lab_name(args.jenkins_url)
319- jenkins_server = jenkins.Jenkins(
320- args.jenkins_url,
321- username=args.jenkins_user,
322- password=args.jenkins_password,
323- )
324-
325- jira_project_key = args.jira_project_key
326-
327- today = datetime.date.today()
328- monday = today - datetime.timedelta(days=today.weekday())
329- weeknum = monday.isocalendar()[1]
330- prefix = (
331- f"[{monday.year}W{weeknum:02}][{monday.year}-{monday.month:02}-{monday.day:02}]"
332- )
333- title = f"Review weekend {labname} lab tests"
334-
335- JIRA_API_USER = os.environ["JIRA_API_USER"]
336- JIRA_API_TOKEN = os.environ["JIRA_API_TOKEN"]
337-
338- jira = JIRA(
339- options={
340- "server": JIRA_URL,
341- },
342- basic_auth=(JIRA_API_USER, JIRA_API_TOKEN),
343- )
344-
345- summary = f"{prefix} {title}"
346- description = "||Job Name||Last Run||Status||Cause By||\n"
347- if args.dry_run:
348- logging.info(f'[dry-run] Creating task for "{summary}"')
349- else:
350- primary = jira.create_issue(
351- fields={
352- "summary": summary,
353- "project": {
354- "key": jira_project_key,
355- },
356- "issuetype": {
357- "name": "Task",
358- "subtask": False,
359- },
360- "components": [
361- {
362- "name": "Scalebot Rota Review",
363- },
364- ],
365- "assignee": {"id": jira.myself()["accountId"]},
366- }
367- )
368- # Adding a link to the Launchpad bug into the JIRA entry
369- link = {
370- "url": "https://docs.google.com/document/d/1ui1sHSSNPaqV9K1p9IQrHS0Qrb0TUTcSKU5hHMEQoQ8/edit",
371- "title": "Instructions",
372- }
373- jira.add_simple_link(primary, object=link)
374- if args.dry_run:
375- logging.info(
376- f'[dry-run] Transitioning "{summary}" to {JIRA_INITIAL_TRANSITION}'
377- )
378- else:
379- jira.transition_issue(primary, JIRA_INITIAL_TRANSITION)
380- logging.info(f"Primary issue: {primary.permalink()}")
381-
382- for job in jenkins_server.get_jobs():
383- last_build = jenkins_server.get_job_info(job["name"])["lastCompletedBuild"]
384- last_successful_build = jenkins_server.get_job_info(job["name"])[
385- "lastSuccessfulBuild"
386- ]
387- last_failed_build = jenkins_server.get_job_info(job["name"])["lastFailedBuild"]
388- if last_build is None:
389- logging.info(
390- f"{job['name']} has never been run. Is this jenkins project just created?"
391- )
392- continue
393- if last_successful_build is None:
394- last_successful_build_number = -1
395- else:
396- last_successful_build_number = last_successful_build["number"]
397- if last_failed_build is None:
398- last_failed_build_number = -1
399- else:
400- last_failed_build_number = last_failed_build["number"]
401-
402- if last_successful_build_number > last_failed_build_number:
403- last_build_number = last_successful_build_number
404- else:
405- last_build_number = last_failed_build_number
406-
407- logging.info(
408- f"{job['name']} Last build: #{last_build['number']} Last success: #{last_successful_build_number} Last failed: #{last_failed_build_number}"
409- )
410-
411- if last_build_number == -1:
412- logging.info(f"{job['name']} has never been run successfully, skipping")
413- continue
414-
415- build_info = jenkins_server.get_build_info(job["name"], last_build_number)
416- result = build_info["result"]
417- timestamp = datetime.datetime.fromtimestamp(
418- int(build_info["timestamp"] / 1000)
419- ).strftime("%Y-%m-%d")
420- url = build_info["url"]
421- logging.info(
422- f"{job['name']} #{last_build_number} status is {result} time: {build_info['timestamp']}"
423- )
424- artifacts = build_info["artifacts"]
425-
426- # Save all job's status to the task's description
427- stale = ""
428- if (
429- datetime.datetime.now().timestamp() - build_info["timestamp"] / 1000
430- > 14 * 24 * 60 * 60
431- ):
432- stale = "STALE!"
433- result_color = "black"
434- if result in ["FAILURE"]:
435- result_color = "red"
436-
437- description += f"|{job['name']}|[{timestamp}|{url}] {stale}|{{color:{result_color}}}{result}{{color}}| |\n"
438-
439- if result in ["SUCCESS"]:
440- logging.info(f"{job['name']} status is {result}, skipping")
441- continue
442-
443- # We only want to create one task per job failure. To ensure
444- # that, we need a way to see if a Jira card for this failed job
445- # already exists. Let's define a summary string that uniquely
446- # identifies the job.
447- summary = f"{job['name']} #{last_build_number} {timestamp}"
448- # Check to see if a task already exists for this card. This can
449- # happen for jobs that only run every 2 weeks - there's no need
450- # to investigate a failure twice.
451- duplicates = jira.search_issues(
452- f'project = {jira_project_key} AND summary ~ "{summary}"'
453- )
454- if len(duplicates) > 0:
455- logging.info(f'"{summary}" matches the following existing tasks:')
456- for dup in duplicates:
457- logging.info(f" {dup.permalink()}")
458- answer = None
459- while answer not in ["n", "no", "y", "yes"]:
460- answer = input("Create anyway? ")
461- if answer in ["n", "no"]:
462- continue
463- if args.dry_run:
464- logging.info(f'[dry-run] Creating subtask for "{summary}"')
465- else:
466- subtask = jira.create_issue(
467- fields={
468- "summary": summary,
469- "description": f"{url}",
470- "project": {
471- "key": jira_project_key,
472- },
473- "issuetype": {
474- "name": "Sub-task",
475- },
476- "parent": {
477- "key": primary.key,
478- },
479- "components": [
480- {
481- "name": "Scalebot Rota Review",
482- },
483- ],
484- "assignee": {"id": jira.myself()["accountId"]},
485- }
486- )
487- response = requests.get(
488- f"{url}consoleText",
489- stream=True,
490- auth=(args.jenkins_user, args.jenkins_password),
491- )
492- with tempfile.TemporaryFile() as fp:
493- for chunk in response.iter_content(chunk_size=10 * 1024 * 1024):
494- fp.write(chunk)
495- jira.add_attachment(subtask, fp, "Jenkins_console")
496-
497- for artifact in artifacts:
498- logging.info(f"{url}artifact/{artifact['relativePath']}")
499- response = requests.get(
500- f"{url}artifact/{artifact['relativePath']}",
501- stream=True,
502- auth=(args.jenkins_user, args.jenkins_password),
503- )
504- with tempfile.TemporaryFile() as fp:
505- for chunk in response.iter_content(chunk_size=10 * 1024 * 1024):
506- fp.write(chunk)
507- jira.add_attachment(subtask, fp, artifact["relativePath"])
508-
509- if args.dry_run:
510- logging.info(
511- f'[dry-run] Transitioning "{summary}" task to {JIRA_INITIAL_TRANSITION}'
512- )
513- else:
514- jira.transition_issue(subtask, JIRA_INITIAL_TRANSITION)
515- logging.info(f"{job['name']} is {result}: {subtask.permalink()}")
516-
517- if args.dry_run:
518- logging.info(f"[dry-run] Update description {description}")
519- else:
520- desc_value = {"description": description}
521- primary.update(fields=desc_value)
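
The script's one-card-per-failed-build guarantee hinges on the summary string `{job} #{build} {date}` being unique, with the JQL `summary ~` search used to find prior cards. Pulled out of context, the dedup check looks like this (a sketch assuming an authenticated `jira.JIRA` client, as in the script):

    def failure_already_tracked(jira, project_key, job_name, build_number, timestamp):
        # Same JQL pattern as above: a summary that uniquely names the
        # failed build doubles as the duplicate-detection key.
        summary = f"{job_name} #{build_number} {timestamp}"
        duplicates = jira.search_issues(
            f'project = {project_key} AND summary ~ "{summary}"'
        )
        return len(duplicates) > 0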
522diff --git a/labkey b/labkey
523deleted file mode 100755
524index ea4e707..0000000
525--- a/labkey
526+++ /dev/null
527@@ -1,419 +0,0 @@
528-#!/usr/bin/env python3
529-"""
530-A script tool to manipulate machines in the lab.
531-
532-Requirements:
533- - python 3.6 or above (if you want to launch pre-commit hook, you need 3.8)
534- - pyyml python package (via pip)
535- - ipmi tools (for ubuntu users it could be installed via apt)
536-"""
537-
538-import argparse
539-import os
540-import os.path
541-import shutil
542-import subprocess
543-import sys
544-import yaml
545-
546-ProgName = "labkey"
547-Config = os.path.join(os.environ["HOME"], ".config", ProgName)
548-ScaleBotLocal = os.path.join(Config, "scalebot-repo")
549-
550-
551-def handle_default_branch_rename():
552- """
553- We've renamed the default branch from "master" to "main". Migrate
554- the local clone if necessary.
555- """
556- local_branch = (
557- subprocess.check_output(["git", "branch", "--show-current"])
558- .decode("UTF-8")
559- .split()[0]
560- )
561- if local_branch == "main":
562- return
563- # Unknown state; bail to avoid doing any damage
564- assert local_branch == "master"
565- sys.stderr.write('Migrating local default branch name to "main"...\n')
566- subprocess.check_call(["git", "branch", "-m", "master", "main"])
567- subprocess.check_call(["git", "fetch"])
568- subprocess.check_call(["git", "branch", "--unset-upstream"])
569- subprocess.check_call(["git", "branch", "-u", "origin/main"])
570- subprocess.check_call(
571- ["git", "symbolic-ref", "refs/remotes/origin/HEAD", "refs/remotes/origin/main"]
572- )
573-
574-
575-def update(lpid, force):
576- if lpid:
577- user = "%s@" % (lpid)
578- else:
579- user = ""
580- if force and os.path.exists(ScaleBotLocal):
581- shutil.rmtree(ScaleBotLocal)
582-
583- ScaleBotRemote = "git+ssh://%sgit.launchpad.net/scalebot" % (user)
584- umaskSav = os.umask(int("077", 8))
585- if os.path.exists(ScaleBotLocal):
586- os.chdir(ScaleBotLocal)
587- handle_default_branch_rename()
588- subprocess.run(["git", "pull"], check=True)
589- else:
590- subprocess.run(["git", "clone", ScaleBotRemote, ScaleBotLocal], check=True)
591- os.umask(umaskSav)
592-
593-
594-def loadLabs():
595- config = []
596- labroot = os.path.join(ScaleBotLocal, "labs")
597- for labdir in os.scandir(labroot):
598- if not labdir.is_dir():
599- continue
600- labcfg = {}
601- interesting_yaml = ["lab", "machines", "clouds"]
602- for f in os.scandir(labdir):
603- if f.name[-5:] != ".yaml":
604- continue
605- if f.name[:-5] not in interesting_yaml:
606- continue
607- with open(f.path, "r") as stream:
608- y = yaml.safe_load(stream)
609- labcfg[f.name[:-5]] = y
610- if "lab" not in labcfg.keys():
611- continue
612- config.append(labcfg)
613- return config
614-
615-
616-class YAMLCache:
617- """
618- Save/load an object from a YAML file
619- """
620-
621- def __init__(self, path):
622- """
623- path: file to save/load from
624- """
625- self.path = path
626-
627- def load(self):
628- try:
629- with open(self.path, "r") as stream:
630- return yaml.safe_load(stream)
631- except FileNotFoundError:
632- return {}
633-
634- def save(self, data):
635- oldumask = os.umask(0o077)
636- tmppath = "%s.tmp" % (self.path)
637- with open(tmppath, "w") as tmpfile:
638- yaml.dump(data, tmpfile)
639- os.fsync(tmpfile)
640- os.rename(tmppath, self.path)
641- os.umask(oldumask)
642-
643-
644-class LabMAAS:
645- MAASCreds = os.path.join(Config, "mass-credentials.yaml")
646- MAASHostCache = os.path.join(Config, "maas-host-cache.yaml")
647-
648- def __init__(self, endpoint):
649- self.endpoint = endpoint
650- self.client = self._get_client()
651-
652- def api_key_prompt(self):
653- """
654- Prompt the user for a MAAS API key and save it for next time
655- """
656- key = input("Enter a MAAS API key for %s: " % (self.endpoint))
657- cache = YAMLCache(self.MAASCreds)
658- y = cache.load()
659- y[self.endpoint] = key
660- cache.save(y)
661- return key
662-
663- def get_api_key(self):
664- """
665- Lookup the user's API key for endpoint, fallback to prompting
666- """
667- try:
668- with open(self.MAASCreds, "r") as stream:
669- y = yaml.safe_load(stream)
670- key = y[self.endpoint]
671- except (FileNotFoundError, KeyError):
672- key = self.api_key_prompt()
673- return key
674-
675- def get_machine_by_hostname(self, hostname):
676- """
677- Return a MAAS machine object for a given hostname
678- """
679- import maas.client
680-
681- machine = None
682- cache = YAMLCache(self.MAASHostCache)
683- y = cache.load()
684- try:
685- maas_id = y[self.endpoint][hostname]
686- machine = self.client.machines.get(system_id=maas_id)
687- except (maas.client.bones.CallError, KeyError):
688- for m in self.client.machines.list(hostnames=[hostname]):
689- if m.hostname == hostname:
690- machine = m
691- if self.endpoint not in y.keys():
692- y[self.endpoint] = {}
693- y[self.endpoint][hostname] = m.system_id
694- cache.save(y)
695- return machine
696-
697- def _get_client(self):
698- """
699- Return a MAAS client object for the MAAS endpoint where our node
700- resides.
701- """
702- import maas.client
703-
704- while True:
705- try:
706- apikey = self.get_api_key()
707- client = maas.client.connect(self.endpoint, apikey=apikey)
708- # Test the client. This will raise maas.client.bones.CallError
709- # if authentication fails
710- client.users.whoami()
711- return client
712- except (maas.client.bones.CallError, AttributeError, ValueError):
713- sys.stderr.write("Unable to authenticate to %s\n" % (self.endpoint))
714- self.api_key_prompt()
715-
716-
717-class Machine:
718- def __init__(self, name):
719- self.name = name
720- labs = loadLabs()
721- for lab in labs:
722- if lab["machines"] is None:
723- continue
724- if name in lab["machines"].keys():
725- self.data = lab["machines"][name]
726- self.labdata = lab["lab"]
727- try:
728- self.clouddata = lab["clouds"]
729- except KeyError:
730- # clouds.yaml is only needed for subcommands that
731- # use MAAS. Those commands will fail later.
732- pass
733- return
734- raise KeyError("Machine not found")
735-
736- def __repr__(self):
737- return yaml.dump(self.data, default_flow_style=False)
738-
739- def _do_ipmitool(self, cmd):
740- bmc = self.data["bmc"]
741- if bmc["type"] != "ipmi":
742- raise KeyError("Machine has unknown power type")
743- subprocess.run(
744- [
745- "ipmitool",
746- "-I",
747- "lanplus",
748- "-H",
749- bmc["address"],
750- "-U",
751- bmc["user"],
752- "-P",
753- bmc["password"],
754- ]
755- + cmd,
756- check=True,
757- )
758-
759- def mc_reset(self):
760- cmd = ["mc", "reset", "cold"]
761- self._do_ipmitool(cmd)
762-
763- def sel_clear(self):
764- cmd = ["sel", "clear"]
765- self._do_ipmitool(cmd)
766-
767- def power(self, state):
768- cmd = ["chassis", "power", state]
769- self._do_ipmitool(cmd)
770-
771- def open_console(self, force):
772- """
773- Open an interactive console session to the machine.
774- Currently only supports conserver.
775- """
776- # We currently assume that any lab that provides a conserver
777- # will have all machines in that lab connected to the conserver.
778- # Theory is that if labkey can connect to a console, so could
779- # a conserver, and that's the better layer. However, there are
780- # situations where this may not be true. For example, the Moonshot
781- # chassis does not support an active console connection to every
782- # server at the same time like conserver would do, so labkey may
783- # want to support connecting to those systems directly. In which
784- # case, we may need to introduce a per-machine console-type field.
785- if self.labdata and "conserver" in self.labdata.keys():
786- if shutil.which("console") is None:
787- raise OSError(
788- "'console' command not found. Try 'sudo apt install conserver-client'"
789- )
790- concfg = self.labdata["conserver"]
791- subprocess.run(
792- [
793- "console",
794- "-M%s" % (concfg["master"]),
795- "-p%s" % (concfg["port"]),
796- self.name,
797- ],
798- stdin=sys.stdin,
799- stdout=sys.stdout,
800- stderr=sys.stderr,
801- )
802- return
803- if "bmc" in self.data.keys() and self.data["bmc"]["type"] == "ipmi":
804- if shutil.which("ipmitool") is None:
805- raise OSError(
806- "'ipmitool' command not found. Try 'sudo apt install ipmitool'"
807- )
808- if force:
809- self._do_ipmitool(["sol", "deactivate"])
810- self._do_ipmitool(["sol", "activate"])
811- return
812-
813- raise Exception("No valid console definition for %s" % (self.name))
814-
815- def get_cloud_endpoint(self):
816- if self.labdata is None:
817- raise KeyError("No lab.yaml associated with %s" % (self.name))
818- if not hasattr(self, "clouddata"):
819- raise KeyError("No clouds.yaml associated with %s" % (self.name))
820-
821- if self.clouddata["clouds"]["scalebot"]["type"] != "maas":
822- raise KeyError("Cloud is not of type MAAS")
823- endpoint = self.clouddata["clouds"]["scalebot"]["endpoint"]
824- return endpoint
825-
826- def ssh(self):
827- if "maas-name" in self.data.keys():
828- hostname = self.data["maas-name"]
829- else:
830- hostname = self.name
831-
832- labmaas = LabMAAS(self.get_cloud_endpoint())
833- machine = labmaas.get_machine_by_hostname(hostname)
834- if machine is None:
835- raise KeyError("MAAS Server has no machine with hostname %s" % (hostname))
836- # We use the first IP address we can ping. This is just a heuristic.
837- # It is possible that the node has an IP we can not reach that happens
838- # to match an IP on another host that we can reach, say if we've
839- # picked the same RFC 1918 subnets. In which case, we could connect
840- # to the wrong system.
841- ip = None
842- for address in machine.ip_addresses:
843- try:
844- subprocess.check_call(
845- ["ping", "-c", "1", address],
846- stdout=subprocess.DEVNULL,
847- stderr=subprocess.DEVNULL,
848- )
849- ip = address
850- except subprocess.CalledProcessError:
851- print("Could not ping %s" % (address))
852- continue
853- if ip is None:
854- raise KeyError(
855- "Unable to determine IP address for %s from MAAS" % (hostname)
856- )
857- subprocess.run(["ssh", "-l", "ubuntu", ip])
858-
859-
860-def show_owners(lab):
861- """Print owner-* tags for all machines in the given lab."""
862- lablist = loadLabs()
863- labs = {a["lab"]["name"]: a["machines"] for a in lablist}
864- if lab not in labs.keys():
865- print("Lab %s not found. Valid ones are: %s." % (lab, ", ".join(labs.keys())))
866- return
867- for labhost, machine in labs[lab].items():
868- if "maas-tags" in machine:
869- owners = [tag for tag in machine["maas-tags"] if tag.startswith("owner-")]
870- else:
871- owners = []
872- print("%s: %s" % (labhost, ", ".join(owners)))
873-
874-
875-if __name__ == "__main__":
876- power_cmds = ["on", "off", "cycle", "mc-reset", "reset", "status"]
877- machine_cmds = power_cmds + ["console", "sel-clear", "show", "ssh"]
878- lab_cmds = ["show-owners"]
879- cmd_help = {
880- "show": "Show machine configuration",
881- "on": "Power machine on",
882- "off": "Power machine off",
883- "cycle": "Power cycle machine",
884- "mc-reset": "Perform cold reset of BMC",
885- "sel-clear": "Clear system event log",
886- "reset": "Hard reset machine",
887- "console": "Open a console session",
888- "ssh": "ssh to machine",
889- "status": "Show machine power status",
890- "update": "Update cached machine information",
891- "show-owners": "Print owner-* tags for all machines in a lab",
892- }
893-
894- d = "Do things with machines in ScaleBot labs"
895- parser = argparse.ArgumentParser(prog=ProgName, description=d)
896- subparsers = parser.add_subparsers(help="sub-command help")
897- update_parser = subparsers.add_parser("update", help=cmd_help["update"])
898- update_parser.add_argument(
899- "-u", dest="lpid", metavar="LPID", required=False, help="Launchpad ID"
900- )
901- update_parser.add_argument(
902- "-f",
903- dest="force",
904- action="store_true",
905- required=False,
906- help="Flush machine cache before update",
907- )
908- update_parser.set_defaults(cmd="update")
909- for a in machine_cmds:
910- action_parser = subparsers.add_parser(a, help=cmd_help[a])
911- action_parser.add_argument("machine", metavar="MACHINE")
912- action_parser.set_defaults(cmd=a)
913- if a == "console":
914- action_parser.add_argument(
915- "-f",
916- dest="force",
917- action="store_true",
918- required=False,
919- help="Terminate all other console connections to MACHINE if necessary",
920- )
921- for a in lab_cmds:
922- action_parser = subparsers.add_parser(a, help=cmd_help[a])
923- action_parser.add_argument("lab", metavar="LAB")
924- action_parser.set_defaults(cmd=a)
925- args = parser.parse_args()
926-
927- if args.cmd == "update":
928- update(lpid=args.lpid, force=args.force)
929- sys.exit(0)
930- elif args.cmd == "show-owners":
931- show_owners(args.lab)
932- sys.exit(0)
933-
934- m = Machine(args.machine)
935- if args.cmd == "show":
936- print(m)
937- elif args.cmd == "mc-reset":
938- m.mc_reset()
939- elif args.cmd == "sel-clear":
940- m.sel_clear()
941- elif args.cmd == "console":
942- m.open_console(args.force)
943- elif args.cmd == "ssh":
944- m.ssh()
945- elif args.cmd in power_cmds:
946- m.power(args.cmd)
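
A small piece of labkey worth noting is YAMLCache: saves go through a temp file followed by os.rename, so a crash mid-write can't leave a half-written credentials file, and the 0o077 umask keeps it private. Typical use is load, mutate, save (the path here is hypothetical):

    # Illustrative YAMLCache round-trip (class defined in labkey above).
    cache = YAMLCache("/tmp/maas-host-cache.yaml")
    data = cache.load()                    # returns {} if the file doesn't exist
    data.setdefault("maas.example", {})["node1"] = "abc123"
    cache.save(data)                       # 0600 temp file + atomic rename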
947diff --git a/lp-scripts/LaunchpadBugBucketer.py b/lp-scripts/LaunchpadBugBucketer.py
948deleted file mode 100644
949index 81e9e99..0000000
950--- a/lp-scripts/LaunchpadBugBucketer.py
951+++ /dev/null
952@@ -1,82 +0,0 @@
953-import collections
954-import inspect
955-from launchpadlib.launchpad import Launchpad
956-
957-
958-class LaunchpadBugBucketer(collections.UserDict):
959- """Sort project bugs into buckets to aide in report generation.
960-
961- This is a base class. It is not intended to be used directly. Rather,
962- it should be inherited by subclasses which add their own "bucketing"
963- methods.
964-
965- Bucketing methods names begin with the string "is_" and take a bug task
966- parameter. Each bucketing method is called for each task until one
967- matches. The task is then placed in the bucket that it first matches.
968- It is up to the subclass to make sure that each task falls into one and
969- only one bucket.
970-
971- A simple two bucket subclass example would be:
972-
973- class MyBugBucket(LaunchpadBugBucketer):
974- LaunchpadBugBucketer.__init__(self, 'myproject', 'myseries',
975- requiredTags=['patchset'])
976- def is_green(self, task):
977- if task.status in ['Fix Committed', 'Fix Released']:
978- return True
979- return False
980-
981- def is_red(self, task):
982- if not self.is_green(task):
983- return True
984- return False
985-
986- The returned object can be treated as a dictionary, where the bucketnames
987- (is_BUCKET) are the keys that return a list of bug tasks in that bucket.
988- For example:
989-
990- b = MyBugBucket()
991- for task in b['green']:
992- bug = s.tasktoBug(task)
993- sys.stdout.write("%s: %s\n" % (bug.title, task.web_link))
994- """
995-
996- def __init__(self, project, series, requiredTags=[]):
997- """
998- Args:
999- project (str): Launchpad project name
1000- series (str): A series defined in the given Launchpad project
1001- requiredTags (:obj:`list` of :obj:`str`): Only bucket bugs that
1002- have one or more tags from this list.
1003- """
1004- self.lp = Launchpad.login_with("lpbugs", "production", version="devel")
1005- self.project = self.lp.projects[project]
1006- self.series = series
1007- self.requiredTags = requiredTags
1008- self._doBucketing()
1009- collections.UserDict.__init__(self, self.buckets)
1010-
1011- def taskToBug(self, task):
1012- bugid = int(task.self_link.split("/")[-1])
1013- return self.lp.bugs[bugid]
1014-
1015- def _doBucketing(self):
1016- methods = inspect.getmembers(self, predicate=inspect.ismethod)
1017-
1018- self.buckets = {}
1019- for series in self.project.series:
1020- if series.name != self.series:
1021- continue
1022- for task in series.searchTasks():
1023- bug = self.taskToBug(task)
1024- if not set(self.requiredTags) & set(bug.tags):
1025- continue
1026- for mName, mFunc in methods:
1027- if not mName.startswith("is_"):
1028- continue
1029- if mFunc(task):
1030- b = mName[len("is_") :]
1031- if b not in self.buckets.keys():
1032- self.buckets[b] = []
1033- self.buckets[b].append(task)
1034- break
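
The docstring's example subclass is slightly garbled as written (the `LaunchpadBugBucketer.__init__` call floats in the class body instead of inside an `__init__` method); a corrected minimal version, mirroring the BanderaBugBucketer subclass later in this diff, would be:

    class MyBugBucketer(LaunchpadBugBucketer):
        def __init__(self):
            # 'myproject'/'myseries' are placeholders for a real LP
            # project and series.
            LaunchpadBugBucketer.__init__(
                self, "myproject", "myseries", requiredTags=["patchset"]
            )

        def is_green(self, task):
            return task.status in ["Fix Committed", "Fix Released"]

        def is_red(self, task):
            return not self.is_green(task)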
1035diff --git a/lp-scripts/bandera-bug-csv-summary.py b/lp-scripts/bandera-bug-csv-summary.py
1036deleted file mode 100755
1037index 05f84aa..0000000
1038--- a/lp-scripts/bandera-bug-csv-summary.py
1039+++ /dev/null
1040@@ -1,46 +0,0 @@
1041-#!/usr/bin/python3
1042-
1043-import argparse
1044-import csv
1045-from LaunchpadBugBucketer import LaunchpadBugBucketer
1046-
1047-
1048-class BanderaBugBucketer(LaunchpadBugBucketer):
1049- def __init__(self):
1050- LaunchpadBugBucketer.__init__(
1051- self, "bandera", "ubuntu-16.04", requiredTags=["patchset"]
1052- )
1053-
1054- def is_green(self, task):
1055- if task.status in ["Fix Committed", "Fix Released"]:
1056- return True
1057- return False
1058-
1059- def is_amber(self, task):
1060- if self.is_green(task):
1061- return False
1062- elif task.milestone and task.milestone.name == "ubuntu-16.04.3":
1063- return True
1064- return False
1065-
1066- def is_red(self, task):
1067- if self.is_green(task) or self.is_amber(task):
1068- return False
1069- return True
1070-
1071-
1072-if __name__ == "__main__":
1073- parser = argparse.ArgumentParser()
1074- parser.add_argument("-o", "--outfile")
1075- args = parser.parse_args()
1076-
1077- s = BanderaBugBucketer()
1078-
1079- with open(args.outfile, "w") as csvfile:
1080- c = csv.writer(csvfile)
1081- c.writerow(["Title", "Bug ID", "Importance", "Status", "RAG Risk"])
1082- for bucket in s.keys():
1083- for task in s[bucket]:
1084- bug = s.taskToBug(task)
1085- link = '=HYPERLINK("%s", "LP: #%d")' % (task.web_link, bug.id)
1086- c.writerow([bug.title, link, task.importance, task.status, bucket])
1087diff --git a/lp-scripts/bugreport/README.md b/lp-scripts/bugreport/README.md
1088deleted file mode 100644
1089index 253303a..0000000
1090--- a/lp-scripts/bugreport/README.md
1091+++ /dev/null
1092@@ -1,33 +0,0 @@
1093-# bugreport
1094-This tool help with collecting information on bugs, you can use this tool to generate a report on how many bugs were processed
1095-during a given timeframe. You can generate a quick summary to show you the number of fix-released, fix-commited, in-progress etc. It also displays some statistics like how long the bug was active, how long it was dormant/inactive etc that might come handy in triaging bugs reported for your project.
1096-
1097-# How to invoke the tool:
1098-## Summary only:
1099-./bugreport.py -p project -d start date
1100-
1101-Will generate a summary bug report based on importance (ie Critical, High etc).
1102-
1103-## Summary with bug details:
1104-./bugreport.py -p project -d start date -v
1105-
1106-Will generate a summary bug report as above, and detailed report for each bug.
1107-
1108-## Summary for bugs by importance (high, medium, low etc)
1109-./bugreport.py -p project -d start date -i high,medium,low
1110-
1111-Please do not use spaces in comma separated list.
1112-
1113-# How to create HTML files:
1114-1. ./bugreport.py -p ubuntu-power-systems -d 2017-08-01 -o out.t2t
1115-
1116-Where out.t2t is an ascii text file, you could use the -v option to generate
1117-a full report.
1118-
1119-2. txt2tags -C htmlconvert.conf -t html out.t2t
1120-
1121-This will generate out.html.
1122-
1123-## How to create PDF:
1124-1. Open the out.html in a web browser.
1125-2. Print and save as PDF.
1126diff --git a/lp-scripts/bugreport/bugreport.py b/lp-scripts/bugreport/bugreport.py
1127deleted file mode 100755
1128index ca9b5b1..0000000
1129--- a/lp-scripts/bugreport/bugreport.py
1130+++ /dev/null
1131@@ -1,232 +0,0 @@
1132-#! /usr/bin/python
1133-# Author: Manoj Iyer manoj.iyer@canonical.com
1134-# License: GPLv3
1135-from launchpadlib.launchpad import Launchpad
1136-from launchpadlib.uris import LPNET_SERVICE_ROOT
1137-from optparse import OptionParser
1138-from re import compile
1139-from datetime import datetime as dt
1140-from collections import defaultdict as coldict
1141-import numpy as np
1142-import os
1143-import sys
1144-
1145-
1146-def gen_bug_report(lp, lp_bugs, project, verbose):
1147- bug_summary_info = coldict(dict)
1148- bug_details_info = coldict(dict)
1149- url = compile("https://api.launchpad.net/1.0/~|/")
1150-
1151- for task in lp_bugs:
1152- bug_info = {}
1153- bug_subtask = []
1154-
1155- if task.status in bug_summary_info[task.importance]:
1156- bug_summary_info[task.importance][task.status] += 1
1157- else:
1158- bug_summary_info[task.importance][task.status] = 1
1159-
1160- if "#Bugs Processed" in bug_summary_info[task.importance]:
1161- bug_summary_info[task.importance]["#Bugs Processed"] += 1
1162- else:
1163- bug_summary_info[task.importance]["#Bugs Processed"] = 1
1164-
1165- if "#Bugs Closed" not in bug_summary_info[task.importance]:
1166- bug_summary_info[task.importance]["#Bugs Closed"] = 0
1167-
1168- # A bug could affect multiple projects, we care only whether
1169- # the project we are tracking is complete.
1170- lp_bug_tasks = lp.bugs[task.bug.id].bug_tasks
1171- for bugtask in lp_bug_tasks:
1172- if bugtask.bug_target_name in project:
1173- bug_is_complete = bugtask.is_complete
1174- if verbose is True:
1175- bug_subtask.append("%s: %s" % (bugtask.bug_target_name, bugtask.status))
1176-
1177- if bug_is_complete is True:
1178- if "#Bugs Closed" in bug_summary_info[task.importance]:
1179- bug_summary_info[task.importance]["#Bugs Closed"] += 1
1180-
1181- if verbose is True:
1182- inactive_days = 0
1183- if bug_is_complete is not True:
1184- inactive_days = np.busday_count(
1185- task.bug.date_last_updated.strftime("%Y-%m-%d"),
1186- dt.now().strftime("%Y-%m-%d"),
1187- )
1188-
1189- active_days = np.busday_count(
1190- task.bug.date_created.strftime("%Y-%m-%d"),
1191- task.bug.date_last_updated.strftime("%Y-%m-%d"),
1192- )
1193-
1194- assignee = (
1195- "Unassigned"
1196- if task.assignee_link is None
1197- else url.sub("", task.assignee_link)
1198- )
1199-
1200- bug_info[task.bug.id] = [
1201- task.bug.date_created.strftime("%Y-%m-%d"),
1202- task.bug.date_last_updated.strftime("%Y-%m-%d"),
1203- active_days,
1204- inactive_days,
1205- task.bug.message_count,
1206- assignee,
1207- "''<br>''".join(bug_subtask),
1208- ]
1209-
1210- bug_details_info[task.importance].update(bug_info)
1211-
1212- return bug_summary_info, bug_details_info
1213-
1214-
1215-def main():
1216- parser = OptionParser(usage="usage: %prog [options]", version="%prog 1.0")
1217- parser.add_option(
1218- "-d",
1219- "--date",
1220- dest="start_date",
1221- action="store",
1222- default="2017-01-01",
1223- type="string",
1224- help="start date for bug search",
1225- )
1226- parser.add_option(
1227- "-p",
1228- "--project",
1229- dest="project",
1230- action="store",
1231- default="ubuntu-power-systems",
1232- type="string",
1233- help="name of the launchpad project",
1234- )
1235- parser.add_option(
1236- "-s",
1237- "--status",
1238- dest="bug_status",
1239- action="store",
1240- default=(
1241- "New,Opinion,Invalid,Won't Fix,Expired,"
1242- "Confirmed,Triaged,In Progress,Fix Committed,"
1243- "Fix Released,Incomplete"
1244- ),
1245- type="string",
1246- help="bug status (or quoted and comma seperated list)",
1247- )
1248- parser.add_option(
1249- "-i",
1250- "--importance",
1251- dest="bug_importance",
1252- default=("Unknown,Undecided,Critical,High,Medium,Low,Wishlist"),
1253- type="string",
1254- help="bug importance (or comma seperated list, no spaces)",
1255- )
1256- parser.add_option(
1257- "-t",
1258- "--tag",
1259- dest="bug_tag",
1260- default=None,
1261- help="bug tag (or quoted and comma seperated list)",
1262- )
1263- parser.add_option(
1264- "-m",
1265- "--modify",
1266- dest="bug_tag_modify",
1267- default="Any",
1268- help="search any or all tags (valid args: any or all)",
1269- )
1270- parser.add_option(
1271- "-v",
1272- "--verbose",
1273- dest="verbose",
1274- action="store_true",
1275- help="verbose output with bug details",
1276- )
1277- parser.add_option(
1278- "-a",
1279- "--author",
1280- dest="author",
1281- default="Manoj Iyer manoj.iyer@canonical.com",
1282- help='"Firstname Lastname first.last@canonical.com"',
1283- )
1284- parser.add_option(
1285- "-o",
1286- "--outfile",
1287- dest="outfile",
1288- help="filename to store output (default stdout)",
1289- )
1290-
1291- (options, args) = parser.parse_args()
1292-
1293- if len(args) is None:
1294- parser.error("No arguments found!")
1295-
1296- script_name = sys.argv[0].split("/")[-1].split(".")[0]
1297- cachedir = os.path.expanduser("~/.launchpadlib/cache")
1298-
1299- launchpad = Launchpad.login_with(script_name, LPNET_SERVICE_ROOT, cachedir)
1300- lp_project = launchpad.projects[options.project]
1301-
1302- lp_bugs = [
1303- task
1304- for task in lp_project.searchTasks(
1305- created_since=None
1306- if options.start_date is None
1307- else dt.strptime(options.start_date, "%Y-%m-%d").isoformat(),
1308- status=options.bug_status.split(","),
1309- importance=options.bug_importance.title().replace(" ", "").split(","),
1310- tags=None if options.bug_tag is None else options.bug_tag.split(","),
1311- tags_combinator=options.bug_tag_modify.title(),
1312- )
1313- ]
1314-
1315- with (open(options.outfile, "w") if options.outfile else sys.stdout) as f:
1316- f.write(
1317- "Bug activity in %s project since %s\n\n\n"
1318- % (options.project, options.start_date)
1319- )
1320- f.write(" || {:<35} | {:<20} |\n".format("Created By", "Date"))
1321- f.write(" | [%s]" % (options.author) + " | %%mtime(%A %B %d, %Y) |\n")
1322-
1323- if f is not sys.stdout and options.verbose is True:
1324- sys.stdout.write(
1325- "Bug activity in %s project since %s\n"
1326- % (options.project, options.start_date)
1327- )
1328- sys.stdout.write("Generating detailed report in %s \n" % options.outfile)
1329- sys.stdout.write("Please wait...\n")
1330- sys.stdout.flush()
1331-
1332- summary_report, detailed_report = gen_bug_report(
1333- launchpad, lp_bugs, options.project, options.verbose
1334- )
1335-
1336- for k, v in sorted(summary_report.iteritems()):
1337- f.write("\n= Summary of %s bugs =\n" % k)
1338- f.write("|| {:<14} | {:<8} |\n".format("Status", "Count"))
1339- for x, y in sorted(v.iteritems()):
1340- f.write("| {:<15} | {:<8} |\n".format(x, y))
1341- if options.verbose is True:
1342- f.write("== Details on %s bugs ==\n" % k)
1343- f.write(
1344- "|| Bug# | Created | Last Updated | Active Period "
1345- "| Dormant Period | #Comments | Assignee | Status |\n"
1346- )
1347-
1348- for a, b in sorted(
1349- detailed_report[k].iteritems(),
1350- key=lambda item: item[1][1],
1351- reverse=True,
1352- ):
1353- f.write(
1354- "| [%s https://launchpad.net/bugs/%s] | %s |\n"
1355- % (a, a, " | ".join(map(str, b)))
1356- )
1357-
1358- if f is not sys.stdout:
1359- f.close()
1360-
1361-
1362-if __name__ == "__main__":
1363- main()
1364diff --git a/lp-scripts/bugreport/htmlconvert.conf b/lp-scripts/bugreport/htmlconvert.conf
1365deleted file mode 100644
1366index 9338612..0000000
1367--- a/lp-scripts/bugreport/htmlconvert.conf
1368+++ /dev/null
1369@@ -1,10 +0,0 @@
1370-%!options: --toc-level 4
1371-%!postproc(html): '(?i)(<TH)' '\1 style="text-align:left;"'
1372-%!postproc(html): '(?i)(<TR)' '\1 style="font-size:11; font-family:ubuntu,sans-serif; text-align:left;"'
1373-%!postproc(html): <HEAD> '<HEAD>\n<STYLE TYPE="text/css">\n</STYLE>'
1374-%!postproc(html): (</STYLE>) 'h1 {color:#740946; font-size:16; font-family:ubuntu,sans-serif;} \n\1'
1375-%!postproc(html): (</STYLE>) 'h2 {color:#740946; font-size:14; font-family:ubuntu,sans-serif;} \n\1'
1376-%!postproc(html): (</STYLE>) 'h3 {color:#740946; font-size:12; font-family:ubuntu,sans-serif;} \n\1'
1377-%!postproc(html): (</STYLE>) 'h4 {color:#740946; font-size:11; font-family:ubuntu,sans-serif;} \n\1'
1378-%!postproc(html): (</STYLE>) 'body {font-size:11; font-family:ubuntu,sans-serif;;} \n\1'
1379-
1380diff --git a/lp-scripts/clone-project-milestones.py b/lp-scripts/clone-project-milestones.py
1381deleted file mode 100755
1382index 39099ab..0000000
1383--- a/lp-scripts/clone-project-milestones.py
1384+++ /dev/null
1385@@ -1,74 +0,0 @@
1386-#!/usr/bin/env python3
1387-
1388-#
1389-# Copy the series and milestones from one project to another.
1390-#
1391-
1392-import argparse
1393-import logging
1394-from launchpadlib.launchpad import Launchpad
1395-
1396-
1397-# These may contain proprietary content, skip them
1398-IgnoredSeries = ["firmware", "ppa-uose", "silicon", "trunk"]
1399-
1400-
1401-def clone_project_milestones(src, dest, dry_run):
1402- lp = Launchpad.login_with("lpbugs", "production", version="devel")
1403- src_project = lp.projects[src]
1404- dest_project = lp.projects[dest]
1405- for src_series in src_project.series:
1406- if src_series.name in IgnoredSeries:
1407- continue
1408- dest_series = dest_project.getSeries(name=src_series.name)
1409- if dest_series:
1410- logger.warning("Series %s already exists, skipping\n" % (dest_series))
1411- else:
1412- logger.info("Creating series %s" % (src_series.name))
1413- if not dry_run:
1414- dest_series = dest_project.newSeries(
1415- name=src_series.name, summary=src_series.summary
1416- )
1417- for src_milestone in src_series.all_milestones:
1418- dest_milestone = dest_project.getMilestone(name=src_milestone.name)
1419- if dest_milestone:
1420- logger.warning(
1421- "Project already has milestone %s, skipping\n"
1422- % (dest_milestone.name)
1423- )
1424- else:
1425- logger.info("Creating milestone %s" % (src_milestone.name))
1426- if not dry_run:
1427- dest_milestone = dest_series.newMilestone(
1428- name=src_milestone.name,
1429- date_targeted=src_milestone.date_targeted,
1430- )
1431- dest_milestone.is_active = src_milestone.is_active
1432- dest_milestone.lp_save()
1433- src_release = src_milestone.release
1434- if src_release:
1435- logger.info(
1436- "Releasing milestone %s on %s "
1437- % (src_milestone.name, src_milestone.release.date_released)
1438- )
1439- if not dry_run:
1440- dest_milestone.createProductRelease(
1441- date_released=src_milestone.release.date_released
1442- )
1443-
1444-
1445-if __name__ == "__main__":
1446- parser = argparse.ArgumentParser()
1447- parser.add_argument("-s", "--source", required=True)
1448- parser.add_argument("-d", "--dest", required=True)
1449- parser.add_argument("--dry-run", action="store_true")
1450- args = parser.parse_args()
1451-
1452- logger = logging.getLogger("clone-project-milestones")
1453- logger.setLevel(logging.INFO)
1454- ch = logging.StreamHandler()
1455- formatter = logging.Formatter("%(levelname)s - %(message)s")
1456- ch.setFormatter(formatter)
1457- logger.addHandler(ch)
1458-
1459- clone_project_milestones(args.source, args.dest, args.dry_run)
1460diff --git a/lp-scripts/dgx2-performance-regression-googlesheet.py b/lp-scripts/dgx2-performance-regression-googlesheet.py
1461deleted file mode 100755
1462index d84a410..0000000
1463--- a/lp-scripts/dgx2-performance-regression-googlesheet.py
1464+++ /dev/null
1465@@ -1,99 +0,0 @@
1466-#!/usr/bin/env python3
1467-#
1468-# Updates an existing google sheet that tracks performance regression bugs.
1469-# To run, you'll need google sheet API credentials, and a number of pip
1470-# libraries installed (a virtualenv is recommended). See
1471-# https://developers.google.com/sheets/api/quickstart/python
1472-# And, of course, you'll need write-access to the spreadsheet.
1473-#
1474-import pickle
1475-import os.path
1476-from googleapiclient.discovery import build
1477-from google_auth_oauthlib.flow import InstalledAppFlow
1478-from google.auth.transport.requests import Request
1479-from launchpadlib.launchpad import Launchpad
1480-
1481-FIELDNAMES = ["Link", "Title", "Importance", "Status"]
1482-GOOGLE_SPREADSHEET_ID = "1CkjLn_yWxR_LN2nhOHCkyiJq-UvuI2XTGjHIiUAW7mU"
1483-GOOGLE_RANGE_NAME = "Sheet1!A2"
1484-
1485-
1486-def update_google_sheet(data):
1487- # If modifying these scopes, delete the file token.pickle.
1488- scopes = ["https://www.googleapis.com/auth/spreadsheets"]
1489-
1490- creds = None
1491- # The file token.pickle stores the user's access and refresh tokens, and is
1492- # created automatically when the authorization flow completes for the first
1493- # time.
1494- if os.path.exists("token.pickle"):
1495- with open("token.pickle", "rb") as token:
1496- creds = pickle.load(token)
1497- # If there are no (valid) credentials available, let the user log in.
1498- if not creds or not creds.valid:
1499- if creds and creds.expired and creds.refresh_token:
1500- creds.refresh(Request())
1501- else:
1502- flow = InstalledAppFlow.from_client_secrets_file(
1503- "credentials.json",
1504- scopes,
1505- )
1506- creds = flow.run_local_server(port=0)
1507- # Save the credentials for the next run
1508- with open("token.pickle", "wb") as token:
1509- pickle.dump(creds, token)
1510-
1511- service = build("sheets", "v4", credentials=creds)
1512-
1513- result = (
1514- service.spreadsheets()
1515- .values()
1516- .clear(
1517- spreadsheetId=GOOGLE_SPREADSHEET_ID,
1518- range=GOOGLE_RANGE_NAME,
1519- )
1520- .execute()
1521- )
1522-
1523- body = {"values": data}
1524- result = (
1525- service.spreadsheets()
1526- .values()
1527- .update(
1528- spreadsheetId=GOOGLE_SPREADSHEET_ID,
1529- range=GOOGLE_RANGE_NAME,
1530- valueInputOption="USER_ENTERED",
1531- body=body,
1532- )
1533- .execute()
1534- )
1535- print("{0} cells updated.".format(result.get("updatedCells")))
1536-
1537-
1538-if __name__ == "__main__":
1539- lp = Launchpad.login_with("lpbugs", "production", version="devel")
1540-
1541- p = lp.projects["ubuntu"]
1542-
1543- data = [FIELDNAMES]
1544-
1545- for task in p.searchTasks(tags=["dgx2-performance-regression"], status=[]):
1546- row = []
1547- bug = int(task.web_link.split("/")[-1])
1548- for field in FIELDNAMES:
1549- if field == "Link":
1550- row.append(
1551- '=HYPERLINK("{}", "LP: #{}")'.format(
1552- task.web_link,
1553- bug,
1554- )
1555- )
1556- if field == "Title":
1557- row.append(lp.bugs[bug].title)
1558- if field == "Importance":
1559- row.append(task.importance)
1560- if field == "Status":
1561- row.append(task.status)
1562- data.append(row)
1563-
1564- update_google_sheet(data)
1565diff --git a/lp-scripts/dump-bug-subscribers.py b/lp-scripts/dump-bug-subscribers.py
1566deleted file mode 100755
1567index 70f8c72..0000000
1568--- a/lp-scripts/dump-bug-subscribers.py
1569+++ /dev/null
1570@@ -1,62 +0,0 @@
1571-#!/usr/bin/python3
1572-
1573-# Dump the bugs associated subscribers for a specified
1574-# LP project into a CSV file
1575-
1576-import argparse
1577-import csv
1578-from launchpadlib.launchpad import Launchpad
1579-
1580-
1581-bugStatuses = [
1582- "New",
1583- "Opinion",
1584- "Invalid",
1585- "Won't Fix",
1586- "Expired",
1587- "Confirmed",
1588- "Triaged",
1589- "In Progress",
1590- "Fix Committed",
1591- "Fix Released",
1592- "Incomplete",
1593-]
1594-
1595-
1596-def taskToBug(task):
1597- bugid = int(task.self_link.split("/")[-1])
1598- return lp.bugs[bugid]
1599-
1600-
1601-def getSubscriptionId(subscription_url):
1602- lpString = str(subscription_url)
1603- lpPath = lpString.rsplit("/", 1)
1604- return lpPath[-1]
1605-
1606-
1607-if __name__ == "__main__":
1608- parser = argparse.ArgumentParser()
1609- parser.add_argument(
1610- "project",
1611- help="Launchpad project from which \
1612- subscribers will be dumped",
1613- )
1614- parser.add_argument("-o", "--outfile", help="Output file", required=True)
1615- args = parser.parse_args()
1616-
1617- lp = Launchpad.login_with("lpbugs", "production", version="devel")
1618- project = lp.projects[args.project]
1619-
1620- with open(args.outfile, "w") as csvfile:
1621- c = csv.writer(csvfile)
1622- c.writerow(["Bug", "Title", "Subscribers"])
1623- # no way to list all open/closed bugs, so iterate through bug states
1624- for bugStatus in bugStatuses:
1625- for task in project.searchTasks(status=bugStatus):
1626- output = []
1627- bug = taskToBug(task)
1628- output.append('=HYPERLINK("%s", "LP: #%d")' % (task.web_link, bug.id))
1629- output.append(bug.title)
1630- for subscription in bug.subscriptions:
1631- output.append(getSubscriptionId(subscription))
1632- c.writerow(output)
1633diff --git a/lp-scripts/hisi_sync_patch_count.py b/lp-scripts/hisi_sync_patch_count.py
1634deleted file mode 100755
1635index fc72e07..0000000
1636--- a/lp-scripts/hisi_sync_patch_count.py
1637+++ /dev/null
1638@@ -1,44 +0,0 @@
1639-#!/usr/bin/env python3
1640-
1641-# Count the patches in the kunpeng920 "sync" bugs
1642-# Output to stdout
1643-
1644-from launchpadlib.launchpad import Launchpad
1645-
1646-
1647-def taskToBug(task):
1648- bugid = int(task.self_link.split("/")[-1])
1649- return lp.bugs[bugid]
1650-
1651-
1652-if __name__ == "__main__":
1653- lp = Launchpad.login_with("lpbugs", "production", version="devel")
1654-
1655- subsystem_search_strings = [
1656- "hns3",
1657- "RDMA/hns",
1658- "crypto: hisilicon",
1659- "hisi_sas",
1660- "irqchip/gic-v",
1661- "drivers/perf",
1662- "spi:",
1663- "mtd:",
1664- "drm/hisilicon",
1665- "PCI:",
1666- "PCI/",
1667- ]
1668-
1669- kunpeng = lp.projects["kunpeng920"]
1670- total = 0
1671- for task in kunpeng.searchTasks(search_text="sync"):
1672- bug = taskToBug(task)
1673- count = 0
1674- lines = bug.description.split("\n")
1675- for line in lines:
1676- for subsystem in subsystem_search_strings:
1677- if subsystem in line:
1678- count += 1
1679- print(f'LP#{bug.id}, "{bug.title}", {count}')
1680- total += count
1681-
1682- print(f"Grand Total: {total}")
1683diff --git a/lp-scripts/lp-bulk-action.py b/lp-scripts/lp-bulk-action.py
1684deleted file mode 100755
1685index 12cca76..0000000
1686--- a/lp-scripts/lp-bulk-action.py
1687+++ /dev/null
1688@@ -1,119 +0,0 @@
1689-#!/usr/bin/python3
1690-
1691-import argparse
1692-import sys
1693-from launchpadlib.launchpad import Launchpad
1694-
1695-Projects = [
1696- "bandera",
1697- "kunpeng920",
1698- "pearl",
1699- "pearl2",
1700- "quicksilver",
1701- "ubuntu-power-systems",
1702- "ubuntu-z-systems",
1703- "yarmouth",
1704- "yarmouth2",
1705-]
1706-
1707-
1708-def createMilestone(lp, project, series, milestone, date):
1709- p = lp.projects[project]
1710- s = p.getSeries(name=series)
1711- if not s:
1712- sys.stderr.write(
1713- "Warning: Project %s has no series %s, skipping\n" % (project, series)
1714- )
1715- return
1716- if p.getMilestone(name=milestone):
1717- sys.stderr.write(
1718- "Warning: Project %s already has milestone %s, skipping\n"
1719- % (project, milestone)
1720- )
1721- return
1722- s.newMilestone(name=milestone, date_targeted=date)
1723-
1724-
1725-def releaseMilestone(lp, project, milestone, date):
1726- p = lp.projects[project]
1727- m = p.getMilestone(name=milestone)
1728- if not m:
1729- sys.stderr.write(
1730- "Warning: Project %s has no milestone %s, skipping\n" % (project, milestone)
1731- )
1732- return
1733- if m.release:
1734- sys.stderr.write(
1735- "Warning: Milestone %s in project %s is already released\n"
1736- % (milestone, project)
1737- )
1738- return
1739- m.createProductRelease(date_released=date)
1740-
1741-
1742-def updateMilestone(lp, project, milestone, date):
1743- p = lp.projects[project]
1744- m = p.getMilestone(name=milestone)
1745- if not m:
1746- sys.stderr.write(
1747- "Warning: Project %s does not have milestone %s, skipping\n"
1748- % (project, milestone)
1749- )
1750- return
1751- m.date_targeted = date
1752- m.lp_save()
1753-
1754-
1755-def createSeries(lp, project, series, summary):
1756- p = lp.projects[project]
1757- if p.getSeries(name=series):
1758- sys.stderr.write(
1759- "Warning: Project %s already has series %s, skipping\n" % (project, series)
1760- )
1761- return
1762- p.newSeries(name=series, summary=summary)
1763-
1764-
1765-if __name__ == "__main__":
1766- parser = argparse.ArgumentParser()
1767- subparsers = parser.add_subparsers(dest="cmd")
1768- subparsers.required = True
1769- createMilestoneParser = subparsers.add_parser(
1770- "create-milestone", help="Create Milestone"
1771- )
1772- createMilestoneParser.add_argument("-s", "--series", required=True)
1773- createMilestoneParser.add_argument("-d", "--date", required=True)
1774- createMilestoneParser.add_argument("milestone")
1775- createMilestoneParser.set_defaults(cmd="create-milestone")
1776-
1777- releaseMilestoneParser = subparsers.add_parser(
1778- "release-milestone", help="Release Milestone"
1779- )
1780- releaseMilestoneParser.add_argument("-d", "--date", required=True)
1781- releaseMilestoneParser.add_argument("milestone")
1782- releaseMilestoneParser.set_defaults(cmd="release-milestone")
1783-
1784- releaseMilestoneParser = subparsers.add_parser(
1785- "update-milestone", help="Update Milestone"
1786- )
1787- releaseMilestoneParser.add_argument("-d", "--date", required=True)
1788- releaseMilestoneParser.add_argument("milestone")
1789- releaseMilestoneParser.set_defaults(cmd="update-milestone")
1790-
1791- createSeriesParser = subparsers.add_parser("create-series", help="Create Series")
1792- createSeriesParser.add_argument("-s", "--summary", required=True)
1793- createSeriesParser.add_argument("series")
1794- createSeriesParser.set_defaults(cmd="create-series")
1795- args = parser.parse_args()
1796-
1797- lp = Launchpad.login_with("lpbugs", "production", version="devel")
1798-
1799- for project in Projects:
1800- if args.cmd == "create-milestone":
1801- createMilestone(lp, project, args.series, args.milestone, args.date)
1802- elif args.cmd == "create-series":
1803- createSeries(lp, project, args.series, args.summary)
1804- elif args.cmd == "release-milestone":
1805- releaseMilestone(lp, project, args.milestone, args.date)
1806- elif args.cmd == "update-milestone":
1807- updateMilestone(lp, project, args.milestone, args.date)
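
For reference, a minimal sketch of the subcommand dispatch used above. Note that add_subparsers(dest="cmd") already stores the chosen subcommand name in args.cmd, so the set_defaults(cmd=...) calls are redundant but harmless; the series, date, and milestone values below are illustrative.

    import argparse

    parser = argparse.ArgumentParser()
    subparsers = parser.add_subparsers(dest="cmd")
    subparsers.required = True
    create = subparsers.add_parser("create-milestone", help="Create Milestone")
    create.add_argument("-s", "--series", required=True)
    create.add_argument("-d", "--date", required=True)
    create.add_argument("milestone")

    # Illustrative invocation; the names and date are made up.
    args = parser.parse_args(
        ["create-milestone", "-s", "ubuntu-18.04", "-d", "2018-07-26", "ubuntu-18.04.1"]
    )
    print(args.cmd, args.series, args.date, args.milestone)
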
1808diff --git a/lp-scripts/pearl-biweekly-bug-report-csv4.py b/lp-scripts/pearl-biweekly-bug-report-csv4.py
1809deleted file mode 100755
1810index 1295c43..0000000
1811--- a/lp-scripts/pearl-biweekly-bug-report-csv4.py
1812+++ /dev/null
1813@@ -1,143 +0,0 @@
1814-#!/usr/bin/python3
1815-# Change History
1816-# Encode the Rationale/Comment field as UTF-8 bytes
1817-# Add a DateCreated column recording each bug's creation date
1818-# Add a Scope column showing whether the impact is arch-all, arch-arm64, or the D05 board only
1819-
1820-import argparse
1821-import csv
1822-from launchpadlib.launchpad import Launchpad
1823-
1824-rationaleSectionHeader = "[16.04.3 Risk Comments]"
1825-rationaleBoilerplate = "Still under investigation, not yet root-caused."
1826-
1827-
1828-def taskToBug(task):
1829- bugid = int(task.self_link.split("/")[-1])
1830- return lp.bugs[bugid]
1831-
1832-
1833-def ownerFromStatus(status):
1834- if "Incomplete" in status:
1835- owner = "Huawei"
1836- else:
1837- owner = "Canonical"
1838- return owner
1839-
1840-
1841-def rationaleFromDescription(des):
1842- export_rationale_flag = (
1843- 0 # Flag to detect the start of the "[16.04.3 Risk Comments]" section
1844- )
1845- rationale = ""
1846- for (
1847- des_line
1848- ) in (
1849- des.splitlines()
1850- ): # Loop through the bug description line-by-line looking for sections to export
1851- if des_line == rationaleSectionHeader:
1852- export_rationale_flag = 1
1853- else:
1854- if export_rationale_flag == 1:
1855- if des_line == "":
1856- export_rationale_flag = 0
1857- else:
1858- rationale += des_line
1859- return rationale
1860-
1861-
1862-def checkScopeTags(tags):
1863- scopeTags = ["scope-arch-all", "scope-arch-arm64", "scope-d05-only"]
1864- scope = set(scopeTags).intersection(tags)
1865- for i in scope:
1866- return i
1867-
1868- return "Unknown"
1869-
1870-
1871-if __name__ == "__main__":
1872- parser = argparse.ArgumentParser()
1873- parser.add_argument("-o", "--outfile", required=True)
1874- args = parser.parse_args()
1875-
1876- lp = Launchpad.login_with("lpbugs", "production", version="devel")
1877-
1878- pearl = lp.projects["pearl"]
1879-
1880- with open(args.outfile, "w") as csvfile:
1881- c = csv.writer(csvfile)
1882- c.writerow(
1883- [
1884- "Title",
1885- "Link",
1886- "Status",
1887- "Milestone",
1888- "Scope",
1889- "Rationale/Comment",
1890- "Owner",
1891- "DateCreated",
1892- ]
1893- )
1894- # First pass: patchset bugs
1895- for series in pearl.series:
1896- if series.name not in ["ubuntu-16.04"]:
1897- continue
1898- # First pass, phase one: search for the 'open' bugs
1899- for task in series.searchTasks():
1900- bug = taskToBug(task)
1901- # All kernel patchset bugs in the pearl project are tagged 'upstream-risky'
1902- # if 'upstream-risky' not in bug.tags:
1903- # continue
1904- if task.milestone is None:
1905- milestone = "TBD"
1906- else:
1907- milestone = task.milestone.name
1908- link = '=HYPERLINK("%s", "LP: #%d")' % (task.web_link, bug.id)
1909- rationale = rationaleFromDescription(bug.description)
1910- if (not rationale) and (task.milestone is None):
1911- rationale = rationaleBoilerplate
1912- scope = checkScopeTags(bug.tags)
1913- owner = ownerFromStatus(task.status)
1914- date_created = bug.date_created.strftime("%Y/%m/%d")
1915- c.writerow(
1916- [
1917- bug.title,
1918- link,
1919- task.status,
1920- milestone,
1921- scope,
1922- rationale.encode("utf_8"),
1923- owner,
1924- date_created,
1925- ]
1926- )
1927-
1928- # First pass, phase two: Fix Released bugs
1929- for task in series.searchTasks(status="Fix Released"):
1930- bug = taskToBug(task)
1931- # All kernel patchset bugs in the pearl project are tagged 'upstream-risky'
1932- # if not 'upstream-risky' not in bug.tags:
1933- # continue
1934- if task.milestone is None:
1935- milestone = "TBD"
1936- else:
1937- milestone = task.milestone.name
1938- link = '=HYPERLINK("%s", "LP: #%d")' % (task.web_link, bug.id)
1939- rationale = rationaleFromDescription(bug.description)
1940- if (not rationale) and (task.milestone is None):
1941- rationale = rationaleBoilerplate
1942- scope = checkScopeTags(bug.tags)
1943- owner = ownerFromStatus(task.status)
1944- date_created = bug.date_created.strftime("%Y/%m/%d")
1945- c.writerow(
1946- [
1947- bug.title,
1948- link,
1949- task.status,
1950- milestone,
1951- scope,
1952- rationale.encode("utf_8"),
1953- owner,
1954- date_created,
1955- ]
1956- )
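
A condensed sketch of the rationaleFromDescription() parser above for the single-section case, with an illustrative description; one quirk worth noting is that matched lines are concatenated without separators, so multi-line rationales run together:

    def rationale_from_description(des, header="[16.04.3 Risk Comments]"):
        collecting = False
        rationale = ""
        for line in des.splitlines():
            if line == header:
                collecting = True
            elif collecting:
                if line == "":
                    break  # the original resets its flag here instead of breaking
                rationale += line
        return rationale

    desc = "Summary\n\n[16.04.3 Risk Comments]\nPatch is invasive\nand needs soak time.\n\nOther notes"
    print(rationale_from_description(desc))
    # Patch is invasiveand needs soak time.
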
1957diff --git a/lp-scripts/pearl-biweekly-report-csv.py b/lp-scripts/pearl-biweekly-report-csv.py
1958deleted file mode 100755
1959index 5da289d..0000000
1960--- a/lp-scripts/pearl-biweekly-report-csv.py
1961+++ /dev/null
1962@@ -1,125 +0,0 @@
1963-#!/usr/bin/python3
1964-
1965-import argparse
1966-import csv
1967-from launchpadlib.launchpad import Launchpad
1968-
1969-rationaleSectionHeader = "[16.04.3 Risk Comments]"
1970-rationaleBoilerplate = "Still under investigation, not yet root-caused."
1971-
1972-
1973-def taskToBug(task):
1974- bugid = int(task.self_link.split("/")[-1])
1975- return lp.bugs[bugid]
1976-
1977-
1978-def ownerFromStatus(status):
1979- if "Incomplete" in status:
1980- owner = "Huawei"
1981- else:
1982- owner = "Canonical"
1983- return owner
1984-
1985-
1986-def rationaleFromDescription(des):
1987- export_rationale_flag = (
1988- 0 # Flag to detect the start of the "[16.04.3 Risk Comments]" section
1989- )
1990- rationale = ""
1991- for (
1992- des_line
1993- ) in (
1994- des.splitlines()
1995- ): # Loop through the bug description line-by-line looking for sections to export
1996- if des_line == rationaleSectionHeader:
1997- export_rationale_flag = 1
1998- else:
1999- if export_rationale_flag == 1:
2000- if des_line == "":
2001- export_rationale_flag = 0
2002- else:
2003- rationale += des_line
2004- return rationale
2005-
2006-
2007-if __name__ == "__main__":
2008- parser = argparse.ArgumentParser()
2009- parser.add_argument("-o", "--outfile", required=True)
2010- args = parser.parse_args()
2011-
2012- lp = Launchpad.login_with("lpbugs", "production", version="devel")
2013-
2014- pearl = lp.projects["pearl"]
2015-
2016- with open(args.outfile, "w") as csvfile:
2017- c = csv.writer(csvfile)
2018- c.writerow(
2019- [
2020- "Title",
2021- "Link",
2022- "Status",
2023- "Milestone",
2024- "Rationale/Comment",
2025- "Owner",
2026- "DateCreated",
2027- ]
2028- )
2029- # First pass: patchset bugs
2030- for series in pearl.series:
2031- if series.name not in ["ubuntu-16.04"]:
2032- continue
2033- # First pass, phase one: search for the 'open' bugs
2034- for task in series.searchTasks():
2035- bug = taskToBug(task)
2036- # All kernel patchset bugs in the pearl project are tagged 'upstream-risky'
2037- # if 'upstream-risky' not in bug.tags:
2038- # continue
2039- if task.milestone is None:
2040- milestone = "TBD"
2041- else:
2042- milestone = task.milestone.name
2043- link = '=HYPERLINK("%s", "LP: #%d")' % (task.web_link, bug.id)
2044- rationale = rationaleFromDescription(bug.description)
2045- if (not rationale) and (task.milestone is None):
2046- rationale = rationaleBoilerplate
2047- owner = ownerFromStatus(task.status)
2048- date_created = bug.date_created.strftime("%Y/%m/%d")
2049- c.writerow(
2050- [
2051- bug.title,
2052- link,
2053- task.status,
2054- milestone,
2055- rationale.encode("utf_8"),
2056- owner,
2057- date_created,
2058- ]
2059- )
2060-
2061- # First pass, phase two: Fix Released bugs
2062- for task in series.searchTasks(status="Fix Released"):
2063- bug = taskToBug(task)
2064- # All kernel patchset bugs in the pearl project are tagged 'upstream-risky'
2065- # if not 'upstream-risky' not in bug.tags:
2066- # continue
2067- if task.milestone is None:
2068- milestone = "TBD"
2069- else:
2070- milestone = task.milestone.name
2071- link = '=HYPERLINK("%s", "LP: #%d")' % (task.web_link, bug.id)
2072- rationale = rationaleFromDescription(bug.description)
2073- if (not rationale) and (task.milestone is None):
2074- rationale = rationaleBoilerplate
2075- owner = ownerFromStatus(task.status)
2076- date_created = bug.date_created.strftime("%Y/%m/%d")
2077- c.writerow(
2078- [
2079- bug.title,
2080- link,
2081- task.status,
2082- milestone,
2083- rationale.encode("utf_8"),
2084- owner,
2085- date_created,
2086- ]
2087- )
2088diff --git a/lp-scripts/pearl-biweekly-report.csv.py b/lp-scripts/pearl-biweekly-report.csv.py
2089deleted file mode 100644
2090index bfebacc..0000000
2091--- a/lp-scripts/pearl-biweekly-report.csv.py
2092+++ /dev/null
2093@@ -1,149 +0,0 @@
2094-#!/usr/bin/python3
2095-# Change History
2096-# Encode the Rationale/Comment field as UTF-8 bytes
2097-# Add a DateCreated column recording each bug's creation date
2098-# Add a Scope column showing whether the impact is arch-all, arch-arm64, or the D05 board only
2099-
2100-import argparse
2101-import csv
2102-from launchpadlib.launchpad import Launchpad
2103-
2104-rationaleSectionHeader = "[16.04.3 Risk Comments]"
2105-rationaleBoilerplate = "Still under investigation, not yet root-caused."
2106-
2107-
2108-def taskToBug(task):
2109- bugid = int(task.self_link.split("/")[-1])
2110- return lp.bugs[bugid]
2111-
2112-
2113-def ownerFromStatus(status):
2114- if "Incomplete" in status:
2115- owner = "Huawei"
2116- else:
2117- owner = "Canonical"
2118- return owner
2119-
2120-
2121-def rationaleFromDescription(des):
2122- export_rationale_flag = (
2123- 0 # Flag to detect the start of the "[16.04.3 Risk Comments]" section
2124- )
2125- rationale = ""
2126- for (
2127- des_line
2128- ) in (
2129- des.splitlines()
2130- ): # Loop through the bug description line-by-line looking for sections to export
2131- if des_line == rationaleSectionHeader:
2132- export_rationale_flag = 1
2133- else:
2134- if export_rationale_flag == 1:
2135- if des_line == "":
2136- export_rationale_flag = 0
2137- else:
2138- rationale += des_line
2139- return rationale
2140-
2141-
2142-def checkScopeTags(tags):
2143- scopeTags = ["scope-arch-all", "scope-arch-arm64", "scope-d05-only"]
2144- scope = set(scopeTags).intersection(tags)
2145- for i in scope:
2146- if i == "scope-arch-all":
2147- i = "Generic"
2148- elif i == "scope-arch-arm64":
2149- i = "ARM64 generic"
2150- elif i == "scope-d05-only":
2151- i = "D05 only"
2152- return i
2153-
2154- return "Unknown"
2155-
2156-
2157-if __name__ == "__main__":
2158- parser = argparse.ArgumentParser()
2159- parser.add_argument("-o", "--outfile", required=True)
2160- args = parser.parse_args()
2161-
2162- lp = Launchpad.login_with("lpbugs", "production", version="devel")
2163-
2164- pearl = lp.projects["pearl"]
2165-
2166- with open(args.outfile, "w") as csvfile:
2167- c = csv.writer(csvfile)
2168- c.writerow(
2169- [
2170- "Title",
2171- "Link",
2172- "Status",
2173- "Milestone",
2174- "Scope",
2175- "Rationale/Comment",
2176- "Owner",
2177- "DateCreated",
2178- ]
2179- )
2180- # First pass: patchset bugs
2181- for series in pearl.series:
2182- if series.name not in ["ubuntu-16.04"]:
2183- continue
2184- # First pass, phase one: search for the 'open' bugs
2185- for task in series.searchTasks():
2186- bug = taskToBug(task)
2187- # All kernel patchset bugs in the pearl project are tagged 'upstream-risky'
2188- # if 'upstream-risky' not in bug.tags:
2189- # continue
2190- if task.milestone is None:
2191- milestone = "TBD"
2192- else:
2193- milestone = task.milestone.name
2194- link = '=HYPERLINK("%s", "LP: #%d")' % (task.web_link, bug.id)
2195- rationale = rationaleFromDescription(bug.description)
2196- if (not rationale) and (task.milestone is None):
2197- rationale = rationaleBoilerplate
2198- scope = checkScopeTags(bug.tags)
2199- owner = ownerFromStatus(task.status)
2200- date_created = bug.date_created.strftime("%Y/%m/%d")
2201- c.writerow(
2202- [
2203- bug.title,
2204- link,
2205- task.status,
2206- milestone,
2207- scope,
2208- rationale.encode("utf_8"),
2209- owner,
2210- date_created,
2211- ]
2212- )
2213-
2214- # First pass, phase two: Fix Released bugs
2215- for task in series.searchTasks(status="Fix Released"):
2216- bug = taskToBug(task)
2217- # All kernel patchset bugs in the pearl project are tagged 'upstream-risky'
2218- # if not 'upstream-risky' not in bug.tags:
2219- # continue
2220- if task.milestone is None:
2221- milestone = "TBD"
2222- else:
2223- milestone = task.milestone.name
2224- link = '=HYPERLINK("%s", "LP: #%d")' % (task.web_link, bug.id)
2225- rationale = rationaleFromDescription(bug.description)
2226- if (not rationale) and (task.milestone is None):
2227- rationale = rationaleBoilerplate
2228- scope = checkScopeTags(bug.tags)
2229- owner = ownerFromStatus(task.status)
2230- date_created = bug.date_created.strftime("%Y/%m/%d")
2231- c.writerow(
2232- [
2233- bug.title,
2234- link,
2235- task.status,
2236- milestone,
2237- scope,
2238- rationale.encode("utf_8"),
2239- owner,
2240- date_created,
2241- ]
2242- )
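
One caveat in the checkScopeTags() variant above: it returns the first element produced by iterating a set intersection, so a bug carrying two scope tags would map to an arbitrary one of them, since set iteration order is undefined. A minimal sketch with illustrative tags:

    scope_tags = {"scope-arch-all", "scope-arch-arm64", "scope-d05-only"}
    bug_tags = ["scope-arch-all", "scope-d05-only", "kernel"]
    matches = scope_tags.intersection(bug_tags)
    print(next(iter(matches)) if matches else "Unknown")
    # prints either "scope-arch-all" or "scope-d05-only", depending on hashing
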
2243diff --git a/lp-scripts/pearl-bug-csv-summary.py b/lp-scripts/pearl-bug-csv-summary.py
2244deleted file mode 100755
2245index ad4a049..0000000
2246--- a/lp-scripts/pearl-bug-csv-summary.py
2247+++ /dev/null
2248@@ -1,46 +0,0 @@
2249-#!/usr/bin/python3
2250-
2251-import argparse
2252-import csv
2253-from LaunchpadBugBucketer import LaunchpadBugBucketer
2254-
2255-
2256-class PearlBugBucketer(LaunchpadBugBucketer):
2257- def __init__(self):
2258- LaunchpadBugBucketer.__init__(
2259- self, "pearl", "ubuntu-16.04", requiredTags=["upstream-risky"]
2260- )
2261-
2262- def is_green(self, task):
2263- if task.status in ["Fix Committed", "Fix Released"]:
2264- return True
2265- return False
2266-
2267- def is_amber(self, task):
2268- if self.is_green(task):
2269- return False
2270- elif task.milestone and task.milestone.name == "ubuntu-16.04.3":
2271- return True
2272- return False
2273-
2274- def is_red(self, task):
2275- if self.is_green(task) or self.is_amber(task):
2276- return False
2277- return True
2278-
2279-
2280-if __name__ == "__main__":
2281- parser = argparse.ArgumentParser()
2282- parser.add_argument("-o", "--outfile")
2283- args = parser.parse_args()
2284-
2285- s = PearlBugBucketer()
2286-
2287- with open(args.outfile, "w") as csvfile:
2288- c = csv.writer(csvfile)
2289- c.writerow(["Title", "Bug ID", "Importance", "Status", "RAG Risk"])
2290- for bucket in s.keys():
2291- for task in s[bucket]:
2292- bug = s.taskToBug(task)
2293- link = '=HYPERLINK("%s", "LP: #%d")' % (task.web_link, bug.id)
2294- c.writerow([bug.title, link, task.importance, task.status, bucket])
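
LaunchpadBugBucketer itself is not part of this diff, so here is a minimal sketch of the green/amber/red classification that the three overrides above implement, with plain values standing in for Launchpad task objects:

    def bucket(status, milestone):
        if status in ("Fix Committed", "Fix Released"):
            return "green"  # is_green()
        if milestone == "ubuntu-16.04.3":
            return "amber"  # is_amber()
        return "red"        # is_red(): everything else

    assert bucket("Fix Released", None) == "green"
    assert bucket("In Progress", "ubuntu-16.04.3") == "amber"
    assert bucket("New", None) == "red"
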
2295diff --git a/lp-scripts/pearl-risky-patch-schedule.py b/lp-scripts/pearl-risky-patch-schedule.py
2296deleted file mode 100755
2297index 01d546e..0000000
2298--- a/lp-scripts/pearl-risky-patch-schedule.py
2299+++ /dev/null
2300@@ -1,38 +0,0 @@
2301-#!/usr/bin/python3
2302-
2303-import argparse
2304-import csv
2305-from launchpadlib.launchpad import Launchpad
2306-
2307-
2308-def taskToBug(task):
2309- bugid = int(task.self_link.split("/")[-1])
2310- return lp.bugs[bugid]
2311-
2312-
2313-if __name__ == "__main__":
2314- parser = argparse.ArgumentParser()
2315- parser.add_argument("-o", "--outfile", required=True)
2316- args = parser.parse_args()
2317-
2318- lp = Launchpad.login_with("lpbugs", "production", version="devel")
2319-
2320- pearl = lp.projects["pearl"]
2321-
2322- with open(args.outfile, "w") as csvfile:
2323- c = csv.writer(csvfile)
2324- for series in pearl.series:
2325- if series.name not in ["ubuntu-17.04"]:
2326- continue
2327- for task in series.searchTasks():
2328- bug = taskToBug(task)
2329- # All kernel patchset bugs in the pearl project are tagged
2330- # 'upstream-risky'
2331- if "upstream-risky" not in bug.tags:
2332- continue
2333- if task.milestone is None:
2334- date = "TBD"
2335- else:
2336- date = task.milestone.date_targeted
2337- link = '=HYPERLINK("%s", "LP: #%d")' % (task.web_link, bug.id)
2338- c.writerow([bug.title, link, date])
2339diff --git a/lp-scripts/pearl2-biweekly-report.csv.py b/lp-scripts/pearl2-biweekly-report.csv.py
2340deleted file mode 100755
2341index a5323cd..0000000
2342--- a/lp-scripts/pearl2-biweekly-report.csv.py
2343+++ /dev/null
2344@@ -1,120 +0,0 @@
2345-#!/usr/bin/python3
2346-
2347-# To Do
2348-# 1) Currently this script requires the ppa-uose series to be present. Enhance
2349-# to remove this restriction while ensuring there are no duplicate bug imports
2350-# 2) Add "scope" information back in once bugs are appropriately tagged
2351-# 3) Add "rationale" information back in once bugs are updated with 'Rationale' sections in description
2352-
2353-
2354-import argparse
2355-import csv
2356-from launchpadlib.launchpad import Launchpad
2357-
2358-rationaleSectionHeader = "[18.04.1 Risk Comments]"
2359-rationaleBoilerplate = "Still under investigation, not yet root-caused."
2360-
2361-
2362-def taskToBug(task):
2363- bugid = int(task.self_link.split("/")[-1])
2364- return lp.bugs[bugid]
2365-
2366-
2367-def ownerFromStatus(status):
2368- if "Incomplete" in status:
2369- owner = "Huawei"
2370- else:
2371- owner = "Canonical"
2372- return owner
2373-
2374-
2375-def rationaleFromDescription(des):
2376- export_rationale_flag = (
2377- 0 # Flag to detect the start of the "[18.04.1 Risk Comments]" section
2378- )
2379- rationale = ""
2380- for (
2381- des_line
2382- ) in (
2383- des.splitlines()
2384- ): # Loop through the bug description line-by-line looking for sections to export
2385- if des_line == rationaleSectionHeader:
2386- export_rationale_flag = 1
2387- else:
2388- if export_rationale_flag == 1:
2389- if des_line == "":
2390- export_rationale_flag = 0
2391- else:
2392- rationale += des_line
2393- return rationale
2394-
2395-
2396-def checkScopeTags(tags):
2397- scopeTags = ["scope-arch-all", "scope-arch-arm64", "scope-d06-only"]
2398- scope = set(scopeTags).intersection(tags)
2399- for i in scope:
2400- if i == "scope-arch-all":
2401- i = "Generic"
2402- elif i == "scope-arch-arm64":
2403- i = "ARM64 generic"
2404- elif i == "scope-d06-only":
2405- i = "D06 only"
2406- return i
2407-
2408- return "Unknown"
2409-
2410-
2411-def getMilestones(series_array):
2412- milestones = []
2413- for series in series_array:
2414- milestone_url = series["milestone_link"]
2415- if milestone_url is not None:
2416- milestone_name = milestone_url.rsplit("/", 1)
2417- milestones.append(milestone_name[-1])
2418- return milestones
2419-
2420-
2421-def getUoseMilestone(series_array):
2422- milestones = getMilestones(series_array)
2423- uose_milestone = ""
2424- for milestone in milestones:
2425- if "uose" in milestone:
2426- uose_milestone = milestone
2427- return uose_milestone
2428-
2429-
2430-if __name__ == "__main__":
2431- parser = argparse.ArgumentParser()
2432- parser.add_argument("-o", "--outfile", required=True)
2433- args = parser.parse_args()
2434-
2435- lp = Launchpad.login_with("lpbugs", "production", version="devel")
2436-
2437- pearl = lp.projects["pearl2"]
2438-
2439- with open(args.outfile, "w") as csvfile:
2440- c = csv.writer(csvfile)
2441- c.writerow(
2442- ["Title", "Link", "Status", "Milestone", "Owner", "DateCreated", "Comments"]
2443- )
2444- # First pass: search for the 'open' bugs
2445- for task in pearl.searchTasks():
2446- uose_milestone = getUoseMilestone(task.related_tasks.entries)
2447- bug = taskToBug(task)
2448- link = '=HYPERLINK("%s", "LP: #%d")' % (task.web_link, bug.id)
2449- owner = ownerFromStatus(task.status)
2450- date_created = bug.date_created.strftime("%Y/%m/%d")
2451- c.writerow(
2452- [bug.title, link, task.status, uose_milestone, owner, date_created]
2453- )
2454-
2455- # Second pass: search for the 'Fix Released' bugs
2456- for task in pearl.searchTasks(status="Fix Released"):
2457- bug = taskToBug(task)
2458- uose_milestone = getUoseMilestone(task.related_tasks.entries)
2459- link = '=HYPERLINK("%s", "LP: #%d")' % (task.web_link, bug.id)
2460- owner = ownerFromStatus(task.status)
2461- date_created = bug.date_created.strftime("%Y/%m/%d")
2462- c.writerow(
2463- [bug.title, link, task.status, uose_milestone, owner, date_created]
2464- )
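
A minimal sketch of the milestone-name parsing in getMilestones()/getUoseMilestone() above; the URL is illustrative, assuming the usual Launchpad .../<project>/+milestone/<name> shape that the rsplit() relies on:

    milestone_link = "https://api.launchpad.net/devel/pearl2/+milestone/ubuntu-18.04-uose-1"
    name = milestone_link.rsplit("/", 1)[-1]
    print(name)            # ubuntu-18.04-uose-1
    print("uose" in name)  # True, so getUoseMilestone() would select it
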
2465diff --git a/lp-scripts/pearl2-d06-18.04-patch-status.py b/lp-scripts/pearl2-d06-18.04-patch-status.py
2466deleted file mode 100755
2467index 79316bc..0000000
2468--- a/lp-scripts/pearl2-d06-18.04-patch-status.py
2469+++ /dev/null
2470@@ -1,54 +0,0 @@
2471-#!/usr/bin/python3
2472-
2473-import argparse
2474-import csv
2475-from launchpadlib.launchpad import Launchpad
2476-
2477-
2478-def taskToBug(task):
2479- bugid = int(task.self_link.split("/")[-1])
2480- return lp.bugs[bugid]
2481-
2482-
2483-Pre18041Milestones = [
2484- "ubuntu-18.04-ga",
2485- "ubuntu-18.04-sru-1",
2486- "ubuntu-18.04-sru-2",
2487- "ubuntu-18.04-sru-3",
2488- "ubuntu-18.04-sru-4",
2489- "ubuntu-18.04.1",
2490-]
2491-
2492-if __name__ == "__main__":
2493- parser = argparse.ArgumentParser()
2494- parser.add_argument("-o", "--outfile", required=True)
2495- args = parser.parse_args()
2496-
2497- lp = Launchpad.login_with("lpbugs", "production", version="devel")
2498-
2499- pearl = lp.projects["pearl2"]
2500- series = pearl.getSeries(name="ubuntu-18.04")
2501-
2502- with open(args.outfile, "w") as csvfile:
2503- c = csv.writer(csvfile)
2504- c.writerow(["Title", "Link", "Status", "Expected"])
2505- for task in series.searchTasks(status=[]):
2506- bug = taskToBug(task)
2507- link = '=HYPERLINK("%s", "LP: #%d")' % (task.web_link, bug.id)
2508- # At this point we should know which milestone (SRU cycle)
2509- # patches will land in, or it should be tagged as risky.
2510- if task.status in ["Invalid", "Won't Fix"]:
2511- continue
2512- if task.status == "Incomplete":
2513- if "needs-sru-justification" in bug.tags:
2514- expected = "Needs SRU Justification"
2515- else:
2516- expected = "Need input from HiSilicon"
2517- elif task.milestone:
2518- if task.milestone.name in Pre18041Milestones:
2519- expected = "Before 18.04.1 GA"
2520- else:
2521- expected = "After 18.04.1 GA"
2522- else:
2523- expected = "Needs Canonical Review"
2524- c.writerow([bug.title, link, task.status, expected])
2525diff --git a/lp-scripts/pearl2-tag-ubuntu-18.04.3-risky.py b/lp-scripts/pearl2-tag-ubuntu-18.04.3-risky.py
2526deleted file mode 100755
2527index 01dc923..0000000
2528--- a/lp-scripts/pearl2-tag-ubuntu-18.04.3-risky.py
2529+++ /dev/null
2530@@ -1,100 +0,0 @@
2531-#!/usr/bin/python3
2532-
2533-#
2534-# Make sure every bug carries the ubuntu-18.04.3-ga risk tag it should,
2535-# and none that it shouldn't.
2536-#
2537-import datetime
2538-from enum import Enum
2539-import sys
2540-from launchpadlib.launchpad import Launchpad
2541-
2542-
2543-class Risky(Enum):
2544- HIGH = 1
2545- LOW = 2
2546- WONT_FIX = 3
2547- TBD = 4
2548-
2549-
2550-RiskyTagMap = {
2551- Risky.HIGH: "ubuntu-18.04.3-ga-risk-high",
2552- Risky.LOW: "ubuntu-18.04.3-ga-risk-low",
2553- Risky.WONT_FIX: "ubuntu-18.04.3-ga-wont-fix",
2554- Risky.TBD: "ubuntu-18.04.3-ga-risk-tbd",
2555-}
2556-
2557-
2558-def get_risk(bug):
2559- found = None
2560- for r in RiskyTagMap.keys():
2561- if RiskyTagMap[r] in bug.tags:
2562- # Only one tag should be found
2563- assert found is None
2564- found = r
2565- return found
2566-
2567-
2568-def set_risk(bug, risk):
2569- # First remove all the risky tags that do not apply
2570- for r in RiskyTagMap.keys():
2571- if r is risk:
2572- continue
2573- rmtag = RiskyTagMap[r]
2574- if rmtag in bug.tags:
2575- sys.stderr.write("Removing %s tag from %s\n" % (rmtag, bug.web_link))
2576- newtags = list(bug.tags)
2577- newtags.remove(rmtag)
2578- bug.tags = newtags
2579- # Now make sure the correct risky tag is applied
2580- addtag = RiskyTagMap[risk]
2581- if addtag not in bug.tags:
2582- sys.stderr.write("Adding %s tag to %s\n" % (addtag, bug.web_link))
2583- bug.tags = bug.tags + [addtag]
2584- bug.lp_save()
2585-
2586-
2587-if __name__ == "__main__":
2588- lp = Launchpad.login_with("lpbugs", "production", version="devel")
2589-
2590- pearl2 = lp.projects["pearl2"]
2591- target_milestone = "ubuntu-18.04.3"
2592- target_date = None
2593- # Find the target date of our milestone. We can compare this with
2594- # the target date of other milestones to figure out if they come
2595- # before or after it
2596- for m in pearl2.active_milestones:
2597- if m.name in [target_milestone]:
2598- target_date = m.date_targeted
2599- break
2600- assert target_date is not None
2601-
2602- series = pearl2.getSeries(name="ubuntu-18.04-hwe")
2603- for task in series.searchTasks():
2604- if task.milestone is not None:
2605- tdelta = task.milestone.date_targeted - target_date
2606- if tdelta > datetime.timedelta(0):
2607- # The target milestone is after ours - we've already
2608- # decided it won't happen in time.
2609- set_risk(task.bug, Risky.WONT_FIX)
2610- else:
2611- # If it is our milestone or before, we're confident it'll
2612- # land in time.
2613- set_risk(task.bug, Risky.LOW)
2614- continue
2615- # Now process the ones w/ no milestone set.
2616-    # A task in any of the states below should always have a milestone, so none should appear here.
2617- assert task.status not in [
2618- "Opinion",
2619- "Invalid",
2620- "Won't Fix",
2621- "Fix Committed",
2622- "Fix Released",
2623- ]
2624- if task.status == "Incomplete":
2625- set_risk(task.bug, Risky.HIGH)
2626- continue
2627- # The rest need manually-set tags. Set them to TBD by default
2628- if get_risk(task.bug):
2629- continue
2630- set_risk(task.bug, Risky.TBD)
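
A minimal sketch of the milestone date comparison above, with made-up dates: a task milestone targeted after the 18.04.3 target date yields a positive timedelta and is classified Risky.WONT_FIX, while anything at or before it becomes Risky.LOW.

    import datetime

    target_date = datetime.datetime(2019, 8, 8)  # illustrative 18.04.3 target
    task_date = datetime.datetime(2019, 10, 17)  # illustrative later milestone
    tdelta = task_date - target_date
    print(tdelta > datetime.timedelta(0))  # True -> set_risk(..., Risky.WONT_FIX)
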
2631diff --git a/lp-scripts/project-bug-lint.py b/lp-scripts/project-bug-lint.py
2632deleted file mode 100755
2633index b341bf2..0000000
2634--- a/lp-scripts/project-bug-lint.py
2635+++ /dev/null
2636@@ -1,182 +0,0 @@
2637-#!/usr/bin/python3
2638-
2639-from launchpadlib.launchpad import Launchpad
2640-import argparse
2641-import sys
2642-
2643-
2644-# A base class for bug properties, mainly to make them sortable
2645-class BugProperty:
2646- def __init__(self, property):
2647- self.index = self.PropertyList.index(property)
2648-
2649- def __eq__(self, other):
2650- return self.index == other.index
2651-
2652- def __ne__(self, other):
2653- return self.index != other.index
2654-
2655- def __lt__(self, other):
2656- return self.index < other.index
2657-
2658- def __le__(self, other):
2659- return self.index <= other.index
2660-
2661- def __gt__(self, other):
2662- return self.index > other.index
2663-
2664- def __ge__(self, other):
2665- return self.index >= other.index
2666-
2667- def __repr__(self):
2668- return self.PropertyList[self.index]
2669-
2670-
2671-class BugStatus(BugProperty):
2672- PropertyList = [
2673- "New",
2674- "Incomplete",
2675- "Opinion",
2676- "Invalid",
2677- "Won't Fix",
2678- "Confirmed",
2679- "Triaged",
2680- "In Progress",
2681- "Fix Committed",
2682- "Fix Released",
2683- ]
2684-
2685- # min/max could ideally be in the baseclass, but I don't know
2686-    # how to do that and still refer to the correct PropertyList
2687- def min():
2688- return BugStatus(BugStatus.PropertyList[0])
2689-
2690- def max():
2691- return BugStatus(BugStatus.PropertyList[-1])
2692-
2693- def isTerminalState(self):
2694- terminalStates = ["Fix Released", "Invalid", "Won't Fix"]
2695- return self.PropertyList[self.index] in terminalStates
2696-
2697-
2698-class BugImportance(BugProperty):
2699- PropertyList = ["Undecided", "Wishlist", "Low", "Medium", "High", "Critical"]
2700-
2701- # min/max could ideally be in the baseclass, but I don't know
2702-    # how to do that and still refer to the correct PropertyList
2703- def min():
2704- return BugImportance(BugImportance.PropertyList[0])
2705-
2706- def max():
2707- return BugImportance(BugImportance.PropertyList[-1])
2708-
2709-
2710-class BugTask:
2711- def __init__(self, lptask):
2712- self.status = BugStatus(lptask.status)
2713- self.importance = BugImportance(lptask.importance)
2714-
2715-
2716-class Bug:
2717- def __init__(self):
2718- self.defaultTask = None
2719- self.targetedTasks = {}
2720-
2721- def addDefaultTask(self, task):
2722- self.defaultTask = BugTask(task)
2723-
2724- def getDefaultTask(self):
2725- return self.defaultTask
2726-
2727- def addTargetedTask(self, series, task):
2728- self.targetedTasks[series] = BugTask(task)
2729-
2730- def getTargetedTasks(self):
2731- return self.targetedTasks
2732-
2733-
2734-def checkScopeTags(lpbug):
2735- scopeTags = ["scope-arch-all", "scope-arch-arm64", "scope-d05-only"]
2736- common = set(scopeTags).intersection(lpbug.tags)
2737- if len(common) == 0:
2738-        sys.stdout.write("http://launchpad.net/bugs/%d has no scope tag\n" % (lpbug.id))
2739- if len(common) > 1:
2740- sys.stdout.write(
2741-            "http://launchpad.net/bugs/%d has multiple scope tags\n" % (lpbug.id)
2742- )
2743-
2744-
2745-def taskToBugId(task):
2746- return int(task.self_link.split("/")[-1])
2747-
2748-
2749-if __name__ == "__main__":
2750- parser = argparse.ArgumentParser()
2751- parser.add_argument("-p", "--project", required=True)
2752- args = parser.parse_args()
2753- lp = Launchpad.login_with("lpbugs", "production", version="devel")
2754- project = lp.projects[args.project]
2755-
2756- bugs = {}
2757- # Create a list of bugs for all the "default" tasks
2758- for task in project.searchTasks(status=[]):
2759- bugid = taskToBugId(task)
2760- bugs[bugid] = Bug()
2761- bugs[bugid].addDefaultTask(task)
2762-
2763- # Next, add all the series-targeted tasks
2764- for series in project.series:
2765- for task in series.searchTasks(status=[]):
2766- bugid = taskToBugId(task)
2767- if bugid in bugs.keys():
2768- bugs[bugid].addTargetedTask(series.name, task)
2769- else:
2770- sys.stderr.write(
2771- "WARNING: http://launchpad.net/bugs/%d does not have a default task\n"
2772- % (bugid)
2773- )
2774-
2775- # Now process them.
2776- for bugid in bugs.keys():
2777- bug = bugs[bugid]
2778- targetedTasks = bug.getTargetedTasks()
2779- if len(targetedTasks) == 0:
2780- defaultStatus = bug.getDefaultTask().status
2781- if not defaultStatus.isTerminalState():
2782- sys.stderr.write(
2783- "WARNING: http://launchpad.net/bugs/%d only has a default task\n"
2784- % (bugid)
2785- )
2786- continue
2787-
2788- statusList = []
2789- for k in targetedTasks.keys():
2790- s = targetedTasks[k].status
2791- if not s.isTerminalState():
2792- statusList.append(s)
2793- statusList.sort()
2794- targetStatus = None
2795- if len(statusList) > 0:
2796- targetStatus = statusList[0]
2797-
2798- importanceList = [targetedTasks[k].importance for k in targetedTasks.keys()]
2799- importanceList.sort()
2800- targetImportance = importanceList[-1]
2801-
2802- defaultTask = bug.getDefaultTask()
2803- currentStatus = defaultTask.status
2804- currentImportance = defaultTask.importance
2805- if targetStatus and targetStatus != currentStatus:
2806- sys.stdout.write(
2807- "http://launchpad.net/bugs/%d status %s -> %s\n"
2808- % (bugid, currentStatus, targetStatus)
2809- )
2810- if targetImportance != currentImportance:
2811- sys.stdout.write(
2812- "http://launchpad.net/bugs/%d importance %s -> %s\n"
2813- % (bugid, currentImportance, targetImportance)
2814- )
2815-
2816- if args.project == "pearl":
2817- if not defaultTask.status.isTerminalState():
2818- checkScopeTags(lp.bugs[bugid])
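
As the "min/max could ideally be in the baseclass" comments hint, the comparison boilerplate in BugProperty could also be trimmed: functools.total_ordering derives the remaining rich comparisons from __eq__ and __lt__. A minimal sketch:

    from functools import total_ordering

    @total_ordering
    class BugProperty:
        PropertyList = []  # subclasses supply the ordered values

        def __init__(self, prop):
            self.index = self.PropertyList.index(prop)

        def __eq__(self, other):
            return self.index == other.index

        def __lt__(self, other):
            return self.index < other.index

    class BugImportance(BugProperty):
        PropertyList = ["Undecided", "Wishlist", "Low", "Medium", "High", "Critical"]

    assert BugImportance("High") > BugImportance("Low")
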
2819diff --git a/maasimg-mod.sh b/maasimg-mod.sh
2820deleted file mode 100755
2821index 001f9f4..0000000
2822--- a/maasimg-mod.sh
2823+++ /dev/null
2824@@ -1,94 +0,0 @@
2825-#!/bin/sh
2826-
2827-set -e
2828-set -x
2829-
2830-proposed=0
2831-while [ $# -gt 0 ]; do
2832- arg="$1"
2833- case $arg in
2834- -k|--kerneldeb)
2835- kerndeb="$2"
2836- shift
2837- ;;
2838- -i|--imagedir)
2839- imgdir="$2"
2840- shift
2841- ;;
2842- --ppa)
2843- ppa="$2"
2844- shift
2845- ;;
2846- --proposed)
2847- proposed=1
2848- ;;
2849- *)
2850- echo "Error: Unknown argument: $arg" 1>&2
2851- exit 1
2852- ;;
2853- esac
2854- shift
2855-done
2856-
2857-if [ -z "$imgdir" ]; then
2858- echo "Error: No image directory (-i) specified." 1>&2
2859- exit 1
2860-fi
2861-
2862-tmpdir="$(mktemp -d)"
2863-cleanup() {
2864- sudo rm -rf "$tmpdir"
2865-}
2866-trap cleanup EXIT INT TERM
2867-
2868-if [ -n "$kerndeb" ] && [ -n "$imgdir" ]; then
2869- kernel="$imgdir/boot-kernel"
2870- initrd="$imgdir/boot-initrd"
2871- kernunpack="$tmpdir/kernel"
2872- mkdir -p "$kernunpack"
2873- initrddir="$tmpdir/initrd"
2874- mkdir -p "$initrddir"
2875-
2876- # Just overwrite the kernel
2877- dpkg-deb -x "$kerndeb" "$kernunpack"
2878- newkver="$(ls $kernunpack/lib/modules)"
2879- cat $kernunpack/boot/vmlinuz-$newkver | sudo tee $kernel > /dev/null
2880-
2881- gunzip < $initrd | (cd $initrddir && sudo cpio -i -d)
2882- oldkver="$(ls $initrddir/lib/modules)"
2883-
2884- # Replace the modules in the initrd w/ the same subset of modules
2885- # from the new kernel.
2886- (cd $initrddir/lib/modules/$oldkver && sudo find) > $tmpdir/modlist
2887- sudo rm -rf $initrddir/lib/modules/$oldkver
2888- sudo mkdir -p $initrddir/lib/modules/$newkver
2889- (cd $kernunpack/lib/modules/$newkver && cpio -H newc -o < $tmpdir/modlist) | \
2890- (cd $initrddir/lib/modules/$newkver && sudo cpio -i -d)
2891- sudo depmod -b $initrddir $newkver
2892- (cd $initrddir && sudo find | sudo cpio -H newc -o) | gzip | sudo tee $initrd > /dev/null
2893-fi
2894-
2895-if [ -n "$ppa" ] || [ $proposed = 1 ]; then
2896- rootdir="$tmpdir/root"
2897- mkdir -p "$rootdir"
2898-
2899- roottgz="$imgdir/root-tgz"
2900- (cd $rootdir && sudo tar --xattrs -x -z -f $roottgz)
2901-
2902- if [ -n "$ppa" ]; then
2903- sudo chroot "$rootdir" apt install software-properties-common -y
2904- sudo chroot "$rootdir" apt-add-repository "$ppa" -y
2905- fi
2906-
2907- if [ $proposed = 1 ]; then
2908- echo "deb http://us.ports.ubuntu.com/ubuntu-ports/ xenial-proposed main restricted universe multiverse" | sudo tee "$rootdir/etc/apt/sources.list.d/proposed.list" > /dev/null
2909- fi
2910- (cd $rootdir && sudo tar -c --xattrs * | gzip -9) | sudo tee "$roottgz" > /dev/null
2911-fi
2912-
2913-# In case they have old fds open...
2914-sudo service maas-rackd restart
2915-sudo service maas-regiond restart
2916-sudo service tgt restart
2917-
2918-exit 0
2919diff --git a/setup.cfg b/setup.cfg
2920deleted file mode 100644
2921index 9d50d6c..0000000
2922--- a/setup.cfg
2923+++ /dev/null
2924@@ -1,24 +0,0 @@
2925-[metadata]
2926-name = canonical-server-hwe-utils
2927-version = 1.0.0
2928-url = https://launchpad.net/canonical-server-hwe-utils
2929-license = AGPLv3
2930-maintainer = CE Hyperscale Team
2931-maintainer_email = canonical-hyperscale-team@lists.canonical.com
2932-description = Assorted utilities used by Canonical's Server Hardware Enablement team
2933-long_description = file: README.md
2934-long_description_content_type = text/markdown
2935-classifiers =
2936- Programming Language :: Python
2937- Programming Language :: Python :: 3
2938-
2939-[options]
2940-python_requires = >=3.8
2941-packages = find:
2942-zip_safe = true
2943-scripts = labkey, jenkins-scripts/weekly-rota-init.py, jenkins-scripts/sbsh
2944-install_requires =
2945- requests >= 2.27.1
2946- jira >= 3.1.1
2947- python-jenkins >= 1.7.0
2948- pyyaml >= 5.4
2949diff --git a/setup.py b/setup.py
2950deleted file mode 100755
2951index 229b2eb..0000000
2952--- a/setup.py
2953+++ /dev/null
2954@@ -1,5 +0,0 @@
2955-#!/usr/bin/env python3
2956-
2957-from setuptools import setup
2958-
2959-setup()
2960diff --git a/sysadmin-tools/gen_conserver_cf.py b/sysadmin-tools/gen_conserver_cf.py
2961deleted file mode 100755
2962index 491f5b3..0000000
2963--- a/sysadmin-tools/gen_conserver_cf.py
2964+++ /dev/null
2965@@ -1,80 +0,0 @@
2966-#!/usr/bin/env python3
2967-#
2968-# Generate conserver.cf from machines.yaml
2969-#
2970-
2971-import argparse
2972-import sys
2973-import yaml
2974-
2975-# Header information scraped from r815 conserver.cf
2976-header = """
2977-# DO NOT EDIT DIRECTLY, YOUR CHANGES WILL BE OVERWRITTEN
2978-# Auto-generated by {progname}
2979-#
2980-# The character '&' in logfile names are substituted with the console
2981-# name.
2982-#
2983-config * {{
2984- initdelay 10;
2985-}}
2986-
2987-default full {{
2988- rw *;
2989-}}
2990-
2991-default * {{
2992- logfile /var/log/conserver/&.log;
2993- timestamp "";
2994- include full;
2995-}}"""
2996-
2997-# Footer information scraped from r815 conserver.cf
2998-footer = """
2999-#
3000-# list of clients allowed
3001-#
3002-access * {{
3003- trusted 127.0.0.1,10.228.0.2,10.172.64.0/18,10.172.192.0/18,10.246.72.53,10.246.72.142;
3004- allowed 127.0.0.1,10.228.0.2,10.172.64.0/18,10.172.192.0/18,10.246.72.53,10.246.72.142;
3005-}}
3006-"""
3007-
3008-# Console entry for ipmi sol connection
3009-ipmi_sol_entry = """
3010-console {srv} {{
3011- master localhost;
3012- type exec;
3013- exec (ipmitool -I lanplus -H {addr} -U {user} -P {pwd} sol deactivate || : && ipmitool -I lanplus -H {addr} -U {user} -P {pwd} sol activate);
3014-}}""" # noqa: E501
3015-
3016-if __name__ == "__main__":
3017- arg_parser = argparse.ArgumentParser()
3018- arg_parser.add_argument("machines_yaml", help="Lab's machines.yaml file")
3019- arg_parser.add_argument("-o", "--outfile", help="Output filename")
3020- args = arg_parser.parse_args()
3021-
3022- with open(args.machines_yaml) as f:
3023- machines = yaml.safe_load(f)
3024-
3025- if args.outfile:
3026- outfile = open(args.outfile, "w")
3027- else:
3028- outfile = sys.stdout
3029- outfile.write(header.format(progname=arg_parser.prog))
3030- for server in machines:
3031- # bmc is required for conserver to attach
3032- if "bmc" not in machines[server]:
3033- continue
3034- bmc_info = machines[server]["bmc"]
3035- # type is required so the conserver knows how to attach
3036- if "type" not in bmc_info:
3037- continue
3038- if bmc_info["type"] == "ipmi":
3039- addr = bmc_info["address"].replace("#", "\\#")
3040- user = bmc_info["user"].replace("#", "\\#")
3041- pwd = bmc_info["password"].replace("#", "\\#")
3042- outfile.write(
3043- ipmi_sol_entry.format(srv=server, addr=addr, user=user, pwd=pwd)
3044- )
3045- outfile.write(footer.format())
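
The doubled braces in the header, footer, and console templates above are str.format() escapes: {{ and }} emit literal braces, while {srv} and friends are substituted. A minimal sketch (the console name is illustrative):

    template = "console {srv} {{\n    master localhost;\n}}"
    print(template.format(srv="d05-1"))
    # console d05-1 {
    #     master localhost;
    # }
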
3046diff --git a/sysadmin-tools/update_conserver.py b/sysadmin-tools/update_conserver.py
3047deleted file mode 100755
3048index eee1fbd..0000000
3049--- a/sysadmin-tools/update_conserver.py
3050+++ /dev/null
3051@@ -1,114 +0,0 @@
3052-#!/usr/bin/env python3
3053-
3054-import argparse
3055-from datetime import date
3056-import logging
3057-import os.path
3058-import subprocess
3059-import tempfile
3060-
3061-if __name__ == "__main__":
3062- logging.basicConfig(level=logging.INFO)
3063- parser = argparse.ArgumentParser()
3064- parser.add_argument("-l", "--lab", required=True)
3065- args = parser.parse_args()
3066-
3067- gen_conserver_cf = os.path.join(".", "gen_conserver_cf.py")
3068- if not os.path.exists(gen_conserver_cf):
3069- raise Exception(f"Did not find {gen_conserver_cf} in the current directory.")
3070-
3071- if args.lab == "1ss":
3072- server = "10.229.0.101"
3073- elif args.lab == "tremont":
3074- server = "10.228.0.2"
3075- else:
3076-        raise Exception(f"Unknown lab {args.lab}")
3077-
3078- with tempfile.TemporaryDirectory() as tmpdir:
3079- subprocess.run(
3080- [
3081- "git",
3082- "clone",
3083- "--depth",
3084- "1",
3085- "git+ssh://git.launchpad.net/scalebot",
3086- os.path.join(tmpdir, "scalebot"),
3087- ],
3088- check=True,
3089- )
3090- labdir = os.path.join(tmpdir, "scalebot", "labs", args.lab)
3091- if not os.path.isdir(labdir):
3092-            raise Exception(f"Lab {args.lab} directory not found")
3093- subprocess.run(
3094- [
3095- gen_conserver_cf,
3096- "-o",
3097- os.path.join(tmpdir, "conserver.cf"),
3098- os.path.join(labdir, "machines.yaml"),
3099- ],
3100- check=True,
3101- )
3102- logging.info(f"sudo password prompts that follow are from {server}")
3103- backup = f"/etc/conserver/conserver.cf.{date.today().isoformat()}"
3104- subprocess.run(
3105- ["ssh", "-t", server, "sudo", "cp", "/etc/conserver/conserver.cf", backup],
3106- check=True,
3107- )
3108- logging.info(f"Previous file backed up to {backup}")
3109- remote_mktemp = subprocess.run(
3110- ["ssh", server, "mktemp"],
3111- check=True,
3112- capture_output=True,
3113- )
3114- remote_tmp = remote_mktemp.stdout.decode("UTF-8").splitlines()[0]
3115- logging.info(f"Tempfile is {remote_tmp}")
3116- subprocess.run(
3117- ["scp", os.path.join(tmpdir, "conserver.cf"), f"{server}:{remote_tmp}"],
3118- check=True,
3119- )
3120- subprocess.run(
3121- [
3122- "ssh",
3123- "-t",
3124- server,
3125- "sudo",
3126- "chown",
3127- "--reference=/etc/conserver/conserver.cf",
3128- remote_tmp,
3129- ],
3130- check=True,
3131- )
3132- subprocess.run(
3133- [
3134- "ssh",
3135- "-t",
3136- server,
3137- "sudo",
3138- "chmod",
3139- "--reference=/etc/conserver/conserver.cf",
3140- remote_tmp,
3141- ],
3142- check=True,
3143- )
3144- subprocess.run(
3145- [
3146- "ssh",
3147- "-t",
3148- server,
3149- "sudo",
3150- "cp",
3151- remote_tmp,
3152- "/etc/conserver/conserver.cf",
3153- ],
3154- check=True,
3155- )
3156- logging.info(f"Stopping conserver-server on {server}...")
3157- subprocess.run(
3158- ["ssh", "-t", server, "sudo", "service", "conserver-server", "stop"],
3159- check=True,
3160- )
3161- logging.info(f"Starting conserver-server on {server}...")
3162- subprocess.run(
3163- ["ssh", "-t", server, "sudo", "service", "conserver-server", "start"],
3164- check=True,
3165- )
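
A condensed sketch of the remote staging pattern the script follows: copy into a remote tempfile, match the target's ownership and permissions with chown/chmod --reference, then copy into place, so a failed transfer never clobbers the live conserver.cf. The helper name and arguments below are mine, not the script's.

    import subprocess

    def stage_remote_copy(server, local_path, target):
        # mktemp on the remote host; splitlines()[0] strips the trailing newline
        tmp = subprocess.run(
            ["ssh", server, "mktemp"], check=True, capture_output=True
        ).stdout.decode("UTF-8").splitlines()[0]
        subprocess.run(["scp", local_path, f"{server}:{tmp}"], check=True)
        for cmd in ("chown", "chmod"):
            subprocess.run(
                ["ssh", "-t", server, "sudo", cmd, f"--reference={target}", tmp],
                check=True,
            )
        subprocess.run(["ssh", "-t", server, "sudo", "cp", tmp, target], check=True)
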
