Merge ~hyask/autopkgtest-cloud:skia/ease_browse_dev into autopkgtest-cloud:master

Proposed by Skia
Status: Merged
Merged at revision: 0bcf32f066b6d0a64186ccc2da94ba32ba539756
Proposed branch: ~hyask/autopkgtest-cloud:skia/ease_browse_dev
Merge into: autopkgtest-cloud:master
Diff against target: 205 lines (+187/-0)
3 files modified
charms/focal/autopkgtest-web/webcontrol/README.md (+12/-0)
charms/focal/autopkgtest-web/webcontrol/browse-test.py (+27/-0)
charms/focal/autopkgtest-web/webcontrol/helpers/tests.py (+148/-0)
Reviewer Review Type Date Requested Status
Tim Andersson Approve
Review via email: mp+461027@code.launchpad.net

Description of the change

Allow easier local development.

To post a comment you must log in.
Revision history for this message
Tim Andersson (andersson123) wrote :

Skia to remove header/hyperlink (h2) from running macro, move into browse-running and browse-admin.

Revision history for this message
Tim Andersson (andersson123) wrote :

Skia to:
- make both commit messages more verbose

Revision history for this message
Tim Andersson (andersson123) wrote :

LGTM once CI passes :)

Revision history for this message
Tim Andersson (andersson123) :
review: Approve
Revision history for this message
Tim Andersson (andersson123) wrote :

Now that CI works, we actually have to wait until it passes to merge things :/ footgun

Revision history for this message
Brian Murray (brian-murray) wrote :

Should the notes about "Developing browse.cgi locally" also end up in /docs and on readthedocs?

Revision history for this message
Tim Andersson (andersson123) wrote :

I don't think so. Our docs on readthedocs don't really cover development at all - it's mostly sysadmin things and architectural information. I personally think this README.md is good, but I really don't mind if it goes into readthedocs also

Preview Diff

[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1diff --git a/charms/focal/autopkgtest-web/webcontrol/README.md b/charms/focal/autopkgtest-web/webcontrol/README.md
2new file mode 100644
3index 0000000..e489c2a
4--- /dev/null
5+++ b/charms/focal/autopkgtest-web/webcontrol/README.md
6@@ -0,0 +1,12 @@
7+# autopkgtest-cloud web frontend
8+
9+## Developing browse.cgi locally
10+
11+Install the dependencies:
12+`sudo apt install python3-flask python3-distro-info libjs-jquery libjs-bootstrap`
13+
14+Then simply run `./browse-test.py`, it will launch the flask application locally
15+with some mocked data.
16+As the import of `browse.cgi` is done through `importlib`, changes in that file
17+will not be reloaded automatically, so you'll still need to restart the app
18+manually.
19diff --git a/charms/focal/autopkgtest-web/webcontrol/browse-test.py b/charms/focal/autopkgtest-web/webcontrol/browse-test.py
20new file mode 100755
21index 0000000..a5c5b4e
22--- /dev/null
23+++ b/charms/focal/autopkgtest-web/webcontrol/browse-test.py
24@@ -0,0 +1,27 @@
25+#!/usr/bin/env python3
26+"""Run browse app in local debug mode for testing."""
27+
28+import importlib
29+from pathlib import Path
30+
31+from helpers import tests, utils
32+
33+# import browse.cgi
34+browse_path = str(Path(__file__).parent / "browse.cgi")
35+loader = importlib.machinery.SourceFileLoader("browse", browse_path)
36+spec = importlib.util.spec_from_loader("browse", loader)
37+browse = importlib.util.module_from_spec(spec)
38+loader.exec_module(browse)
39+
40+
41+if __name__ == "__main__":
42+ browse.db_con = utils.init_db(":memory:", check_same_thread=False)
43+ with browse.db_con:
44+ tests.populate_dummy_db(browse.db_con)
45+ browse.swift_container_url = "swift-%s"
46+ browse.AMQP_QUEUE_CACHE = Path("/dev/shm/queue.json")
47+ tests.populate_dummy_amqp_cache(browse.AMQP_QUEUE_CACHE)
48+ browse.RUNNING_CACHE = Path("/dev/shm/running.json")
49+ tests.populate_dummy_running_cache(browse.RUNNING_CACHE)
50+
51+ browse.app.run(host="0.0.0.0", debug=True)
52diff --git a/charms/focal/autopkgtest-web/webcontrol/helpers/tests.py b/charms/focal/autopkgtest-web/webcontrol/helpers/tests.py
53new file mode 100644
54index 0000000..52017c2
55--- /dev/null
56+++ b/charms/focal/autopkgtest-web/webcontrol/helpers/tests.py
57@@ -0,0 +1,148 @@
58+import json
59+from datetime import datetime
60+from uuid import uuid4
61+
62+from .utils import get_supported_releases
63+
64+
65+def populate_dummy_db(db_con):
66+ supported_releases = get_supported_releases()
67+
68+ c = db_con.cursor()
69+ tests = [
70+ (1, supported_releases[0], "amd64", "hello"),
71+ (2, supported_releases[1], "amd64", "hello"),
72+ (3, supported_releases[0], "ppc64el", "hello"),
73+ (4, supported_releases[1], "ppc64el", "hello"),
74+ (5, supported_releases[2], "amd64", "hello"),
75+ ]
76+ c.executemany("INSERT INTO test values(?, ?, ?, ?)", tests)
77+ results = [
78+ # fmt: off
79+ # test_id | run_id | version | trigger | duration | exit_code | requester | env | uuid
80+ (1, datetime.now(), "1.2.3", "hello/1.2.3", 42, 0, "hyask", "", str(uuid4())),
81+ (1, datetime.now(), "1.2.3", "hello/1.2.3", 42, 0, "hyask", "all-proposed=1", str(uuid4())),
82+ (2, datetime.now(), "1.2.3", "hello/1.2.3", 42, 0, "", "", str(uuid4())),
83+ (3, datetime.now(), "1.2.3", "hello/1.2.3", 42, 20, "", "", str(uuid4())),
84+ # fmt: on
85+ ]
86+ c.executemany(
87+ "INSERT INTO result values(?, ?, ?, ?, ?, ?, ?, ?, ?)", results
88+ )
89+ db_con.commit()
90+
91+
92+def populate_dummy_amqp_cache(path):
93+ supported_releases = get_supported_releases()
94+ with open(path, "w") as f:
95+ # pylint: disable=line-too-long
96+ json.dump(
97+ {
98+ "arches": ["amd64", "ppc64el"],
99+ "queues": {
100+ "ubuntu": {
101+ supported_releases[0]: {
102+ "amd64": {
103+ "size": 1,
104+ "requests": [
105+ 'hello\n{"triggers": ["hello/1.2.3ubuntu1"], "submit-time": "2024-02-22 01:55:03"}',
106+ ],
107+ }
108+ }
109+ },
110+ "huge": {
111+ supported_releases[1]: {
112+ "amd64": {
113+ "size": 1,
114+ "requests": [
115+ 'hello\n{"triggers": ["migration-reference/0"], "submit-time": "2024-02-22 01:55:03"}',
116+ ],
117+ }
118+ }
119+ },
120+ "ppa": {
121+ supported_releases[2]: {
122+ "amd64": {
123+ "size": 2,
124+ "requests": [
125+ 'hello\n{"triggers": ["hello/1.2.4~ppa1"], "submit-time": "2024-02-22 01:55:03"}',
126+ 'hello2\n{"triggers": ["hello2/2.0.0~ppa1"], "submit-time": "2024-02-22 01:55:03"}',
127+ ],
128+ }
129+ }
130+ },
131+ "upstream": {
132+ supported_releases[3]: {
133+ "amd64": {
134+ "size": 1,
135+ "requests": [
136+ 'hello\n{"triggers": ["hello/1.2.4~ppa1"], "submit-time": "2024-02-22 01:55:03"}',
137+ ],
138+ }
139+ }
140+ },
141+ },
142+ },
143+ f,
144+ )
145+
146+
147+def populate_dummy_running_cache(path):
148+ supported_releases = get_supported_releases()
149+ with open(path, "w") as f:
150+ json.dump(
151+ {
152+ "hello": {
153+ "hash1": {
154+ supported_releases[0]: {
155+ "amd64": [
156+ {
157+ "requester": "hyask",
158+ "submit-time": "2024-02-21 11:00:51",
159+ "triggers": [
160+ "hello/1.2.3",
161+ ],
162+ "uuid": "84669a9c-ac08-46a3-a5fd-6247d0d2021c",
163+ },
164+ 3504,
165+ """
166+3071s hello/test_XYZ.hello . [ 54%]
167+3153s hello/test_XYZ.hello ...... [ 64%]
168+3271s hello/test_XYZ.hello .......... [ 74%]
169+3292s hello/test_XYZ.hello .................. [ 84%]
170+3493s hello/test_XYZ.hello ............................ [ 94%]
171+3494s hello/test_XYZ.hello .................................... [ 98%]
172+""",
173+ ]
174+ }
175+ }
176+ },
177+ "hello2": {
178+ "hash1": {
179+ supported_releases[4]: {
180+ "amd64": [
181+ {
182+ "all-proposed": "1",
183+ "requester": "hyask",
184+ "submit-time": "2024-02-21 11:01:21",
185+ "triggers": [
186+ "hello2/1.2.3-0ubuntu1",
187+ ],
188+ "uuid": "42369a9c-ac08-46a3-a5fd-6247d0d2021c",
189+ },
190+ 3504,
191+ """
192+3071s hello2/test_XYZ.hello [ 54%]
193+3153s hello2/test_XYZ.hello [ 64%]
194+3271s hello2/test_XYZ.hello [ 74%]
195+3292s hello2/test_XYZ.hello [ 84%]
196+3493s hello2/test_XYZ.hello [ 94%]
197+3494s hello2/test_XYZ.hello [ 98%]
198+""",
199+ ]
200+ }
201+ }
202+ },
203+ },
204+ f,
205+ )

Subscribers

People subscribed via source and target branches