Merge lp:~caio1982/ubuntu-system-image/sis-logging into lp:ubuntu-system-image/server

Proposed by Caio Begotti
Status: Merged
Merged at revision: 262
Proposed branch: lp:~caio1982/ubuntu-system-image/sis-logging
Merge into: lp:ubuntu-system-image/server
Diff against target: 574 lines (+101/-17)
4 files modified
lib/systemimage/generators.py (+71/-9)
lib/systemimage/gpg.py (+6/-1)
lib/systemimage/tools.py (+17/-6)
lib/systemimage/tree.py (+7/-1)
To merge this branch: bzr merge lp:~caio1982/ubuntu-system-image/sis-logging
Reviewer: Stéphane Graber (community)
Review status: Needs Fixing
Review via email: mp+252578@code.launchpad.net

Description of the change

Without real logging it's nearly impossible to debug the image server on Prodstack, and that's something we badly need in PES; otherwise the IS team won't let us simply ssh into the machines to investigate what's going on.
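
The branch only adds per-module loggers (logging.getLogger(__name__)) and debug calls; it does not configure any handler itself. As a minimal sketch, assuming the deployment wires up the root logger via the standard library, the new messages could be surfaced like this (handler, format and level are illustrative assumptions, not part of this branch):

    import logging

    # Illustrative only: route the new per-module debug messages somewhere
    # visible; the format and level here are assumptions, not in the diff.
    logging.basicConfig(
        level=logging.DEBUG,
        format="%(asctime)s %(name)s %(levelname)s %(message)s")

    # The modules touched by this branch then simply do:
    #     logger = logging.getLogger(__name__)
    #     logger.debug("Path generated: %s" % path)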

Revision history for this message
Stéphane Graber (stgraber) wrote :

Seems reasonable, though it doesn't match the general coding style.

All strings must be in double quotes; single quotes should only be used for dict keys.

review: Needs Fixing
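
For reference, the quoting convention the review asks for, applied to one of the new logging calls (an illustrative line, not quoted verbatim from the diff):

    logger.debug("Missing device name in JSON: %s" % environment['device_name'])
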
262. By Caio Begotti

fix all single-quoted logging messages, as single quotes are for dict keys only

Revision history for this message
Caio Begotti (caio1982) wrote :

Oops, you're right. Fixed.

263. By Caio Begotti

fix some PEP 8 warnings along the way

Preview Diff

1=== modified file 'lib/systemimage/generators.py'
2--- lib/systemimage/generators.py 2015-03-11 20:55:00 +0000
3+++ lib/systemimage/generators.py 2015-03-11 21:18:58 +0000
4@@ -17,6 +17,8 @@
5
6 from hashlib import sha256
7 from systemimage import diff, gpg, tree, tools
8+
9+import logging
10 import json
11 import os
12 import socket
13@@ -34,11 +36,15 @@
14 # Global
15 CACHE = {}
16
17+logger = logging.getLogger(__name__)
18+
19
20 def list_versions(cdimage_path):
21- return sorted([version for version in os.listdir(cdimage_path)
22- if version not in ("pending", "current")],
23- reverse=True)
24+ versions = sorted([version for version in os.listdir(cdimage_path)
25+ if version not in ("pending", "current")],
26+ reverse=True)
27+ logger.debug("Versions detected: %s" % versions)
28+ return versions
29
30
31 def root_ownership(tarinfo):
32@@ -89,6 +95,7 @@
33 path = os.path.realpath(os.path.join(conf.publish_path, "pool",
34 "%s.delta-%s.tar.xz" %
35 (target_filename, source_filename)))
36+ logger.debug("Path generated: %s" % path)
37
38 # Return pre-existing entries
39 if os.path.exists(path):
40@@ -174,6 +181,7 @@
41
42 # We need at least a path and a series
43 if len(arguments) < 2:
44+ logger.debug("Too few arguments")
45 return None
46
47 # Read the arguments
48@@ -198,12 +206,15 @@
49
50 # Check that the directory exists
51 if not os.path.exists(cdimage_path):
52+ logger.debug("Directory not found: %s" % cdimage_path)
53 return None
54
55 for version in list_versions(cdimage_path):
56 # Skip directory without checksums
57- if not os.path.exists(os.path.join(cdimage_path, version,
58- "SHA256SUMS")):
59+ checksum_path = os.path.join(cdimage_path, version,
60+ "SHA256SUMS")
61+ if not os.path.exists(checksum_path):
62+ logger.debug("Missing checksum: %s" % checksum_path)
63 continue
64
65 # Check for all the ANDROID files
66@@ -212,6 +223,7 @@
67 (series, boot_arch,
68 environment['device_name']))
69 if not os.path.exists(boot_path):
70+ logger.debug("Missing boot image: %s" % boot_path)
71 continue
72
73 recovery_path = os.path.join(cdimage_path, version,
74@@ -219,6 +231,7 @@
75 (series, recovery_arch,
76 environment['device_name']))
77 if not os.path.exists(recovery_path):
78+ logger.debug("Missing recovery image: %s" % recovery_path)
79 continue
80
81 system_path = os.path.join(cdimage_path, version,
82@@ -226,6 +239,7 @@
83 (series, system_arch,
84 environment['device_name']))
85 if not os.path.exists(system_path):
86+ logger.debug("Missing system image: %s" % system_path)
87 continue
88
89 # Check if we should only import tested images
90@@ -256,6 +270,7 @@
91 break
92
93 if not boot_hash or not recovery_hash or not system_hash:
94+ logger.debug("Boot, recovery or system checksum missing")
95 continue
96
97 hash_string = "%s/%s/%s" % (boot_hash, recovery_hash, system_hash)
98@@ -264,6 +279,7 @@
99 # Generate the path
100 path = os.path.join(conf.publish_path, "pool",
101 "device-%s.tar.xz" % global_hash)
102+ logger.debug("Path generated: %s" % path)
103
104 # Return pre-existing entries
105 if os.path.exists(path):
106@@ -281,6 +297,7 @@
107 temp_dir = tempfile.mkdtemp()
108
109 # Generate a new tarball
110+ logger.debug("Opening tarball for processing")
111 target_tarball = tarfile.open(os.path.join(temp_dir, "target.tar"),
112 "w:")
113
114@@ -310,6 +327,7 @@
115 arcname="partitions/recovery.img",
116 filter=root_ownership)
117
118+ logger.debug("Closing tarball")
119 target_tarball.close()
120
121 # Create the pool if it doesn't exist
122@@ -355,6 +373,7 @@
123
124 # We need at least a path and a series
125 if len(arguments) < 2:
126+ logger.debug("Too few arguments")
127 return None
128
129 # Read the arguments
130@@ -373,12 +392,15 @@
131
132 # Check that the directory exists
133 if not os.path.exists(cdimage_path):
134+ logger.debug("Directory not found: %s" % cdimage_path)
135 return None
136
137 for version in list_versions(cdimage_path):
138 # Skip directory without checksums
139- if not os.path.exists(os.path.join(cdimage_path, version,
140- "SHA256SUMS")):
141+ checksum_path = os.path.join(cdimage_path, version,
142+ "SHA256SUMS")
143+ if not os.path.exists(checksum_path):
144+ logger.debug("Missing checksum: %s" % checksum_path)
145 continue
146
147 # Check for the rootfs
148@@ -387,6 +409,7 @@
149 (series, options.get("product", "touch"),
150 arch))
151 if not os.path.exists(rootfs_path):
152+ logger.debug("Missing rootfs tarball: %s" % rootfs_path)
153 continue
154
155 # Check if we should only import tested images
156@@ -414,6 +437,7 @@
157 # Generate the path
158 path = os.path.join(conf.publish_path, "pool",
159 "ubuntu-%s.tar.xz" % rootfs_hash)
160+ logger.debug("Path generated: %s" % path)
161
162 # Return pre-existing entries
163 if os.path.exists(path):
164@@ -431,6 +455,7 @@
165 temp_dir = tempfile.mkdtemp()
166
167 # Unpack the source tarball
168+ logger.debug("Opening tarball for processing")
169 tools.gzip_uncompress(rootfs_path, os.path.join(temp_dir,
170 "source.tar"))
171
172@@ -534,6 +559,7 @@
173 new_file.gname = "root"
174 target_tarball.addfile(new_file)
175
176+ logger.debug("Closing tarball")
177 source_tarball.close()
178 target_tarball.close()
179
180@@ -575,6 +601,7 @@
181
182 # We need at least a path and a series
183 if len(arguments) < 2:
184+ logger.debug("Too few arguments")
185 return None
186
187 # Read the arguments
188@@ -593,12 +620,15 @@
189
190 # Check that the directory exists
191 if not os.path.exists(cdimage_path):
192+ logger.debug("Directory not found: %s" % cdimage_path)
193 return None
194
195 for version in list_versions(cdimage_path):
196 # Skip directory without checksums
197- if not os.path.exists(os.path.join(cdimage_path, version,
198- "SHA256SUMS")):
199+ checksum_path = os.path.join(cdimage_path, version,
200+ "SHA256SUMS")
201+ if not os.path.exists(checksum_path):
202+ logger.debug("Missing checksum: %s" % checksum_path)
203 continue
204
205 # Check for the custom tarball
206@@ -607,6 +637,7 @@
207 (series, options.get("product", "touch"),
208 arch))
209 if not os.path.exists(custom_path):
210+ logger.debug("Missing custom tarball: %s" % custom_path)
211 continue
212
213 # Check if we should only import tested images
214@@ -634,6 +665,7 @@
215 # Generate the path
216 path = os.path.join(conf.publish_path, "pool",
217 "custom-%s.tar.xz" % custom_hash)
218+ logger.debug("Path generated: %s" % path)
219
220 # Return pre-existing entries
221 if os.path.exists(path):
222@@ -692,6 +724,7 @@
223
224 # We need at least a path and a series
225 if len(arguments) < 2:
226+ logger.debug("Too few arguments")
227 return None
228
229 # Read the arguments
230@@ -712,6 +745,7 @@
231
232 # Check that the directory exists
233 if not os.path.exists(cdimage_path):
234+ logger.debug("Directory not found: %s" % cdimage_path)
235 return None
236
237 for version in list_versions(cdimage_path):
238@@ -754,6 +788,7 @@
239 # Generate the path
240 path = os.path.join(conf.publish_path, "pool",
241 "device-%s.tar.xz" % raw_device_hash)
242+ logger.debug("Path generated: %s" % path)
243
244 # Return pre-existing entries
245 if os.path.exists(path):
246@@ -813,6 +848,7 @@
247
248 # We need at least a URL
249 if len(arguments) == 0:
250+ logger.debug("Too few arguments")
251 return None
252
253 # Read the arguments
254@@ -844,6 +880,7 @@
255
256 # Validate the version number
257 if not version or len(version.split("\n")) > 1:
258+ logger.debug("Invalid or missing version number")
259 return None
260
261 # Push the result in the cache
262@@ -857,6 +894,8 @@
263 "%s-%s.tar.xz" %
264 (options.get("name", "http"),
265 version)))
266+ logger.debug("Path generated: %s" % old_path)
267+
268 if os.path.exists(old_path):
269 # Get the real version number (in case it got copied)
270 if os.path.exists(old_path.replace(".tar.xz", ".json")):
271@@ -876,6 +915,7 @@
272 "%s-%s.tar.xz" %
273 (options.get("name", "http"),
274 global_hash)))
275+ logger.debug("Path generated: %s" % path)
276
277 # Return pre-existing entries
278 if os.path.exists(path):
279@@ -921,6 +961,8 @@
280 "%s-%s.tar.xz" %
281 (options.get("name", "http"),
282 version)))
283+ logger.debug("Path generated: %s" % path)
284+
285 # Return pre-existing entries
286 if os.path.exists(path):
287 # Get the real version number (in case it got copied)
288@@ -969,10 +1011,12 @@
289
290 # Don't generate keyring tarballs when nothing changed
291 if len(environment['new_files']) == 0:
292+ logger.debug("Nothing has changed, no new files")
293 return None
294
295 # We need a keyring name
296 if len(arguments) == 0:
297+ logger.debug("Too few arguments")
298 return None
299
300 # Read the arguments
301@@ -997,6 +1041,7 @@
302 path = os.path.realpath(os.path.join(conf.publish_path, "pool",
303 "keyring-%s.tar.xz" %
304 global_hash))
305+ logger.debug("Path generated: %s" % path)
306
307 # Set the version_detail string
308 environment['version_detail'].append("keyring=%s" % keyring_name)
309@@ -1051,6 +1096,7 @@
310
311 # We need at least a channel name and a file prefix
312 if len(arguments) < 3:
313+ logger.debug("Too few arguments")
314 return None
315
316 # Read the arguments
317@@ -1079,20 +1125,25 @@
318 socket.setdefaulttimeout(old_timeout)
319
320 if channel_name not in channel_json:
321+ logger.debug("Missing channel name in JSON: %s" % channel_name)
322 return None
323
324 if "devices" not in channel_json[channel_name]:
325+ logger.debug("Missing devices for channel name in JSON")
326 return None
327
328 if device_name not in channel_json[channel_name]['devices']:
329+ logger.debug("Missing device name in JSON: %s" % device_name)
330 return None
331
332 if "index" not in (channel_json[channel_name]['devices']
333 [device_name]):
334+ logger.debug("Missing index for the channel device in JSON")
335 return None
336
337 index_url = "%s/%s" % (base_url, channel_json[channel_name]['devices']
338 [device_name]['index'])
339+ logger.debug("Index file for the devices in channel: %s" % index_url)
340
341 # Fetch and validate the remote index.json
342 old_timeout = socket.getdefaulttimeout()
343@@ -1109,6 +1160,7 @@
344 full_images = sorted([image for image in index_json['images']
345 if image['type'] == "full"],
346 key=lambda image: image['version'])
347+ logger.debug("List of full images found: %s" % full_images)
348
349 # No images
350 if not full_images:
351@@ -1121,6 +1173,8 @@
352 if file_prefix == prefix:
353 path = os.path.realpath("%s/%s" % (conf.publish_path,
354 file_entry['path']))
355+ logger.debug("Path generated: %s" % path)
356+
357 if os.path.exists(path):
358 return path
359
360@@ -1187,6 +1241,7 @@
361
362 # We need at least a channel name and a file prefix
363 if len(arguments) < 2:
364+ logger.debug("Too few arguments")
365 return None
366
367 # Read the arguments
368@@ -1196,10 +1251,12 @@
369 # Run some checks
370 pub = tree.Tree(conf)
371 if channel_name not in pub.list_channels():
372+ logger.debug("Channel not in the published list: %s" % channel_name)
373 return None
374
375 if (not environment['device_name'] in
376 pub.list_channels()[channel_name]['devices']):
377+ logger.debug("Device not in the channel list")
378 return None
379
380 # Try to find the file
381@@ -1208,6 +1265,7 @@
382 full_images = sorted([image for image in device.list_images()
383 if image['type'] == "full"],
384 key=lambda image: image['version'])
385+ logger.debug("List of full images found: %s" % full_images)
386
387 # No images
388 if not full_images:
389@@ -1220,6 +1278,7 @@
390 if file_prefix == prefix:
391 path = os.path.realpath("%s/%s" % (conf.publish_path,
392 file_entry['path']))
393+ logger.debug("Path generated: %s" % path)
394
395 if os.path.exists(path.replace(".tar.xz", ".json")):
396 with open(path.replace(".tar.xz", ".json"), "r") as fd:
397@@ -1241,16 +1300,19 @@
398
399 # Don't generate version tarballs when nothing changed
400 if len(environment['new_files']) == 0:
401+ logger.debug("Nothing has changed, no new files")
402 return None
403
404 path = os.path.realpath(os.path.join(environment['device'].path,
405 "version-%s.tar.xz" % environment['version']))
406+ logger.debug("Path generated: %s" % path)
407
408 # Set the version_detail string
409 environment['version_detail'].append("version=%s" % environment['version'])
410
411 # Don't bother re-generating a file if it already exists
412 if os.path.exists(path):
413+ logger.debug("Version file already exists")
414 return path
415
416 # Generate version_detail
417
418=== modified file 'lib/systemimage/gpg.py'
419--- lib/systemimage/gpg.py 2015-03-11 20:55:00 +0000
420+++ lib/systemimage/gpg.py 2015-03-11 21:18:58 +0000
421@@ -17,11 +17,14 @@
422
423 import json
424 import gpgme
425+import logging
426 import os
427 import tarfile
428
429 from io import BytesIO
430
431+logger = logging.getLogger(__name__)
432+
433
434 def generate_signing_key(keyring_path, key_name, key_email, key_expiry):
435 """
436@@ -79,7 +82,7 @@
437 destination = "%s.gpg" % path
438
439 if os.path.exists(destination):
440- raise Exception("destination already exists.")
441+ raise Exception("Destination already exists: %s" % destination)
442
443 os.environ['GNUPGHOME'] = key_path
444
445@@ -89,6 +92,8 @@
446 [key] = ctx.keylist()
447 ctx.signers = [key]
448
449+ logger.debug("Signing file: %s" % destination)
450+
451 with open(path, "rb") as fd_in, open(destination, "wb+") as fd_out:
452 if detach:
453 retval = ctx.sign(fd_in, fd_out, gpgme.SIG_MODE_DETACH)
454
455=== modified file 'lib/systemimage/tools.py'
456--- lib/systemimage/tools.py 2015-03-11 20:55:00 +0000
457+++ lib/systemimage/tools.py 2015-03-11 21:18:58 +0000
458@@ -18,6 +18,7 @@
459 from io import BytesIO
460
461 import gzip
462+import logging
463 import os
464 import re
465 import shutil
466@@ -26,6 +27,8 @@
467 import tempfile
468 import time
469
470+logger = logging.getLogger(__name__)
471+
472
473 def expand_path(path, base="/"):
474 """
475@@ -134,7 +137,9 @@
476 destination = "%s.gz" % path
477
478 if os.path.exists(destination):
479- raise Exception("destination already exists.")
480+ raise Exception("Destination already exists: %s" % destination)
481+
482+ logger.debug("Gzipping file: %s" % destination)
483
484 uncompressed = open(path, "rb")
485 compressed = gzip.open(destination, "wb+", level)
486@@ -153,14 +158,16 @@
487 """
488
489 if not destination and path[-3:] != ".gz":
490- raise Exception("unspecified destination and path doesn't end"
491+ raise Exception("Unspecified destination and path doesn't end"
492 " with .gz")
493
494 if not destination:
495 destination = path[:-3]
496
497 if os.path.exists(destination):
498- raise Exception("destination already exists.")
499+ raise Exception("Destination already exists: %s" % destination)
500+
501+ logger.debug("Ungzipping file: %s" % destination)
502
503 compressed = gzip.open(path, "rb")
504 uncompressed = open(destination, "wb+")
505@@ -185,7 +192,9 @@
506 destination = "%s.xz" % path
507
508 if os.path.exists(destination):
509- raise Exception("destination already exists.")
510+ raise Exception("Destination already exists: %s" % destination)
511+
512+ logger.debug("Xzipping file: %s" % destination)
513
514 if find_on_path("pxz"):
515 xz_command = "pxz"
516@@ -208,14 +217,16 @@
517 # NOTE: Once we can drop support for < 3.3, the new lzma module can be used
518
519 if not destination and path[-3:] != ".xz":
520- raise Exception("unspecified destination and path doesn't end"
521+ raise Exception("Unspecified destination and path doesn't end"
522 " with .xz")
523
524 if not destination:
525 destination = path[:-3]
526
527 if os.path.exists(destination):
528- raise Exception("destination already exists.")
529+ raise Exception("Destination already exists: %s" % destination)
530+
531+ logger.debug("Unxzipping file: %s" % destination)
532
533 with open(destination, "wb+") as fd:
534 retval = subprocess.call(['xz', '-d', '-c', path],
535
536=== modified file 'lib/systemimage/tree.py'
537--- lib/systemimage/tree.py 2015-03-11 20:55:00 +0000
538+++ lib/systemimage/tree.py 2015-03-11 21:18:58 +0000
539@@ -17,6 +17,7 @@
540
541 import copy
542 import json
543+import logging
544 import os
545 import shutil
546 import time
547@@ -25,6 +26,8 @@
548 from hashlib import sha256
549 from systemimage import gpg, tools
550
551+logger = logging.getLogger(__name__)
552+
553
554 # Context managers
555 @contextmanager
556@@ -242,7 +245,9 @@
557 Remove any orphaned file from the tree.
558 """
559
560- for entry in self.list_orphaned_files():
561+ orphaned_files = self.list_orphaned_files()
562+ for entry in orphaned_files:
563+
564 if os.path.isdir(entry):
565 os.rmdir(entry)
566 else:
567@@ -466,6 +471,7 @@
568 if tarname in referenced_files:
569 orphaned_files.remove(entry)
570
571+ logger.debug("Orphaned files: %s" % orphaned_files)
572 return sorted(orphaned_files)
573
574 def publish_keyring(self, keyring_name):
