Merge lp:~gocept/landscape-client/py3-package-facade into lp:~landscape/landscape-client/trunk
- py3-package-facade
- Merge into trunk
Proposed by
Steffen Allner
Status: | Superseded |
---|---|
Proposed branch: | lp:~gocept/landscape-client/py3-package-facade |
Merge into: | lp:~landscape/landscape-client/trunk |
Diff against target: |
757 lines (+141/-131) 12 files modified
landscape/broker/store.py (+3/-10) landscape/compat.py (+0/-13) landscape/package/facade.py (+2/-1) landscape/package/reporter.py (+4/-2) landscape/package/skeleton.py (+6/-2) landscape/package/store.py (+10/-9) landscape/package/tests/helpers.py (+32/-17) landscape/package/tests/test_facade.py (+17/-13) landscape/package/tests/test_reporter.py (+25/-27) landscape/package/tests/test_store.py (+35/-35) landscape/schema.py (+2/-2) py3_ready_tests (+5/-0) |
To merge this branch: | bzr merge lp:~gocept/landscape-client/py3-package-facade |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
Gocept | Pending | ||
Landscape | Pending | ||
🤖 Landscape Builder | test results | Pending | |
Review via email:
|
Commit message
Description of the change
This MP makes the landscape package store, reporter, and facade modules Python 3 compatible (converting string handling to bytes where needed) and adds their test suites to the list of Python 3 ready tests.
To post a comment you must log in.
- 990. By Steffen Allner
-
Enforce a fixed field order in stanza.
- 991. By Steffen Allner
-
Backmerge from trunk.
Unmerged revisions
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1 | === modified file 'landscape/broker/store.py' |
2 | --- landscape/broker/store.py 2017-03-17 09:26:45 +0000 |
3 | +++ landscape/broker/store.py 2017-03-21 18:33:15 +0000 |
4 | @@ -101,7 +101,7 @@ |
5 | |
6 | from landscape import DEFAULT_SERVER_API |
7 | from landscape.lib import bpickle |
8 | -from landscape.lib.fs import create_binary_file |
9 | +from landscape.lib.fs import create_binary_file, read_binary_file |
10 | from landscape.lib.versioning import sort_versions, is_version_higher |
11 | |
12 | |
13 | @@ -260,7 +260,7 @@ |
14 | for filename in self._walk_pending_messages(): |
15 | if max is not None and len(messages) >= max: |
16 | break |
17 | - data = self._get_content(self._message_dir(filename)) |
18 | + data = read_binary_file(self._message_dir(filename)) |
19 | try: |
20 | message = bpickle.loads(data) |
21 | except ValueError as e: |
22 | @@ -436,13 +436,6 @@ |
23 | def _message_dir(self, *args): |
24 | return os.path.join(self._directory, *args) |
25 | |
26 | - def _get_content(self, filename): |
27 | - file = open(filename, 'rb') |
28 | - try: |
29 | - return file.read() |
30 | - finally: |
31 | - file.close() |
32 | - |
33 | def _reprocess_holding(self): |
34 | """ |
35 | Unhold accepted messages left behind, and hold unaccepted |
36 | @@ -454,7 +447,7 @@ |
37 | for old_filename in self._walk_messages(): |
38 | flags = self._get_flags(old_filename) |
39 | try: |
40 | - message = bpickle.loads(self._get_content(old_filename)) |
41 | + message = bpickle.loads(read_binary_file(old_filename)) |
42 | except ValueError as e: |
43 | logging.exception(e) |
44 | if HELD not in flags: |
45 | |
46 | === modified file 'landscape/compat.py' |
47 | --- landscape/compat.py 2017-03-17 09:26:45 +0000 |
48 | +++ landscape/compat.py 2017-03-21 18:33:15 +0000 |
49 | @@ -46,16 +46,3 @@ |
50 | return s.decode(encoding, errors) |
51 | else: |
52 | return s |
53 | - |
54 | - |
55 | -def convert_buffer_to_string(mem_view): |
56 | - """ |
57 | - Converts a buffer in Python 2 or a memoryview in Python 3 to str. |
58 | - |
59 | - @param mem_view: The view to convert. |
60 | - """ |
61 | - if _PY3: |
62 | - result = mem_view.decode('ascii') |
63 | - else: |
64 | - result = str(mem_view) |
65 | - return result |
66 | |
67 | === modified file 'landscape/package/facade.py' |
68 | --- landscape/package/facade.py 2017-03-13 15:38:09 +0000 |
69 | +++ landscape/package/facade.py 2017-03-21 18:33:15 +0000 |
70 | @@ -196,7 +196,8 @@ |
71 | process = subprocess.Popen( |
72 | ["dpkg", "--set-selections"] + self._dpkg_args, |
73 | stdin=subprocess.PIPE) |
74 | - process.communicate(selection) |
75 | + # We need bytes here to communicate with the process. |
76 | + process.communicate(selection.encode("utf-8")) |
77 | |
78 | def set_package_hold(self, version): |
79 | """Add a dpkg hold for a package. |
80 | |
81 | === modified file 'landscape/package/reporter.py' |
82 | --- landscape/package/reporter.py 2017-03-20 09:43:08 +0000 |
83 | +++ landscape/package/reporter.py 2017-03-21 18:33:15 +0000 |
84 | @@ -20,7 +20,6 @@ |
85 | from landscape.lib.fs import touch_file |
86 | from landscape.lib.lsb_release import parse_lsb_release, LSB_RELEASE_FILENAME |
87 | |
88 | -from landscape.compat import convert_buffer_to_string |
89 | from landscape.package.taskhandler import ( |
90 | PackageTaskHandlerConfiguration, PackageTaskHandler, run_task_handler) |
91 | from landscape.package.store import UnknownHashIDRequest, FakePackageStore |
92 | @@ -231,6 +230,9 @@ |
93 | self._reactor.call_later( |
94 | LOCK_RETRY_DELAYS[retry], self._apt_update, deferred) |
95 | out, err, code = yield deferred |
96 | + out = out.decode("utf-8") |
97 | + err = err.decode("utf-8") |
98 | + |
99 | timestamp = self._reactor.time() |
100 | |
101 | touch_file(self._config.update_stamp_filename) |
102 | @@ -730,7 +732,7 @@ |
103 | messages = global_store.get_messages_by_ids(not_sent) |
104 | sent = [] |
105 | for message_id, message in messages: |
106 | - message = bpickle.loads(convert_buffer_to_string(message)) |
107 | + message = bpickle.loads(message) |
108 | if message["type"] not in got_type: |
109 | got_type.add(message["type"]) |
110 | sent.append(message_id) |
111 | |
112 | === modified file 'landscape/package/skeleton.py' |
113 | --- landscape/package/skeleton.py 2017-03-10 12:19:57 +0000 |
114 | +++ landscape/package/skeleton.py 2017-03-21 18:33:15 +0000 |
115 | @@ -50,10 +50,14 @@ |
116 | """ |
117 | if self._hash is not None: |
118 | return self._hash |
119 | - digest = sha1("[%d %s %s]" % (self.type, self.name, self.version)) |
120 | + # We use ascii here as encoding for backwards compatibility as it was |
121 | + # default encoding for conversion from unicode to bytes in Python 2.7. |
122 | + package_info = b"[%d %s %s]" % ( |
123 | + self.type, self.name.encode("ascii"), self.version.encode("ascii")) |
124 | + digest = sha1(package_info) |
125 | self.relations.sort() |
126 | for pair in self.relations: |
127 | - digest.update("[%d %s]" % pair) |
128 | + digest.update(b"[%d %s]" % (pair[0], pair[1].encode("ascii"))) |
129 | return digest.digest() |
130 | |
131 | def set_hash(self, package_hash): |
132 | |
133 | === modified file 'landscape/package/store.py' |
134 | --- landscape/package/store.py 2017-03-17 09:26:45 +0000 |
135 | +++ landscape/package/store.py 2017-03-21 18:33:15 +0000 |
136 | @@ -7,9 +7,7 @@ |
137 | from pysqlite2 import dbapi2 as sqlite3 |
138 | |
139 | from twisted.python.compat import iteritems, long |
140 | -from twisted.python.compat import StringType as basestring |
141 | |
142 | -from landscape.compat import convert_buffer_to_string |
143 | from landscape.lib import bpickle |
144 | from landscape.lib.store import with_cursor |
145 | |
146 | @@ -50,7 +48,10 @@ |
147 | |
148 | @with_cursor |
149 | def get_hash_id(self, cursor, hash): |
150 | - """Return the id associated to C{hash}, or C{None} if not available.""" |
151 | + """Return the id associated to C{hash}, or C{None} if not available. |
152 | + |
153 | + @param hash: a C{bytes} representing a hash. |
154 | + """ |
155 | cursor.execute("SELECT id FROM hash WHERE hash=?", |
156 | (sqlite3.Binary(hash),)) |
157 | value = cursor.fetchone() |
158 | @@ -62,7 +63,7 @@ |
159 | def get_hash_ids(self, cursor): |
160 | """Return a C{dict} holding all the available hash=>id mappings.""" |
161 | cursor.execute("SELECT hash, id FROM hash") |
162 | - return dict([(str(row[0]), row[1]) for row in cursor.fetchall()]) |
163 | + return {bytes(row[0]): row[1] for row in cursor.fetchall()} |
164 | |
165 | @with_cursor |
166 | def get_id_hash(self, cursor, id): |
167 | @@ -71,7 +72,7 @@ |
168 | cursor.execute("SELECT hash FROM hash WHERE id=?", (id,)) |
169 | value = cursor.fetchone() |
170 | if value: |
171 | - return str(value[0]) |
172 | + return bytes(value[0]) |
173 | return None |
174 | |
175 | @with_cursor |
176 | @@ -149,7 +150,7 @@ |
177 | the attached lookaside databases, falling back to the main one, as |
178 | described in L{add_hash_id_db}. |
179 | """ |
180 | - assert isinstance(hash, basestring) |
181 | + assert isinstance(hash, bytes) |
182 | |
183 | # Check if we can find the hash=>id mapping in the lookaside stores |
184 | for store in self._hash_id_stores: |
185 | @@ -332,7 +333,7 @@ |
186 | result = cursor.execute( |
187 | "SELECT id, data FROM message WHERE id IN (%s) " |
188 | "ORDER BY id" % params, tuple(message_ids)).fetchall() |
189 | - return [(row[0], row[1]) for row in result] |
190 | + return [(row[0], bytes(row[1])) for row in result] |
191 | |
192 | |
193 | class HashIDRequest(object): |
194 | @@ -346,7 +347,7 @@ |
195 | def hashes(self, cursor): |
196 | cursor.execute("SELECT hashes FROM hash_id_request WHERE id=?", |
197 | (self.id,)) |
198 | - return bpickle.loads(convert_buffer_to_string(cursor.fetchone()[0])) |
199 | + return bpickle.loads(bytes(cursor.fetchone()[0])) |
200 | |
201 | @with_cursor |
202 | def _get_timestamp(self, cursor): |
203 | @@ -395,7 +396,7 @@ |
204 | |
205 | self.queue = row[0] |
206 | self.timestamp = row[1] |
207 | - self.data = bpickle.loads(convert_buffer_to_string(row[2])) |
208 | + self.data = bpickle.loads(bytes(row[2])) |
209 | |
210 | @with_cursor |
211 | def remove(self, cursor): |
212 | |
213 | === modified file 'landscape/package/tests/helpers.py' |
214 | --- landscape/package/tests/helpers.py 2017-03-13 15:15:46 +0000 |
215 | +++ landscape/package/tests/helpers.py 2017-03-21 18:33:15 +0000 |
216 | @@ -6,7 +6,7 @@ |
217 | import apt_inst |
218 | import apt_pkg |
219 | |
220 | -from landscape.lib.fs import append_binary_file, append_text_file |
221 | +from landscape.lib.fs import append_binary_file |
222 | from landscape.lib.fs import create_binary_file |
223 | from landscape.package.facade import AptFacade |
224 | |
225 | @@ -49,11 +49,17 @@ |
226 | """ % { |
227 | "name": name, "version": version, |
228 | "architecture": architecture, |
229 | - "description": description.encode("utf-8")}) |
230 | + "description": description}).encode("utf-8") |
231 | + # We want to re-order the TagSection, but it requires bytes as input. |
232 | + # As we also want to write a binary file, we have to explicitly pass |
233 | + # the hardly documented `bytes=True` to TagSection as it would be |
234 | + # returned as unicode in Python 3 otherwise. In future versions of |
235 | + # apt_pkg there should be a TagSection.write() which is recommended. |
236 | package_stanza = apt_pkg.rewrite_section( |
237 | - apt_pkg.TagSection(package_stanza), apt_pkg.REWRITE_PACKAGE_ORDER, |
238 | - control_fields.items()) |
239 | - append_binary_file(packages_file, "\n" + package_stanza + "\n") |
240 | + apt_pkg.TagSection(package_stanza, bytes=True), |
241 | + apt_pkg.REWRITE_PACKAGE_ORDER, |
242 | + list(control_fields.items())) |
243 | + append_binary_file(packages_file, b"\n" + package_stanza + b"\n") |
244 | |
245 | def _add_system_package(self, name, architecture="all", version="1.0", |
246 | control_fields=None): |
247 | @@ -72,9 +78,9 @@ |
248 | control = deb.control.extractdata("control") |
249 | deb_file.close() |
250 | lines = control.splitlines() |
251 | - lines.insert(1, "Status: install ok installed") |
252 | - status = "\n".join(lines) |
253 | - append_text_file(self.dpkg_status, status + "\n\n") |
254 | + lines.insert(1, b"Status: install ok installed") |
255 | + status = b"\n".join(lines) |
256 | + append_binary_file(self.dpkg_status, status + b"\n\n") |
257 | |
258 | def _add_package_to_deb_dir(self, path, name, architecture="all", |
259 | version="1.0", description="description", |
260 | @@ -161,7 +167,8 @@ |
261 | "Mzepwz6J7y5jpkIOH6sDKssF1rmUqYzBX2piZj9zyFad5RHv8dLoXsqua2spF3v+PQ" |
262 | "ffXIlN8aYepsu3x2u0202VX+QFC10st6vvMfDdacgtdzKtpe5G5tuFYx5elcpXm27O" |
263 | "d8LH7Oj3mqP7VgD8P6dTmJ33dsPnpuBnPO3SvLDNlu6ay9It6yZon0BIZRMApGwSgY" |
264 | - "BaNgFIyCUTAKRsEoGAWjYBSMglEwCkbBKBgFo2AUjIJRMApGAUkAADhX8vgAKAAA ") |
265 | + "BaNgFIyCUTAKRsEoGAWjYBSMglEwCkbBKBgFo2AUjIJRMApGAUkAADhX8vgAKAAA " |
266 | + ).encode("ascii") |
267 | |
268 | PKGDEB2 = ("ITxhcmNoPgpkZWJpYW4tYmluYXJ5ICAgMTE2NjExNDUyMiAgMCAgICAgMCAgICAgMT" |
269 | "AwNjQ0ICA0ICAgICAgICAgYAoyLjAKY29udHJvbC50YXIuZ3ogIDExNjYxMTQ1MjIg" |
270 | @@ -183,7 +190,8 @@ |
271 | "jR45xB99RGrkMGEq4Pbf0L3UWDL4XIRIk6Hjx7Urzj6SSxS/YTzKbu28sqe/64oPmF" |
272 | "JGPj3lqR1cLMdz12u04rLHp/gM2y0mv3HOc/GqxvCl7PqWh7kbux6VrFk69zlefZsu" |
273 | "v5WPycH/NUv7VgF8N6vfeBcgXp3NlnBFNDw5eZsd1as/aK+JzyvZ0TGEbBKBgFo2AU" |
274 | - "jIJRMApGwSgYBaNgFIyCUTAKRsEoGAWjYBSMglEwCkbBKBgFJAEAu4OlKQAoAAAK") |
275 | + "jIJRMApGwSgYBaNgFIyCUTAKRsEoGAWjYBSMglEwCkbBKBgFJAEAu4OlKQAoAAAK" |
276 | + ).encode("ascii") |
277 | |
278 | PKGDEB3 = ("ITxhcmNoPgpkZWJpYW4tYmluYXJ5ICAgMTE2OTE0ODIwMyAgMCAgICAgMCAgICAgMT" |
279 | "AwNjQ0ICA0ICAgICAgICAgYAoyLjAKY29udHJvbC50YXIuZ3ogIDExNjkxNDgyMDMg" |
280 | @@ -206,7 +214,8 @@ |
281 | "bOTd7zh0Xz0y5bdGmDrbLp/dbhNtdpU/EFSt9LKe7/xHgzWn4PWcirYXuVsbrlVMeT" |
282 | "pXaZ4t+zkfi5/zY57qTy3Yw7B+XU7g+8L07rmG7Fe2bVxmyHZLZ+0V8Sl2Xj8mMIyC" |
283 | "UTAKRsEoGAWjYBSMglEwCkbBKBgFo2AUjIJRMApGwSgYBaNgFIyCUTAKSAIAY/FOKA" |
284 | - "AoAAAK") |
285 | + "AoAAAK" |
286 | + ).encode("ascii") |
287 | |
288 | PKGDEB4 = ("ITxhcmNoPgpkZWJpYW4tYmluYXJ5ICAgMTI3NjUxMTU3OC41MCAgICAgMCAgICAgNj" |
289 | "Q0ICAgICA0\nICAgICAgICAgYAoyLjAKY29udHJvbC50YXIuZ3ogIDEyNzY1MTE1Nz" |
290 | @@ -221,7 +230,8 @@ |
291 | "ICAgYAofiwgAWgUWTAL/7dFBCsMgEEDRWfcUniCZ\nsU57kJ5ASJdFSOz9K9kULLQr" |
292 | "C4H/NiPqQvnTLMNpc3XfZ9PPfW2W1JOae9s3i5okuPzBc6t5bU9Z\nS6nf7v067z93" |
293 | "ENO8lcd9fP/LZ/d3f4td/6h+lqD0H+7W6ocl13wSAAAAAAAAAAAAAAAAAAfzAqr5\n" |
294 | - "GFYAKAAACg==\n") |
295 | + "GFYAKAAACg==\n" |
296 | + ).encode("ascii") |
297 | |
298 | PKGDEB_MINIMAL = ( |
299 | "ITxhcmNoPgpkZWJpYW4tYmluYXJ5ICAgMTMxNzg5MDQ3OSAgMCAgICAgMCAgICAgMTAwNj" |
300 | @@ -234,7 +244,8 @@ |
301 | "AAAAAAAAAAAAAAAAAAAAAMBF70s1/foAKAAAZGF0YS50YXIu Z3ogICAgIDEzMTc4OTA0N" |
302 | "zkgIDAgICAgIDAgICAgIDEwMDY0NCAgMTA3ICAgICAgIGAKH4sIAAAA AAACA+3KsQ3CQB" |
303 | "AEwCvlK4D/N4frMSGBkQz0jwmQiHCEo5lkpd09HOPv6mrMfGcbs37nR7R2Pg01" |
304 | - "ew5r32rvNUrGDp73x7SUEpfrbZl//LZ2AAAAAAAAAAAA2NELx33R7wAoAAAK") |
305 | + "ew5r32rvNUrGDp73x7SUEpfrbZl//LZ2AAAAAAAAAAAA2NELx33R7wAoAAAK" |
306 | +).encode("ascii") |
307 | |
308 | PKGDEB_SIMPLE_RELATIONS = ( |
309 | "ITxhcmNoPgpkZWJpYW4tYmluYXJ5ICAgMTMxODUxNjMyMiAgMCAgICAgMCAgICAgMTAwNj" |
310 | @@ -249,7 +260,8 @@ |
311 | "EKgcHt1gAoAABkYXRhLnRhci5neiAgICAgMTMxODUxNjMyMiAgMCAgICAgMCAg ICAgMTA" |
312 | "wNjQ0ICAxMDcgICAgICAgYAofiwgAAAAAAAID7cqxDcJQEETBK8UVwH2b+64HQgIjGegf " |
313 | "CJCIIMLRTPKC3d0+/i6f5qpX21z52bdorR+m7Fl9imw5jhVDxQbu19txHYY4nS/r8uX3aw" |
314 | - "cAAAAA AAAAAIANPQALnD6FACgAAAo=") |
315 | + "cAAAAA AAAAAIANPQALnD6FACgAAAo=" |
316 | +).encode("ascii") |
317 | |
318 | |
319 | PKGDEB_VERSION_RELATIONS = ( |
320 | @@ -265,7 +277,8 @@ |
321 | "AAAACAy/sAwTtOtwAoAABkYXRhLnRhci5neiAgICAgMTMxODUxNjQ5OCAgMCAg ICAgMCA" |
322 | "gICAgMTAwNjQ0ICAxMDcgICAgICAgYAofiwgAAAAAAAID7cqxEcIwEETRK0UVgCT7UD0Q " |
323 | "EpgZA/0DATNEEOHoveQHu7t9/F19GpmvtpH1s2/R2mGeemYfc9RW+9SjZGzgfr0d11LidL" |
324 | - "6sy5ff rx0AAAAAAAAAAAA29AD/ixlwACgAAAo=") |
325 | + "6sy5ff rx0AAAAAAAAAAAA29AD/ixlwACgAAAo=" |
326 | +).encode("ascii") |
327 | |
328 | |
329 | PKGDEB_MULTIPLE_RELATIONS = ( |
330 | @@ -282,7 +295,8 @@ |
331 | "0YS50YXIuZ3ogICAgIDEzMTg1ODAwNzkgIDAgICAgIDAgICAgIDEwMDY0NCAgMTA3ICAg " |
332 | "ICAgIGAKH4sIAAAAAAACA+3KsRHCMBBE0StFFYBkfFY9EBKYGWP3DwTMEEGEo/eSH+wejv" |
333 | "F39aln vtp61s++RWvTeBpy6tmjtjqMLUrGDrb7el5Kicv1tsxffr92AAAAAAAAAAAA2NE" |
334 | - "Db6L1AQAoAAAK") |
335 | + "Db6L1AQAoAAAK" |
336 | +).encode("ascii") |
337 | |
338 | |
339 | PKGDEB_OR_RELATIONS = ( |
340 | @@ -299,7 +313,8 @@ |
341 | "6ICAgICAxMzE3ODg4ODY5ICAwICAgICAwICAgICAxMDA2NDQgIDEwNyAgICAgICBgCh+L " |
342 | "CAAAAAAAAgPtyrsRwjAURNFXiioAfZBcjwkJzIyB/oGAGSIc4eic5Aa7h2P8XX6Zen+3TD" |
343 | "1/9yNK" |
344 | - "GadWR2ltRC651hGpxw4et/u8phTny3Vdfvy2dgAAAAAAAAAAANjRE6Lr2rEAKAAACg==") |
345 | + "GadWR2ltRC651hGpxw4et/u8phTny3Vdfvy2dgAAAAAAAAAAANjRE6Lr2rEAKAAACg==" |
346 | +).encode("ascii") |
347 | |
348 | |
349 | HASH1 = base64.decodestring(b"/ezv4AefpJJ8DuYFSq4RiEHJYP4=") |
350 | |
351 | === modified file 'landscape/package/tests/test_facade.py' |
352 | --- landscape/package/tests/test_facade.py 2017-03-13 15:38:09 +0000 |
353 | +++ landscape/package/tests/test_facade.py 2017-03-21 18:33:15 +0000 |
354 | @@ -9,7 +9,7 @@ |
355 | from aptsources.sourceslist import SourcesList |
356 | from apt.cache import LockFailedException |
357 | |
358 | -from twisted.python.compat import unicode |
359 | +from twisted.python.compat import unicode, _PY3 |
360 | |
361 | from landscape.lib.fs import read_text_file, create_text_file |
362 | from landscape.package.facade import ( |
363 | @@ -289,7 +289,11 @@ |
364 | stanza = self.facade.get_package_stanza(deb_file).split("\n") |
365 | SHA256 = ( |
366 | "f899cba22b79780dbe9bbbb802ff901b7e432425c264dc72e6bb20c0061e4f26") |
367 | - self.assertItemsEqual(textwrap.dedent("""\ |
368 | + if _PY3: |
369 | + assertion = self.assertCountEqual |
370 | + else: |
371 | + assertion = self.assertItemsEqual |
372 | + assertion(textwrap.dedent("""\ |
373 | Package: name1 |
374 | Priority: optional |
375 | Section: Group1 |
376 | @@ -1029,9 +1033,9 @@ |
377 | self.facade.mark_install(foo) |
378 | |
379 | def print_output(fetch_progress, install_progress): |
380 | - os.write(1, "Stdout output\n") |
381 | - os.write(2, "Stderr output\n") |
382 | - os.write(1, "Stdout output again\n") |
383 | + os.write(1, b"Stdout output\n") |
384 | + os.write(2, b"Stderr output\n") |
385 | + os.write(1, b"Stdout output again\n") |
386 | |
387 | self.patch_cache_commit(print_output) |
388 | output = [ |
389 | @@ -1054,9 +1058,9 @@ |
390 | self.facade.mark_install(foo) |
391 | |
392 | def commit(fetch_progress, install_progress): |
393 | - os.write(1, "Stdout output\n") |
394 | - os.write(2, "Stderr output\n") |
395 | - os.write(1, "Stdout output again\n") |
396 | + os.write(1, b"Stdout output\n") |
397 | + os.write(2, b"Stderr output\n") |
398 | + os.write(1, b"Stdout output again\n") |
399 | raise SystemError("Oops") |
400 | |
401 | self.facade._cache.commit = commit |
402 | @@ -1084,12 +1088,12 @@ |
403 | |
404 | def commit1(fetch_progress, install_progress): |
405 | self.facade._cache.commit = commit2 |
406 | - os.write(2, "bad stuff!\n") |
407 | + os.write(2, b"bad stuff!\n") |
408 | raise LockFailedException("Oops") |
409 | |
410 | def commit2(fetch_progress, install_progress): |
411 | install_progress.dpkg_exited = True |
412 | - os.write(1, "good stuff!") |
413 | + os.write(1, b"good stuff!") |
414 | |
415 | self.facade._cache.commit = commit1 |
416 | output = [ |
417 | @@ -1113,12 +1117,12 @@ |
418 | |
419 | def commit1(fetch_progress, install_progress): |
420 | self.facade._cache.commit = commit2 |
421 | - os.write(2, "bad stuff!\n") |
422 | + os.write(2, b"bad stuff!\n") |
423 | raise SystemError("Oops") |
424 | |
425 | def commit2(fetch_progress, install_progress): |
426 | install_progress.dpkg_exited = True |
427 | - os.write(1, "good stuff!") |
428 | + os.write(1, b"good stuff!") |
429 | |
430 | self.facade._cache.commit = commit1 |
431 | self.assertRaises(TransactionError, self.facade.perform_changes) |
432 | @@ -1216,7 +1220,7 @@ |
433 | |
434 | def commit(fetch_progress, install_progress): |
435 | install_progress.dpkg_exited = False |
436 | - os.write(1, "Stdout output\n") |
437 | + os.write(1, b"Stdout output\n") |
438 | |
439 | self.facade._cache.commit = commit |
440 | exception = self.assertRaises( |
441 | |
442 | === modified file 'landscape/package/tests/test_reporter.py' |
443 | --- landscape/package/tests/test_reporter.py 2017-03-20 09:43:08 +0000 |
444 | +++ landscape/package/tests/test_reporter.py 2017-03-21 18:33:15 +0000 |
445 | @@ -27,7 +27,6 @@ |
446 | LandscapeTest, BrokerServiceHelper, EnvironSaverHelper) |
447 | from landscape.reactor import FakeReactor |
448 | |
449 | -from landscape.compat import convert_buffer_to_string |
450 | |
451 | SAMPLE_LSB_RELEASE = "DISTRIB_CODENAME=codename\n" |
452 | |
453 | @@ -96,21 +95,21 @@ |
454 | os.chmod(self.reporter.apt_update_filename, 0o755) |
455 | |
456 | def test_set_package_ids_with_all_known(self): |
457 | - self.store.add_hash_id_request(["hash1", "hash2"]) |
458 | - request2 = self.store.add_hash_id_request(["hash3", "hash4"]) |
459 | - self.store.add_hash_id_request(["hash5", "hash6"]) |
460 | + self.store.add_hash_id_request([b"hash1", b"hash2"]) |
461 | + request2 = self.store.add_hash_id_request([b"hash3", b"hash4"]) |
462 | + self.store.add_hash_id_request([b"hash5", b"hash6"]) |
463 | |
464 | self.store.add_task("reporter", |
465 | {"type": "package-ids", "ids": [123, 456], |
466 | "request-id": request2.id}) |
467 | |
468 | def got_result(result): |
469 | - self.assertEqual(self.store.get_hash_id("hash1"), None) |
470 | - self.assertEqual(self.store.get_hash_id("hash2"), None) |
471 | - self.assertEqual(self.store.get_hash_id("hash3"), 123) |
472 | - self.assertEqual(self.store.get_hash_id("hash4"), 456) |
473 | - self.assertEqual(self.store.get_hash_id("hash5"), None) |
474 | - self.assertEqual(self.store.get_hash_id("hash6"), None) |
475 | + self.assertEqual(self.store.get_hash_id(b"hash1"), None) |
476 | + self.assertEqual(self.store.get_hash_id(b"hash2"), None) |
477 | + self.assertEqual(self.store.get_hash_id(b"hash3"), 123) |
478 | + self.assertEqual(self.store.get_hash_id(b"hash4"), 456) |
479 | + self.assertEqual(self.store.get_hash_id(b"hash5"), None) |
480 | + self.assertEqual(self.store.get_hash_id(b"hash6"), None) |
481 | |
482 | deferred = self.reporter.handle_tasks() |
483 | return deferred.addCallback(got_result) |
484 | @@ -129,7 +128,7 @@ |
485 | |
486 | message_store.set_accepted_types(["add-packages"]) |
487 | |
488 | - request1 = self.store.add_hash_id_request(["foo", HASH1, "bar"]) |
489 | + request1 = self.store.add_hash_id_request([b"foo", HASH1, b"bar"]) |
490 | |
491 | self.store.add_task("reporter", |
492 | {"type": "package-ids", |
493 | @@ -184,7 +183,7 @@ |
494 | |
495 | message_store.set_accepted_types(["add-packages"]) |
496 | |
497 | - request1 = self.store.add_hash_id_request(["foo", HASH1, "bar"]) |
498 | + request1 = self.store.add_hash_id_request([b"foo", HASH1, b"bar"]) |
499 | |
500 | self.store.add_task("reporter", |
501 | {"type": "package-ids", |
502 | @@ -238,7 +237,7 @@ |
503 | deferred = Deferred() |
504 | deferred.errback(Boom()) |
505 | |
506 | - request_id = self.store.add_hash_id_request(["foo", HASH1, "bar"]).id |
507 | + request_id = self.store.add_hash_id_request([b"foo", HASH1, b"bar"]).id |
508 | |
509 | self.store.add_task("reporter", {"type": "package-ids", |
510 | "ids": [123, None, 456], |
511 | @@ -259,7 +258,7 @@ |
512 | return result.addCallback(got_result, send_mock) |
513 | |
514 | def test_set_package_ids_removes_request_id_when_done(self): |
515 | - request = self.store.add_hash_id_request(["hash1"]) |
516 | + request = self.store.add_hash_id_request([b"hash1"]) |
517 | self.store.add_task("reporter", {"type": "package-ids", "ids": [123], |
518 | "request-id": request.id}) |
519 | |
520 | @@ -562,7 +561,7 @@ |
521 | self.assertTrue(self.reporter._apt_sources_have_changed()) |
522 | |
523 | def test_remove_expired_hash_id_request(self): |
524 | - request = self.store.add_hash_id_request(["hash1"]) |
525 | + request = self.store.add_hash_id_request([b"hash1"]) |
526 | request.message_id = 9999 |
527 | |
528 | request.timestamp -= HASH_ID_REQUEST_TIMEOUT |
529 | @@ -575,7 +574,7 @@ |
530 | return result.addCallback(got_result) |
531 | |
532 | def test_remove_expired_hash_id_request_wont_remove_before_timeout(self): |
533 | - request1 = self.store.add_hash_id_request(["hash1"]) |
534 | + request1 = self.store.add_hash_id_request([b"hash1"]) |
535 | request1.message_id = 9999 |
536 | request1.timestamp -= HASH_ID_REQUEST_TIMEOUT / 2 |
537 | |
538 | @@ -592,7 +591,7 @@ |
539 | return result.addCallback(got_result) |
540 | |
541 | def test_remove_expired_hash_id_request_updates_timestamps(self): |
542 | - request = self.store.add_hash_id_request(["hash1"]) |
543 | + request = self.store.add_hash_id_request([b"hash1"]) |
544 | message_store = self.broker_service.message_store |
545 | message_id = message_store.add({"type": "add-packages", |
546 | "packages": [], |
547 | @@ -607,7 +606,7 @@ |
548 | return result.addCallback(got_result) |
549 | |
550 | def test_remove_expired_hash_id_request_removes_when_no_message_id(self): |
551 | - request = self.store.add_hash_id_request(["hash1"]) |
552 | + request = self.store.add_hash_id_request([b"hash1"]) |
553 | |
554 | def got_result(result): |
555 | self.assertRaises(UnknownHashIDRequest, |
556 | @@ -1305,9 +1304,9 @@ |
557 | spawn_patcher = mock.patch.object(reporter, "spawn_process", |
558 | side_effect=[ |
559 | # Simulate series of failures to acquire the apt lock. |
560 | - succeed(('', '', 100)), |
561 | - succeed(('', '', 100)), |
562 | - succeed(('', '', 100))]) |
563 | + succeed((b'', b'', 100)), |
564 | + succeed((b'', b'', 100)), |
565 | + succeed((b'', b'', 100))]) |
566 | spawn_patcher.start() |
567 | self.addCleanup(spawn_patcher.stop) |
568 | |
569 | @@ -1343,8 +1342,8 @@ |
570 | spawn_patcher = mock.patch.object(reporter, "spawn_process", |
571 | side_effect=[ |
572 | # Simulate a failed apt lock grab then a successful one. |
573 | - succeed(('', '', 100)), |
574 | - succeed(('output', 'error', 0))]) |
575 | + succeed((b'', b'', 100)), |
576 | + succeed((b'output', b'error', 0))]) |
577 | spawn_patcher.start() |
578 | self.addCleanup(spawn_patcher.stop) |
579 | |
580 | @@ -1628,7 +1627,7 @@ |
581 | return deferred |
582 | |
583 | @mock.patch("landscape.package.reporter.spawn_process", |
584 | - return_value=succeed(("", "", 0))) |
585 | + return_value=succeed((b"", b"", 0))) |
586 | def test_run_apt_update_honors_http_proxy(self, mock_spawn_process): |
587 | """ |
588 | The PackageReporter.run_apt_update method honors the http_proxy |
589 | @@ -1647,7 +1646,7 @@ |
590 | env={"http_proxy": "http://proxy_server:8080"}) |
591 | |
592 | @mock.patch("landscape.package.reporter.spawn_process", |
593 | - return_value=succeed(("", "", 0))) |
594 | + return_value=succeed((b"", b"", 0))) |
595 | def test_run_apt_update_honors_https_proxy(self, mock_spawn_process): |
596 | """ |
597 | The PackageReporter.run_apt_update method honors the https_proxy |
598 | @@ -1869,8 +1868,7 @@ |
599 | "SELECT id, data FROM message").fetchall()) |
600 | self.assertEqual(1, len(stored)) |
601 | self.assertEqual(1, stored[0][0]) |
602 | - self.assertEqual(message, |
603 | - bpickle.loads(convert_buffer_to_string(stored[0][1]))) |
604 | + self.assertEqual(message, bpickle.loads(bytes(stored[0][1]))) |
605 | result.addCallback(callback) |
606 | result.chainDeferred(deferred) |
607 | |
608 | |
609 | === modified file 'landscape/package/tests/test_store.py' |
610 | --- landscape/package/tests/test_store.py 2017-01-09 14:29:54 +0000 |
611 | +++ landscape/package/tests/test_store.py 2017-03-21 18:33:15 +0000 |
612 | @@ -19,12 +19,12 @@ |
613 | self.store2 = HashIdStore(self.filename) |
614 | |
615 | def test_set_and_get_hash_id(self): |
616 | - self.store1.set_hash_ids({"ha\x00sh1": 123, "ha\x00sh2": 456}) |
617 | - self.assertEqual(self.store1.get_hash_id("ha\x00sh1"), 123) |
618 | - self.assertEqual(self.store1.get_hash_id("ha\x00sh2"), 456) |
619 | + self.store1.set_hash_ids({b"ha\x00sh1": 123, b"ha\x00sh2": 456}) |
620 | + self.assertEqual(self.store1.get_hash_id(b"ha\x00sh1"), 123) |
621 | + self.assertEqual(self.store1.get_hash_id(b"ha\x00sh2"), 456) |
622 | |
623 | def test_get_hash_ids(self): |
624 | - hash_ids = {"hash1": 123, "hash2": 456} |
625 | + hash_ids = {b"hash1": 123, b"hash2": 456} |
626 | self.store1.set_hash_ids(hash_ids) |
627 | self.assertEqual(self.store1.get_hash_ids(), hash_ids) |
628 | |
629 | @@ -80,33 +80,33 @@ |
630 | self.assertEqual([None], rollbacks) |
631 | |
632 | def test_get_id_hash(self): |
633 | - self.store1.set_hash_ids({"hash1": 123, "hash2": 456}) |
634 | - self.assertEqual(self.store2.get_id_hash(123), "hash1") |
635 | - self.assertEqual(self.store2.get_id_hash(456), "hash2") |
636 | + self.store1.set_hash_ids({b"hash1": 123, b"hash2": 456}) |
637 | + self.assertEqual(self.store2.get_id_hash(123), b"hash1") |
638 | + self.assertEqual(self.store2.get_id_hash(456), b"hash2") |
639 | |
640 | def test_clear_hash_ids(self): |
641 | - self.store1.set_hash_ids({"ha\x00sh1": 123, "ha\x00sh2": 456}) |
642 | + self.store1.set_hash_ids({b"ha\x00sh1": 123, b"ha\x00sh2": 456}) |
643 | self.store1.clear_hash_ids() |
644 | - self.assertEqual(self.store2.get_hash_id("ha\x00sh1"), None) |
645 | - self.assertEqual(self.store2.get_hash_id("ha\x00sh2"), None) |
646 | + self.assertEqual(self.store2.get_hash_id(b"ha\x00sh1"), None) |
647 | + self.assertEqual(self.store2.get_hash_id(b"ha\x00sh2"), None) |
648 | |
649 | def test_get_unexistent_hash(self): |
650 | - self.assertEqual(self.store1.get_hash_id("hash1"), None) |
651 | + self.assertEqual(self.store1.get_hash_id(b"hash1"), None) |
652 | |
653 | def test_get_unexistent_id(self): |
654 | self.assertEqual(self.store1.get_id_hash(123), None) |
655 | |
656 | def test_overwrite_id_hash(self): |
657 | - self.store1.set_hash_ids({"hash1": 123}) |
658 | - self.store2.set_hash_ids({"hash2": 123}) |
659 | - self.assertEqual(self.store1.get_hash_id("hash1"), None) |
660 | - self.assertEqual(self.store1.get_hash_id("hash2"), 123) |
661 | + self.store1.set_hash_ids({b"hash1": 123}) |
662 | + self.store2.set_hash_ids({b"hash2": 123}) |
663 | + self.assertEqual(self.store1.get_hash_id(b"hash1"), None) |
664 | + self.assertEqual(self.store1.get_hash_id(b"hash2"), 123) |
665 | |
666 | def test_overwrite_hash_id(self): |
667 | - self.store1.set_hash_ids({"hash1": 123}) |
668 | - self.store2.set_hash_ids({"hash1": 456}) |
669 | + self.store1.set_hash_ids({b"hash1": 123}) |
670 | + self.store2.set_hash_ids({b"hash1": 456}) |
671 | self.assertEqual(self.store1.get_id_hash(123), None) |
672 | - self.assertEqual(self.store1.get_id_hash(456), "hash1") |
673 | + self.assertEqual(self.store1.get_id_hash(456), b"hash1") |
674 | |
675 | def test_check_sanity(self): |
676 | |
677 | @@ -184,22 +184,22 @@ |
678 | |
679 | def test_get_hash_id_using_hash_id_dbs(self): |
680 | # Without hash=>id dbs |
681 | - self.assertEqual(self.store1.get_hash_id("hash1"), None) |
682 | - self.assertEqual(self.store1.get_hash_id("hash2"), None) |
683 | + self.assertEqual(self.store1.get_hash_id(b"hash1"), None) |
684 | + self.assertEqual(self.store1.get_hash_id(b"hash2"), None) |
685 | |
686 | # This hash=>id will be overriden |
687 | - self.store1.set_hash_ids({"hash1": 1}) |
688 | + self.store1.set_hash_ids({b"hash1": 1}) |
689 | |
690 | # Add a couple of hash=>id dbs |
691 | - self.store1.add_hash_id_db(self.hash_id_db_factory({"hash1": 2, |
692 | - "hash2": 3})) |
693 | - self.store1.add_hash_id_db(self.hash_id_db_factory({"hash2": 4, |
694 | - "ha\x00sh1": 5})) |
695 | + self.store1.add_hash_id_db(self.hash_id_db_factory({b"hash1": 2, |
696 | + b"hash2": 3})) |
697 | + self.store1.add_hash_id_db(self.hash_id_db_factory({b"hash2": 4, |
698 | + b"ha\x00sh1": 5})) |
699 | |
700 | # Check look-up priorities and binary hashes |
701 | - self.assertEqual(self.store1.get_hash_id("hash1"), 2) |
702 | - self.assertEqual(self.store1.get_hash_id("hash2"), 3) |
703 | - self.assertEqual(self.store1.get_hash_id("ha\x00sh1"), 5) |
704 | + self.assertEqual(self.store1.get_hash_id(b"hash1"), 2) |
705 | + self.assertEqual(self.store1.get_hash_id(b"hash2"), 3) |
706 | + self.assertEqual(self.store1.get_hash_id(b"ha\x00sh1"), 5) |
707 | |
708 | def test_get_id_hash_using_hash_id_db(self): |
709 | """ |
710 | @@ -207,13 +207,13 @@ |
711 | to query them first, falling back to the regular db in case |
712 | the desired mapping is not found. |
713 | """ |
714 | - self.store1.add_hash_id_db(self.hash_id_db_factory({"hash1": 123})) |
715 | - self.store1.add_hash_id_db(self.hash_id_db_factory({"hash1": 999, |
716 | - "hash2": 456})) |
717 | - self.store1.set_hash_ids({"hash3": 789}) |
718 | - self.assertEqual(self.store1.get_id_hash(123), "hash1") |
719 | - self.assertEqual(self.store1.get_id_hash(456), "hash2") |
720 | - self.assertEqual(self.store1.get_id_hash(789), "hash3") |
721 | + self.store1.add_hash_id_db(self.hash_id_db_factory({b"hash1": 123})) |
722 | + self.store1.add_hash_id_db(self.hash_id_db_factory({b"hash1": 999, |
723 | + b"hash2": 456})) |
724 | + self.store1.set_hash_ids({b"hash3": 789}) |
725 | + self.assertEqual(self.store1.get_id_hash(123), b"hash1") |
726 | + self.assertEqual(self.store1.get_id_hash(456), b"hash2") |
727 | + self.assertEqual(self.store1.get_id_hash(789), b"hash3") |
728 | |
729 | def test_add_and_get_available_packages(self): |
730 | self.store1.add_available([1, 2]) |
731 | |
732 | === modified file 'landscape/schema.py' |
733 | --- landscape/schema.py 2017-03-10 12:40:17 +0000 |
734 | +++ landscape/schema.py 2017-03-21 18:33:15 +0000 |
735 | @@ -67,8 +67,8 @@ |
736 | class Bytes(object): |
737 | """A binary string.""" |
738 | def coerce(self, value): |
739 | - if not isinstance(value, str): |
740 | - raise InvalidError("%r isn't a str" % (value,)) |
741 | + if not isinstance(value, bytes): |
742 | + raise InvalidError("%r isn't a bytestring" % (value,)) |
743 | return value |
744 | |
745 | |
746 | |
747 | === modified file 'py3_ready_tests' |
748 | --- py3_ready_tests 2017-03-15 08:40:11 +0000 |
749 | +++ py3_ready_tests 2017-03-21 18:33:15 +0000 |
750 | @@ -1,2 +1,7 @@ |
751 | landscape.lib.tests |
752 | landscape.sysinfo.tests |
753 | +landscape.package.tests.test_store |
754 | +landscape.package.tests.test_reporter |
755 | + |
756 | +landscape.package.tests.test_facade |
757 | + |