Merge lp:~maxb/launchpad/devel-10306-to-db-devel-resolve-conflict into lp:launchpad/db-devel
Proposed by Max Bowsher on 2010-02-12
| Status: | Rejected |
|---|---|
| Rejected by: | Guilherme Salgado on 2010-02-12 |
| Proposed branch: | lp:~maxb/launchpad/devel-10306-to-db-devel-resolve-conflict |
| Merge into: | lp:launchpad/db-devel |
| Diff against target: | 901 lines (+110/-177), 27 files modified |

- lib/canonical/base.py (+3/-3)
- lib/canonical/buildd/slave.py (+3/-3)
- lib/canonical/launchpad/database/emailaddress.py (+3/-2)
- lib/canonical/launchpad/database/temporaryblobstorage.py (+4/-3)
- lib/canonical/launchpad/helpers.py (+4/-3)
- lib/canonical/launchpad/mailman/monkeypatches/lphandler.py (+3/-2)
- lib/canonical/launchpad/scripts/logger.py (+4/-2)
- lib/canonical/launchpad/webapp/authentication.py (+3/-3)
- lib/canonical/launchpad/webapp/login.py (+5/-4)
- lib/canonical/launchpad/webapp/tests/test_encryptor.py (+10/-5)
- lib/canonical/librarian/client.py (+7/-6)
- lib/canonical/librarian/ftests/test_storage.py (+5/-4)
- lib/canonical/librarian/storage.py (+3/-4)
- lib/canonical/librarian/tests/test_storage.py (+0/-1)
- lib/canonical/librarian/utils.py (+3/-3)
- lib/lp/archivepublisher/library.py (+4/-2)
- lib/lp/archivepublisher/publishing.py (+8/-34)
- lib/lp/archivepublisher/tests/test_librarianwrapper.py (+2/-2)
- lib/lp/archivepublisher/tests/test_pool.py (+4/-4)
- lib/lp/archivepublisher/tests/test_publisher.py (+2/-66)
- lib/lp/archiveuploader/nascentuploadfile.py (+3/-4)
- lib/lp/services/mail/sendmail.py (+7/-3)
- lib/lp/soyuz/doc/soyuz-upload.txt.disabled (+2/-2)
- lib/lp/soyuz/scripts/ftpmaster.py (+2/-2)
- lib/lp/soyuz/scripts/gina/library.py (+5/-2)
- lib/lp/soyuz/scripts/queue.py (+3/-3)
- lib/lp/soyuz/scripts/tests/test_queue.py (+8/-5)
| To merge this branch: | bzr merge lp:~maxb/launchpad/devel-10306-to-db-devel-resolve-conflict |
| Related bugs: |
| Reviewer | Review Type | Date Requested | Status |
|---|---|---|---|
| Canonical Launchpad Engineering | | 2010-02-12 | Pending |
Review via email:
Commit Message
Description of the Change
Max Bowsher (maxb) wrote:

I expect the next automatic merge from stable->db-devel to fail with a (trivial) conflict. Here is the relevant merge with the conflict resolved.
Guilherme Salgado (salgado) wrote:
I think we should wait for the automatic merge to kick in (and fail), to make sure we don't merge any devel revisions not blessed by buildbot into db-devel.
Abel Deuring (adeuring) wrote:
A branch from noodles already resolved these conflicts in db-devel revision 8990.
Unmerged revisions
- 8990. By Max Bowsher on 2010-02-12

  Manual merge of devel r10306 to db-devel, resolving conflict.
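For context, the devel revision being carried across (r10306) replaces Python's deprecated `sha` and `md5` modules with `hashlib` throughout the tree, as the diff below shows. A minimal sketch of the equivalence, assuming Python 2 (where the old modules still exist and `hashlib` accepts str input):

```python
import hashlib

# Old spelling removed by this branch (Python 2 only):
#   import sha, md5
#   sha.new('foo').hexdigest()
#   md5.new('foo').hexdigest()

# New spelling used throughout the diff; the digests are identical,
# so the migration is behaviour-preserving.
assert (hashlib.sha1('foo').hexdigest()
        == '0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33')
print(hashlib.md5('foo').hexdigest())
```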
Preview Diff
| 1 | === modified file 'lib/canonical/base.py' |
| 2 | --- lib/canonical/base.py 2009-06-25 05:30:52 +0000 |
| 3 | +++ lib/canonical/base.py 2010-02-12 02:27:18 +0000 |
| 4 | @@ -33,8 +33,8 @@ |
| 5 | This method is useful for shrinking sha1 and md5 hashes, but keeping |
| 6 | them in simple ASCII suitable for URL's etc. |
| 7 | |
| 8 | - >>> import sha, md5 |
| 9 | - >>> s = sha.new('foo').hexdigest() |
| 10 | + >>> import hashlib |
| 11 | + >>> s = hashlib.sha1('foo').hexdigest() |
| 12 | >>> s |
| 13 | '0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33' |
| 14 | >>> i = long(s, 16) |
| 15 | @@ -42,7 +42,7 @@ |
| 16 | 68123873083688143418383284816464454849230703155L |
| 17 | >>> base(i, 62) |
| 18 | '1HyPQr2xj1nmnkQXBCJXUdQoy5l' |
| 19 | - >>> base(int(md5.new('foo').hexdigest(), 16), 62) |
| 20 | + >>> base(int(hashlib.md5('foo').hexdigest(), 16), 62) |
| 21 | '5fX649Stem9fET0lD46zVe' |
| 22 | |
| 23 | A sha1 hash can be compressed to 27 characters or less |
| 24 | |
| 25 | === modified file 'lib/canonical/buildd/slave.py' |
| 26 | --- lib/canonical/buildd/slave.py 2010-01-14 02:36:36 +0000 |
| 27 | +++ lib/canonical/buildd/slave.py 2010-02-12 02:27:18 +0000 |
| 28 | @@ -8,9 +8,9 @@ |
| 29 | |
| 30 | __metaclass__ = type |
| 31 | |
| 32 | +import hashlib |
| 33 | import os |
| 34 | import re |
| 35 | -import sha |
| 36 | import urllib2 |
| 37 | import xmlrpclib |
| 38 | |
| 39 | @@ -301,7 +301,7 @@ |
| 40 | else: |
| 41 | of = open(self.cachePath(sha1sum), "w") |
| 42 | # Upped for great justice to 256k |
| 43 | - check_sum = sha.sha() |
| 44 | + check_sum = hashlib.sha1() |
| 45 | for chunk in iter(lambda: f.read(256*1024), ''): |
| 46 | of.write(chunk) |
| 47 | check_sum.update(chunk) |
| 48 | @@ -316,7 +316,7 @@ |
| 49 | |
| 50 | def storeFile(self, content): |
| 51 | """Take the provided content and store it in the file cache.""" |
| 52 | - sha1sum = sha.sha(content).hexdigest() |
| 53 | + sha1sum = hashlib.sha1(content).hexdigest() |
| 54 | present, info = self.ensurePresent(sha1sum) |
| 55 | if present: |
| 56 | return sha1sum |
| 57 | |
| 58 | === modified file 'lib/canonical/launchpad/database/emailaddress.py' |
| 59 | --- lib/canonical/launchpad/database/emailaddress.py 2009-06-25 05:30:52 +0000 |
| 60 | +++ lib/canonical/launchpad/database/emailaddress.py 2010-02-12 02:27:18 +0000 |
| 61 | @@ -11,8 +11,9 @@ |
| 62 | 'UndeletableEmailAddress', |
| 63 | ] |
| 64 | |
| 65 | + |
| 66 | +import hashlib |
| 67 | import operator |
| 68 | -import sha |
| 69 | |
| 70 | from zope.interface import implements |
| 71 | |
| 72 | @@ -80,7 +81,7 @@ |
| 73 | @property |
| 74 | def rdf_sha1(self): |
| 75 | """See `IEmailAddress`.""" |
| 76 | - return sha.new('mailto:' + self.email).hexdigest().upper() |
| 77 | + return hashlib.sha1('mailto:' + self.email).hexdigest().upper() |
| 78 | |
| 79 | |
| 80 | class EmailAddressSet: |
| 81 | |
| 82 | === modified file 'lib/canonical/launchpad/database/temporaryblobstorage.py' |
| 83 | --- lib/canonical/launchpad/database/temporaryblobstorage.py 2009-06-25 05:30:52 +0000 |
| 84 | +++ lib/canonical/launchpad/database/temporaryblobstorage.py 2010-02-12 02:27:18 +0000 |
| 85 | @@ -10,13 +10,14 @@ |
| 86 | 'TemporaryStorageManager', |
| 87 | ] |
| 88 | |
| 89 | -from cStringIO import StringIO |
| 90 | -from datetime import timedelta, datetime |
| 91 | + |
| 92 | import random |
| 93 | -import sha |
| 94 | import time |
| 95 | import thread |
| 96 | |
| 97 | +from cStringIO import StringIO |
| 98 | +from datetime import timedelta, datetime |
| 99 | + |
| 100 | from pytz import utc |
| 101 | from sqlobject import StringCol, ForeignKey |
| 102 | from zope.component import getUtility |
| 103 | |
| 104 | === modified file 'lib/canonical/launchpad/helpers.py' |
| 105 | --- lib/canonical/launchpad/helpers.py 2009-12-10 13:20:12 +0000 |
| 106 | +++ lib/canonical/launchpad/helpers.py 2010-02-12 02:27:18 +0000 |
| 107 | @@ -10,16 +10,17 @@ |
| 108 | |
| 109 | __metaclass__ = type |
| 110 | |
| 111 | -import subprocess |
| 112 | +import hashlib |
| 113 | import gettextpo |
| 114 | import os |
| 115 | import random |
| 116 | import re |
| 117 | +import subprocess |
| 118 | import tarfile |
| 119 | import warnings |
| 120 | + |
| 121 | from StringIO import StringIO |
| 122 | from difflib import unified_diff |
| 123 | -import sha |
| 124 | |
| 125 | from zope.component import getUtility |
| 126 | from zope.security.interfaces import ForbiddenAttribute |
| 127 | @@ -462,7 +463,7 @@ |
| 128 | """ |
| 129 | return '%s.msg' % ( |
| 130 | canonical.base.base( |
| 131 | - long(sha.new(message_id).hexdigest(), 16), 62)) |
| 132 | + long(hashlib.sha1(message_id).hexdigest(), 16), 62)) |
| 133 | |
| 134 | |
| 135 | def intOrZero(value): |
| 136 | |
| 137 | === modified file 'lib/canonical/launchpad/mailman/monkeypatches/lphandler.py' |
| 138 | --- lib/canonical/launchpad/mailman/monkeypatches/lphandler.py 2009-06-25 05:30:52 +0000 |
| 139 | +++ lib/canonical/launchpad/mailman/monkeypatches/lphandler.py 2010-02-12 02:27:18 +0000 |
| 140 | @@ -3,7 +3,8 @@ |
| 141 | |
| 142 | """A global pipeline handler for determining Launchpad membership.""" |
| 143 | |
| 144 | -import sha |
| 145 | + |
| 146 | +import hashlib |
| 147 | import xmlrpclib |
| 148 | |
| 149 | from Mailman import Errors |
| 150 | @@ -24,7 +25,7 @@ |
| 151 | secret = msg['x-launchpad-hash'] |
| 152 | message_id = msg['message-id'] |
| 153 | if secret and message_id: |
| 154 | - hash = sha.new(mm_cfg.LAUNCHPAD_SHARED_SECRET) |
| 155 | + hash = hashlib.sha1(mm_cfg.LAUNCHPAD_SHARED_SECRET) |
| 156 | hash.update(message_id) |
| 157 | if secret == hash.hexdigest(): |
| 158 | # Since this message is coming from Launchpad, pre-approve it. |
| 159 | |
| 160 | === modified file 'lib/canonical/launchpad/scripts/logger.py' |
| 161 | --- lib/canonical/launchpad/scripts/logger.py 2009-10-16 18:07:01 +0000 |
| 162 | +++ lib/canonical/launchpad/scripts/logger.py 2010-02-12 02:27:18 +0000 |
| 163 | @@ -25,12 +25,14 @@ |
| 164 | 'DEBUG6', 'DEBUG7', 'DEBUG8', 'DEBUG9' |
| 165 | ] |
| 166 | |
| 167 | + |
| 168 | +import hashlib |
| 169 | import logging |
| 170 | import re |
| 171 | -import sha |
| 172 | import sys |
| 173 | import traceback |
| 174 | import time |
| 175 | + |
| 176 | from optparse import OptionParser |
| 177 | from cStringIO import StringIO |
| 178 | from datetime import datetime, timedelta |
| 179 | @@ -168,7 +170,7 @@ |
| 180 | expiry = datetime.now().replace(tzinfo=utc) + timedelta(days=90) |
| 181 | try: |
| 182 | filename = base( |
| 183 | - long(sha.new(traceback).hexdigest(),16), 62 |
| 184 | + long(hashlib.sha1(traceback).hexdigest(),16), 62 |
| 185 | ) + '.txt' |
| 186 | url = librarian.remoteAddFile( |
| 187 | filename, len(traceback), StringIO(traceback), |
| 188 | |
| 189 | === modified file 'lib/canonical/launchpad/webapp/authentication.py' |
| 190 | --- lib/canonical/launchpad/webapp/authentication.py 2009-10-20 22:13:21 +0000 |
| 191 | +++ lib/canonical/launchpad/webapp/authentication.py 2010-02-12 02:27:18 +0000 |
| 192 | @@ -14,8 +14,8 @@ |
| 193 | |
| 194 | |
| 195 | import binascii |
| 196 | +import hashlib |
| 197 | import random |
| 198 | -import sha |
| 199 | |
| 200 | from contrib.oauth import OAuthRequest |
| 201 | |
| 202 | @@ -178,7 +178,7 @@ |
| 203 | plaintext = str(plaintext) |
| 204 | if salt is None: |
| 205 | salt = self.generate_salt() |
| 206 | - v = binascii.b2a_base64(sha.new(plaintext + salt).digest() + salt) |
| 207 | + v = binascii.b2a_base64(hashlib.sha1(plaintext + salt).digest() + salt) |
| 208 | return v[:-1] |
| 209 | |
| 210 | def validate(self, plaintext, encrypted): |
| 211 | @@ -191,7 +191,7 @@ |
| 212 | return False |
| 213 | salt = ref[20:] |
| 214 | v = binascii.b2a_base64( |
| 215 | - sha.new(plaintext + salt).digest() + salt)[:-1] |
| 216 | + hashlib.sha1(plaintext + salt).digest() + salt)[:-1] |
| 217 | pw1 = (v or '').strip() |
| 218 | pw2 = (encrypted or '').strip() |
| 219 | return pw1 == pw2 |
| 220 | |
| 221 | === modified file 'lib/canonical/launchpad/webapp/login.py' |
| 222 | --- lib/canonical/launchpad/webapp/login.py 2010-01-14 13:25:34 +0000 |
| 223 | +++ lib/canonical/launchpad/webapp/login.py 2010-02-12 02:27:18 +0000 |
| 224 | @@ -6,10 +6,11 @@ |
| 225 | __metaclass__ = type |
| 226 | |
| 227 | import cgi |
| 228 | +import hashlib |
| 229 | +import random |
| 230 | import urllib |
| 231 | + |
| 232 | from datetime import datetime, timedelta |
| 233 | -import md5 |
| 234 | -import random |
| 235 | |
| 236 | from BeautifulSoup import UnicodeDammit |
| 237 | |
| 238 | @@ -162,7 +163,7 @@ |
| 239 | expected = self.request.form.get(self.captcha_hash) |
| 240 | submitted = self.request.form.get(self.captcha_submission) |
| 241 | if expected is not None and submitted is not None: |
| 242 | - return md5.new(submitted).hexdigest() == expected |
| 243 | + return hashlib.md5(submitted).hexdigest() == expected |
| 244 | return False |
| 245 | |
| 246 | @cachedproperty |
| 247 | @@ -182,7 +183,7 @@ |
| 248 | |
| 249 | The hash is the value we put in the form for later comparison. |
| 250 | """ |
| 251 | - return md5.new(str(self.captcha_answer)).hexdigest() |
| 252 | + return hashlib.md5(str(self.captcha_answer)).hexdigest() |
| 253 | |
| 254 | @property |
| 255 | def captcha_problem(self): |
| 256 | |
| 257 | === modified file 'lib/canonical/launchpad/webapp/tests/test_encryptor.py' |
| 258 | --- lib/canonical/launchpad/webapp/tests/test_encryptor.py 2009-06-25 05:30:52 +0000 |
| 259 | +++ lib/canonical/launchpad/webapp/tests/test_encryptor.py 2010-02-12 02:27:18 +0000 |
| 260 | @@ -3,15 +3,19 @@ |
| 261 | |
| 262 | __metaclass__ = type |
| 263 | |
| 264 | + |
| 265 | +import binascii |
| 266 | +import hashlib |
| 267 | import unittest |
| 268 | -import sha |
| 269 | -import binascii |
| 270 | + |
| 271 | from zope.component import getUtility |
| 272 | from zope.app.testing import ztapi |
| 273 | from zope.app.testing.placelesssetup import PlacelessSetup |
| 274 | + |
| 275 | from canonical.launchpad.webapp.authentication import SSHADigestEncryptor |
| 276 | from canonical.launchpad.interfaces import IPasswordEncryptor |
| 277 | |
| 278 | + |
| 279 | class TestSSHADigestEncryptor(PlacelessSetup, unittest.TestCase): |
| 280 | |
| 281 | def setUp(self): |
| 282 | @@ -26,7 +30,7 @@ |
| 283 | self.failIfEqual(encrypted1, encrypted2) |
| 284 | salt = encrypted1[20:] |
| 285 | v = binascii.b2a_base64( |
| 286 | - sha.new('motorhead' + salt).digest() + salt |
| 287 | + hashlib.sha1('motorhead' + salt).digest() + salt |
| 288 | )[:-1] |
| 289 | return (v == encrypted1) |
| 290 | |
| 291 | @@ -41,8 +45,9 @@ |
| 292 | encrypted2 = encryptor.encrypt(u'motorhead') |
| 293 | self.failIfEqual(encrypted1, encrypted2) |
| 294 | salt = encrypted1[20:] |
| 295 | - v = binascii.b2a_base64(sha.new('motorhead' + salt).digest() + salt)[:-1] |
| 296 | - return (v == encrypted1) |
| 297 | + v = binascii.b2a_base64( |
| 298 | + hashlib.sha1('motorhead' + salt).digest() + salt)[:-1] |
| 299 | + return v == encrypted1 |
| 300 | |
| 301 | def test_unicode_validate(self): |
| 302 | encryptor = getUtility(IPasswordEncryptor) |
| 303 | |
| 304 | === modified file 'lib/canonical/librarian/client.py' |
| 305 | --- lib/canonical/librarian/client.py 2010-01-11 18:06:23 +0000 |
| 306 | +++ lib/canonical/librarian/client.py 2010-02-12 02:27:18 +0000 |
| 307 | @@ -10,16 +10,17 @@ |
| 308 | 'RestrictedLibrarianClient', |
| 309 | ] |
| 310 | |
| 311 | -import md5 |
| 312 | + |
| 313 | +import hashlib |
| 314 | import re |
| 315 | -import sha |
| 316 | import socket |
| 317 | -from socket import SOCK_STREAM, AF_INET |
| 318 | -from select import select |
| 319 | import time |
| 320 | import threading |
| 321 | import urllib |
| 322 | import urllib2 |
| 323 | + |
| 324 | +from select import select |
| 325 | +from socket import SOCK_STREAM, AF_INET |
| 326 | from urlparse import urljoin |
| 327 | |
| 328 | from storm.store import Store |
| 329 | @@ -130,8 +131,8 @@ |
| 330 | self._sendLine('') |
| 331 | |
| 332 | # Prepare to the upload the file |
| 333 | - shaDigester = sha.sha() |
| 334 | - md5Digester = md5.md5() |
| 335 | + shaDigester = hashlib.sha1() |
| 336 | + md5Digester = hashlib.md5() |
| 337 | bytesWritten = 0 |
| 338 | |
| 339 | # Read in and upload the file 64kb at a time, by using the two-arg |
| 340 | |
| 341 | === modified file 'lib/canonical/librarian/ftests/test_storage.py' |
| 342 | --- lib/canonical/librarian/ftests/test_storage.py 2009-06-25 05:30:52 +0000 |
| 343 | +++ lib/canonical/librarian/ftests/test_storage.py 2010-02-12 02:27:18 +0000 |
| 344 | @@ -1,7 +1,7 @@ |
| 345 | # Copyright 2009 Canonical Ltd. This software is licensed under the |
| 346 | # GNU Affero General Public License version 3 (see the file LICENSE). |
| 347 | |
| 348 | -import sha |
| 349 | +import hashlib |
| 350 | import shutil |
| 351 | import tempfile |
| 352 | import unittest |
| 353 | @@ -13,6 +13,7 @@ |
| 354 | from canonical.launchpad.database import LibraryFileContent, LibraryFileAlias |
| 355 | from canonical.testing import LaunchpadZopelessLayer |
| 356 | |
| 357 | + |
| 358 | class LibrarianStorageDBTests(unittest.TestCase): |
| 359 | layer = LaunchpadZopelessLayer |
| 360 | |
| 361 | @@ -26,7 +27,7 @@ |
| 362 | |
| 363 | def test_addFile(self): |
| 364 | data = 'data ' * 50 |
| 365 | - digest = sha.sha(data).hexdigest() |
| 366 | + digest = hashlib.sha1(data).hexdigest() |
| 367 | newfile = self.storage.startAddFile('file1', len(data)) |
| 368 | newfile.srcDigest = digest |
| 369 | newfile.append(data) |
| 370 | @@ -36,7 +37,7 @@ |
| 371 | def test_addFiles_identical(self): |
| 372 | # Start adding two files with identical data |
| 373 | data = 'data ' * 5000 |
| 374 | - digest = sha.sha(data).hexdigest() |
| 375 | + digest = hashlib.sha1(data).hexdigest() |
| 376 | newfile1 = self.storage.startAddFile('file1', len(data)) |
| 377 | newfile2 = self.storage.startAddFile('file2', len(data)) |
| 378 | newfile1.append(data) |
| 379 | @@ -63,7 +64,7 @@ |
| 380 | def test_alias(self): |
| 381 | # Add a file (and so also add an alias) |
| 382 | data = 'data ' * 50 |
| 383 | - digest = sha.sha(data).hexdigest() |
| 384 | + digest = hashlib.sha1(data).hexdigest() |
| 385 | newfile = self.storage.startAddFile('file1', len(data)) |
| 386 | newfile.mimetype = 'text/unknown' |
| 387 | newfile.append(data) |
| 388 | |
| 389 | === modified file 'lib/canonical/librarian/storage.py' |
| 390 | --- lib/canonical/librarian/storage.py 2009-07-17 00:26:05 +0000 |
| 391 | +++ lib/canonical/librarian/storage.py 2010-02-12 02:27:18 +0000 |
| 392 | @@ -4,9 +4,8 @@ |
| 393 | __metaclass__ = type |
| 394 | |
| 395 | import os |
| 396 | -import md5 |
| 397 | -import sha |
| 398 | import errno |
| 399 | +import hashlib |
| 400 | import shutil |
| 401 | import tempfile |
| 402 | |
| 403 | @@ -89,8 +88,8 @@ |
| 404 | tmpfile, tmpfilepath = tempfile.mkstemp(dir=self.storage.incoming) |
| 405 | self.tmpfile = os.fdopen(tmpfile, 'w') |
| 406 | self.tmpfilepath = tmpfilepath |
| 407 | - self.shaDigester = sha.new() |
| 408 | - self.md5Digester = md5.new() |
| 409 | + self.shaDigester = hashlib.sha1() |
| 410 | + self.md5Digester = hashlib.md5() |
| 411 | |
| 412 | def append(self, data): |
| 413 | self.tmpfile.write(data) |
| 414 | |
| 415 | === modified file 'lib/canonical/librarian/tests/test_storage.py' |
| 416 | --- lib/canonical/librarian/tests/test_storage.py 2009-07-19 04:41:14 +0000 |
| 417 | +++ lib/canonical/librarian/tests/test_storage.py 2010-02-12 02:27:18 +0000 |
| 418 | @@ -2,7 +2,6 @@ |
| 419 | # GNU Affero General Public License version 3 (see the file LICENSE). |
| 420 | |
| 421 | import os |
| 422 | -import sha |
| 423 | import shutil |
| 424 | import tempfile |
| 425 | import unittest |
| 426 | |
| 427 | === modified file 'lib/canonical/librarian/utils.py' |
| 428 | --- lib/canonical/librarian/utils.py 2009-06-25 05:30:52 +0000 |
| 429 | +++ lib/canonical/librarian/utils.py 2010-02-12 02:27:18 +0000 |
| 430 | @@ -2,7 +2,6 @@ |
| 431 | # GNU Affero General Public License version 3 (see the file LICENSE). |
| 432 | |
| 433 | __metaclass__ = type |
| 434 | - |
| 435 | __all__ = [ |
| 436 | 'copy_and_close', |
| 437 | 'filechunks', |
| 438 | @@ -10,7 +9,8 @@ |
| 439 | 'sha1_from_path', |
| 440 | ] |
| 441 | |
| 442 | -import sha |
| 443 | + |
| 444 | +import hashlib |
| 445 | |
| 446 | MEGABYTE = 1024*1024 |
| 447 | |
| 448 | @@ -39,7 +39,7 @@ |
| 449 | def sha1_from_path(path): |
| 450 | """Return the hexdigest SHA1 for the contents of the path.""" |
| 451 | the_file = open(path) |
| 452 | - the_hash = sha.new() |
| 453 | + the_hash = hashlib.sha1() |
| 454 | |
| 455 | for chunk in filechunks(the_file): |
| 456 | the_hash.update(chunk) |
| 457 | |
| 458 | === modified file 'lib/lp/archivepublisher/library.py' |
| 459 | --- lib/lp/archivepublisher/library.py 2009-06-24 23:28:16 +0000 |
| 460 | +++ lib/lp/archivepublisher/library.py 2010-02-12 02:27:18 +0000 |
| 461 | @@ -113,7 +113,9 @@ |
| 462 | return os.link(path, archive) |
| 463 | |
| 464 | if __name__ == '__main__': |
| 465 | - import os, sys, sha |
| 466 | + import hashlib |
| 467 | + import os |
| 468 | + import sys |
| 469 | |
| 470 | lib = Librarian('localhost', 9090, 8000, "/tmp/cache") |
| 471 | |
| 472 | @@ -124,7 +126,7 @@ |
| 473 | lib.upload_port) |
| 474 | fileobj = open(name, 'rb') |
| 475 | size = os.stat(name).st_size |
| 476 | - digest = sha.sha(open(name, 'rb').read()).hexdigest() |
| 477 | + digest = hashlib.sha1(open(name, 'rb').read()).hexdigest() |
| 478 | |
| 479 | fileid, filealias = lib.addFile(name, size, fileobj, |
| 480 | contentType='test/test', |
| 481 | |
| 482 | === modified file 'lib/lp/archivepublisher/publishing.py' |
| 483 | --- lib/lp/archivepublisher/publishing.py 2009-12-14 17:32:06 +0000 |
| 484 | +++ lib/lp/archivepublisher/publishing.py 2010-02-12 02:27:18 +0000 |
| 485 | @@ -9,12 +9,11 @@ |
| 486 | |
| 487 | __metaclass__ = type |
| 488 | |
| 489 | -import apt_pkg |
| 490 | -from datetime import datetime |
| 491 | +import hashlib |
| 492 | import logging |
| 493 | -from md5 import md5 |
| 494 | import os |
| 495 | -from sha import sha |
| 496 | + |
| 497 | +from datetime import datetime |
| 498 | |
| 499 | from zope.component import getUtility |
| 500 | |
| 501 | @@ -59,23 +58,6 @@ |
| 502 | Architecture: %s |
| 503 | """ |
| 504 | |
| 505 | -class sha256: |
| 506 | - """Encapsulates apt_pkg.sha256sum as expected by publishing. |
| 507 | - |
| 508 | - It implements '__init__' and 'hexdigest' methods from PEP-247, which are |
| 509 | - the only ones required in soyuz-publishing-system. |
| 510 | - |
| 511 | - It's a work around for broken Crypto.Hash.SHA256. See further information |
| 512 | - in bug #131503. |
| 513 | - """ |
| 514 | - def __init__(self, content): |
| 515 | - self._sum = apt_pkg.sha256sum(content) |
| 516 | - |
| 517 | - def hexdigest(self): |
| 518 | - """Return the hexdigest produced by apt_pkg.sha256sum.""" |
| 519 | - return self._sum |
| 520 | - |
| 521 | - |
| 522 | def reorder_components(components): |
| 523 | """Return a list of the components provided. |
| 524 | |
| 525 | @@ -518,13 +500,13 @@ |
| 526 | f.write("MD5Sum:\n") |
| 527 | all_files = sorted(list(all_files), key=os.path.dirname) |
| 528 | for file_name in all_files: |
| 529 | - self._writeSumLine(full_name, f, file_name, md5) |
| 530 | + self._writeSumLine(full_name, f, file_name, hashlib.md5) |
| 531 | f.write("SHA1:\n") |
| 532 | for file_name in all_files: |
| 533 | - self._writeSumLine(full_name, f, file_name, sha) |
| 534 | + self._writeSumLine(full_name, f, file_name, hashlib.sha1) |
| 535 | f.write("SHA256:\n") |
| 536 | for file_name in all_files: |
| 537 | - self._writeSumLine(full_name, f, file_name, sha256) |
| 538 | + self._writeSumLine(full_name, f, file_name, hashlib.sha256) |
| 539 | |
| 540 | f.close() |
| 541 | |
| 542 | @@ -607,16 +589,8 @@ |
| 543 | |
| 544 | in_file = open(full_name, 'r') |
| 545 | try: |
| 546 | - # XXX cprov 20080704 bug=243630,269014: Workaround for hardy's |
| 547 | - # python-apt. If it receives a file object as an argument instead |
| 548 | - # of the file contents as a string, it will generate the correct |
| 549 | - # SHA256. |
| 550 | - if sum_form == sha256: |
| 551 | - contents = in_file |
| 552 | - length = os.stat(full_name).st_size |
| 553 | - else: |
| 554 | - contents = in_file.read() |
| 555 | - length = len(contents) |
| 556 | + contents = in_file.read() |
| 557 | + length = len(contents) |
| 558 | checksum = sum_form(contents).hexdigest() |
| 559 | finally: |
| 560 | in_file.close() |
| 561 | |
| 562 | === modified file 'lib/lp/archivepublisher/tests/test_librarianwrapper.py' |
| 563 | --- lib/lp/archivepublisher/tests/test_librarianwrapper.py 2009-06-24 23:28:16 +0000 |
| 564 | +++ lib/lp/archivepublisher/tests/test_librarianwrapper.py 2010-02-12 02:27:18 +0000 |
| 565 | @@ -5,9 +5,9 @@ |
| 566 | |
| 567 | __metaclass__ = type |
| 568 | |
| 569 | +import hashlib |
| 570 | import os |
| 571 | import shutil |
| 572 | -import sha |
| 573 | import sys |
| 574 | import unittest |
| 575 | |
| 576 | @@ -47,7 +47,7 @@ |
| 577 | |
| 578 | fileobj = open(path, 'rb') |
| 579 | size = os.stat(path).st_size |
| 580 | - digest = sha.sha(open(path, 'rb').read()).hexdigest() |
| 581 | + digest = hashlib.sha1(open(path, 'rb').read()).hexdigest() |
| 582 | |
| 583 | ## Use Fake Librarian class |
| 584 | uploader = FakeUploadClient() |
| 585 | |
| 586 | === modified file 'lib/lp/archivepublisher/tests/test_pool.py' |
| 587 | --- lib/lp/archivepublisher/tests/test_pool.py 2009-06-24 23:28:16 +0000 |
| 588 | +++ lib/lp/archivepublisher/tests/test_pool.py 2010-02-12 02:27:18 +0000 |
| 589 | @@ -5,13 +5,13 @@ |
| 590 | |
| 591 | __metaclass__ = type |
| 592 | |
| 593 | - |
| 594 | +import hashlib |
| 595 | import os |
| 596 | -import sha |
| 597 | import sys |
| 598 | import shutil |
| 599 | +import unittest |
| 600 | + |
| 601 | from tempfile import mkdtemp |
| 602 | -import unittest |
| 603 | |
| 604 | from lp.archivepublisher.tests.util import FakeLogger |
| 605 | from lp.archivepublisher.diskpool import DiskPool, poolify |
| 606 | @@ -44,7 +44,7 @@ |
| 607 | def addToPool(self, component): |
| 608 | return self.pool.addFile( |
| 609 | component, self.sourcename, self.filename, |
| 610 | - sha.sha(self.contents).hexdigest(), MockFile(self.contents)) |
| 611 | + hashlib.sha1(self.contents).hexdigest(), MockFile(self.contents)) |
| 612 | |
| 613 | def removeFromPool(self, component): |
| 614 | return self.pool.removeFile(component, self.sourcename, self.filename) |
| 615 | |
| 616 | === modified file 'lib/lp/archivepublisher/tests/test_publisher.py' |
| 617 | --- lib/lp/archivepublisher/tests/test_publisher.py 2009-12-21 18:05:27 +0000 |
| 618 | +++ lib/lp/archivepublisher/tests/test_publisher.py 2010-02-12 02:27:18 +0000 |
| 619 | @@ -8,6 +8,7 @@ |
| 620 | |
| 621 | import bz2 |
| 622 | import gzip |
| 623 | +import hashlib |
| 624 | import os |
| 625 | import shutil |
| 626 | import stat |
| 627 | @@ -20,8 +21,7 @@ |
| 628 | |
| 629 | from lp.archivepublisher.config import getPubConfig |
| 630 | from lp.archivepublisher.diskpool import DiskPool |
| 631 | -from lp.archivepublisher.publishing import ( |
| 632 | - Publisher, getPublisher, sha256) |
| 633 | +from lp.archivepublisher.publishing import Publisher, getPublisher |
| 634 | from canonical.config import config |
| 635 | from canonical.database.constants import UTC_NOW |
| 636 | from canonical.launchpad.ftests.keys_for_tests import gpgkeysdir |
| 637 | @@ -745,70 +745,6 @@ |
| 638 | self.assertReleaseFileRequested( |
| 639 | publisher, 'breezy-autotest', component, dist) |
| 640 | |
| 641 | - def testAptSHA256(self): |
| 642 | - """Test issues with python-apt in Ubuntu/hardy. |
| 643 | - |
| 644 | - This test only runs on Ubuntu/hardy systems. |
| 645 | - |
| 646 | - The version of python-apt in Ubuntu/hardy has problems with |
| 647 | - contents containing '\0' character. |
| 648 | - |
| 649 | - The documented workaround for it is passing the original |
| 650 | - file-descriptor to apt_pkg.sha256sum(), instead of its contents. |
| 651 | - |
| 652 | - The python-apt version in Ubuntu/Intrepid has a fix for this issue, |
| 653 | - but it already has many other features that makes a backport |
| 654 | - practically unfeasible. That's mainly why this 'bug' is documented |
| 655 | - as a LP test, the current code was modified to cope with it. |
| 656 | - |
| 657 | - Once the issue with python-apt is gone, either by having a backport |
| 658 | - available in hardy or a production upgrade, this test will fail. At |
| 659 | - that point we will be able to revert the affected code and remove |
| 660 | - this test, restoring the balance of the force. |
| 661 | - |
| 662 | - See https://bugs.edge.launchpad.net/soyuz/+bug/243630 and |
| 663 | - https://bugs.edge.launchpad.net/soyuz/+bug/269014. |
| 664 | - """ |
| 665 | - # XXX cprov 20090218 bug-279248: when hardy's apt gets fixed by a |
| 666 | - # SRU, this test will fail in PQM/Buildbot. Then we should change |
| 667 | - # the actual code for passing file descriptors instead of text to |
| 668 | - # apt (it will perform better this way) and obviously remove this |
| 669 | - # test. |
| 670 | - |
| 671 | - # Skip this test if it's not being run on Ubuntu/hardy. |
| 672 | - lsb_info = get_lsb_information() |
| 673 | - if (lsb_info.get('ID') != 'Ubuntu' or |
| 674 | - lsb_info.get('CODENAME') != 'hardy'): |
| 675 | - return |
| 676 | - |
| 677 | - def _getSHA256(content): |
| 678 | - """Return checksums for the given content. |
| 679 | - |
| 680 | - Return a tuple containing the checksum corresponding to the |
| 681 | - given content (as string) and a file containing the same string. |
| 682 | - """ |
| 683 | - # Write the given content in a tempfile. |
| 684 | - test_filepath = tempfile.mktemp() |
| 685 | - test_file = open(test_filepath, 'w') |
| 686 | - test_file.write(content) |
| 687 | - test_file.close() |
| 688 | - # Generate the checksums for the two sources. |
| 689 | - text = sha256(content).hexdigest() |
| 690 | - file = sha256(open(test_filepath)).hexdigest() |
| 691 | - # Remove the tempfile. |
| 692 | - os.unlink(test_filepath) |
| 693 | - return text, file |
| 694 | - |
| 695 | - # Apt does the right thing for ordinary strings, both, file and text |
| 696 | - # checksums are identical. |
| 697 | - text, file = _getSHA256("foobar") |
| 698 | - self.assertEqual(text, file) |
| 699 | - |
| 700 | - # On the other hand, there is a mismatch for strings containing '\0' |
| 701 | - text, file = _getSHA256("foo\0bar") |
| 702 | - self.assertNotEqual( |
| 703 | - text, file, "Python-apt no longer creates bad SHA256 sums.") |
| 704 | - |
| 705 | def _getReleaseFileOrigin(self, contents): |
| 706 | origin_header = 'Origin: ' |
| 707 | [origin_line] = [ |
| 708 | |
| 709 | === modified file 'lib/lp/archiveuploader/nascentuploadfile.py' |
| 710 | --- lib/lp/archiveuploader/nascentuploadfile.py 2009-12-14 13:49:03 +0000 |
| 711 | +++ lib/lp/archiveuploader/nascentuploadfile.py 2010-02-12 02:27:18 +0000 |
| 712 | @@ -21,9 +21,8 @@ |
| 713 | |
| 714 | import apt_inst |
| 715 | import apt_pkg |
| 716 | +import hashlib |
| 717 | import os |
| 718 | -import md5 |
| 719 | -import sha |
| 720 | import subprocess |
| 721 | import sys |
| 722 | import time |
| 723 | @@ -214,8 +213,8 @@ |
| 724 | |
| 725 | # Read in the file and compute its md5 and sha1 checksums and remember |
| 726 | # the size of the file as read-in. |
| 727 | - digest = md5.md5() |
| 728 | - sha_cksum = sha.sha() |
| 729 | + digest = hashlib.md5() |
| 730 | + sha_cksum = hashlib.sha1() |
| 731 | ckfile = open(self.filepath, "r") |
| 732 | size = 0 |
| 733 | for chunk in filechunks(ckfile): |
| 734 | |
| 735 | === modified file 'lib/lp/services/mail/sendmail.py' |
| 736 | --- lib/lp/services/mail/sendmail.py 2010-02-09 01:31:05 +0000 |
| 737 | +++ lib/lp/services/mail/sendmail.py 2010-02-12 02:27:18 +0000 |
| 738 | @@ -22,10 +22,14 @@ |
| 739 | 'simple_sendmail', |
| 740 | 'simple_sendmail_from_person', |
| 741 | 'raw_sendmail', |
| 742 | - 'validate_message'] |
| 743 | + 'validate_message', |
| 744 | + ] |
| 745 | + |
| 746 | + |
| 747 | +import hashlib |
| 748 | +import sets |
| 749 | |
| 750 | from binascii import b2a_qp |
| 751 | -import sha |
| 752 | from email.Encoders import encode_base64 |
| 753 | from email.Utils import getaddresses, make_msgid, formatdate, formataddr |
| 754 | from email.Message import Message |
| 755 | @@ -374,7 +378,7 @@ |
| 756 | # helps security, but still exposes us to a replay attack; we consider the |
| 757 | # risk low. |
| 758 | del message['X-Launchpad-Hash'] |
| 759 | - hash = sha.new(config.mailman.shared_secret) |
| 760 | + hash = hashlib.sha1(config.mailman.shared_secret) |
| 761 | hash.update(str(message['message-id'])) |
| 762 | message['X-Launchpad-Hash'] = hash.hexdigest() |
| 763 | |
| 764 | |
| 765 | === modified file 'lib/lp/soyuz/doc/soyuz-upload.txt.disabled' |
| 766 | --- lib/lp/soyuz/doc/soyuz-upload.txt.disabled 2009-05-13 14:05:27 +0000 |
| 767 | +++ lib/lp/soyuz/doc/soyuz-upload.txt.disabled 2010-02-12 02:27:18 +0000 |
| 768 | @@ -144,9 +144,9 @@ |
| 769 | each FTP session. Below we ensure that, and also that the content |
| 770 | of these files match the uploaded ones. |
| 771 | |
| 772 | - >>> import md5 |
| 773 | + >>> import hashlib |
| 774 | >>> def get_md5(filename): |
| 775 | - ... return md5.new(open(filename).read()).digest() |
| 776 | + ... return hashlib.md5(open(filename).read()).digest() |
| 777 | |
| 778 | >>> def get_upload_dir(num, dir=incoming_dir): |
| 779 | ... """Return the path to the upload, if found in the dir.""" |
| 780 | |
| 781 | === modified file 'lib/lp/soyuz/scripts/ftpmaster.py' |
| 782 | --- lib/lp/soyuz/scripts/ftpmaster.py 2009-12-13 11:55:40 +0000 |
| 783 | +++ lib/lp/soyuz/scripts/ftpmaster.py 2010-02-12 02:27:18 +0000 |
| 784 | @@ -21,7 +21,7 @@ |
| 785 | |
| 786 | import apt_pkg |
| 787 | import commands |
| 788 | -import md5 |
| 789 | +import hashlib |
| 790 | import os |
| 791 | import stat |
| 792 | import sys |
| 793 | @@ -836,7 +836,7 @@ |
| 794 | @classmethod |
| 795 | def generateMD5Sum(self, filename): |
| 796 | file_handle = open(filename) |
| 797 | - md5sum = md5.md5(file_handle.read()).hexdigest() |
| 798 | + md5sum = hashlib.md5(file_handle.read()).hexdigest() |
| 799 | file_handle.close() |
| 800 | return md5sum |
| 801 | |
| 802 | |
| 803 | === modified file 'lib/lp/soyuz/scripts/gina/library.py' |
| 804 | --- lib/lp/soyuz/scripts/gina/library.py 2009-06-25 04:06:00 +0000 |
| 805 | +++ lib/lp/soyuz/scripts/gina/library.py 2010-02-12 02:27:18 +0000 |
| 806 | @@ -5,12 +5,15 @@ |
| 807 | |
| 808 | __metaclass__ = type |
| 809 | |
| 810 | + |
| 811 | +import hashlib |
| 812 | import os |
| 813 | -import sha |
| 814 | |
| 815 | from zope.component import getUtility |
| 816 | |
| 817 | from canonical.launchpad.interfaces.librarian import ILibraryFileAliasSet |
| 818 | + |
| 819 | + |
| 820 | def _libType(fname): |
| 821 | if fname.endswith(".dsc"): |
| 822 | return "text/x-debian-source-package" |
| 823 | @@ -41,7 +44,7 @@ |
| 824 | def checkLibraryForFile(path, filename): |
| 825 | fullpath = os.path.join(path, filename) |
| 826 | assert os.path.exists(fullpath) |
| 827 | - digester = sha.sha() |
| 828 | + digester = hashlib.sha1() |
| 829 | openfile = open(fullpath, "r") |
| 830 | for chunk in iter(lambda: openfile.read(1024*4), ''): |
| 831 | digester.update(chunk) |
| 832 | |
| 833 | === modified file 'lib/lp/soyuz/scripts/queue.py' |
| 834 | --- lib/lp/soyuz/scripts/queue.py 2009-08-28 06:39:38 +0000 |
| 835 | +++ lib/lp/soyuz/scripts/queue.py 2010-02-12 02:27:18 +0000 |
| 836 | @@ -9,7 +9,6 @@ |
| 837 | # as Launchpad contains lots of queues. |
| 838 | |
| 839 | __metaclass__ = type |
| 840 | - |
| 841 | __all__ = [ |
| 842 | 'CommandRunner', |
| 843 | 'CommandRunnerError', |
| 844 | @@ -17,11 +16,12 @@ |
| 845 | 'name_queue_map' |
| 846 | ] |
| 847 | |
| 848 | + |
| 849 | import errno |
| 850 | +import hashlib |
| 851 | import pytz |
| 852 | |
| 853 | from datetime import datetime |
| 854 | -from sha import sha |
| 855 | |
| 856 | from zope.component import getUtility |
| 857 | |
| 858 | @@ -420,7 +420,7 @@ |
| 859 | libfile.close() |
| 860 | else: |
| 861 | # Check sha against existing file (bug #67014) |
| 862 | - existing_sha = sha() |
| 863 | + existing_sha = hashlib.sha1() |
| 864 | for chunk in filechunks(existing_file): |
| 865 | existing_sha.update(chunk) |
| 866 | existing_file.close() |
| 867 | |
| 868 | === modified file 'lib/lp/soyuz/scripts/tests/test_queue.py' |
| 869 | --- lib/lp/soyuz/scripts/tests/test_queue.py 2009-12-13 11:55:40 +0000 |
| 870 | +++ lib/lp/soyuz/scripts/tests/test_queue.py 2010-02-12 02:27:18 +0000 |
| 871 | @@ -4,14 +4,17 @@ |
| 872 | """queue tool base class tests.""" |
| 873 | |
| 874 | __metaclass__ = type |
| 875 | - |
| 876 | -__all__ = ['upload_bar_source'] |
| 877 | - |
| 878 | +__all__ = [ |
| 879 | + 'upload_bar_source', |
| 880 | + ] |
| 881 | + |
| 882 | + |
| 883 | +import hashlib |
| 884 | import os |
| 885 | import shutil |
| 886 | import tempfile |
| 887 | + |
| 888 | from unittest import TestCase, TestLoader |
| 889 | -from sha import sha |
| 890 | |
| 891 | from zope.component import getUtility |
| 892 | from zope.security.proxy import removeSecurityProxy |
| 893 | @@ -929,7 +932,7 @@ |
| 894 | |
| 895 | def _getsha1(self, filename): |
| 896 | """Return a sha1 hex digest of a file""" |
| 897 | - file_sha = sha() |
| 898 | + file_sha = hashlib.sha1() |
| 899 | opened_file = open(filename,"r") |
| 900 | for chunk in filechunks(opened_file): |
| 901 | file_sha.update(chunk) |
