Merge lp:~nataliabidart/magicicada-server/no-more-storms-3 into lp:magicicada-server

Proposed by Natalia Bidart
Status: Merged
Approved by: Natalia Bidart
Approved revision: 61
Merged at revision: 53
Proposed branch: lp:~nataliabidart/magicicada-server/no-more-storms-3
Merge into: lp:magicicada-server
Diff against target: 1338 lines (+161/-282)
24 files modified
src/backends/db/errors.py (+2/-4)
src/backends/filesync/admin.py (+2/-2)
src/backends/filesync/models.py (+21/-21)
src/backends/filesync/services.py (+19/-37)
src/backends/filesync/tests/test_dao.py (+9/-26)
src/backends/filesync/tests/test_gateway.py (+17/-81)
src/backends/filesync/tests/test_models.py (+11/-10)
src/backends/filesync/tests/test_resthelper.py (+4/-5)
src/backends/filesync/tests/test_services.py (+6/-16)
src/backends/filesync/tests/test_utils.py (+8/-11)
src/backends/filesync/tests/testcase.py (+7/-7)
src/backends/filesync/utils.py (+5/-17)
src/backends/txlog/testing/__init__.py (+2/-2)
src/backends/txlog/tests/test_utils.py (+24/-23)
src/backends/utils.py (+5/-8)
src/magicicada/settings/__init__.py (+1/-3)
src/magicicada/wsgi.py (+1/-1)
src/monitoring/dump.py (+3/-3)
src/monitoring/reactor.py (+2/-0)
src/monitoring/stats_worker.py (+1/-0)
src/monitoring/tests/test_dump.py (+2/-0)
src/monitoring/tests/test_reactor.py (+2/-0)
src/monitoring/tests/test_stats_worker.py (+2/-0)
src/server/tests/test_server.py (+5/-5)
To merge this branch: bzr merge lp:~nataliabidart/magicicada-server/no-more-storms-3
Reviewer Review Type Date Requested Status
Facundo Batista Approve
Review via email: mp+288052@code.launchpad.net

Commit message

- More datetime.utcnow() replacements.
- Completely removed the use_uuid flag for files.

To post a comment you must log in.
56. By Natalia Bidart

One more lint issue fixed.

57. By Natalia Bidart

Missing import.

Revision history for this message
Facundo Batista (facundo) wrote :

Remember the transition is done!!

review: Approve
58. By Natalia Bidart

Completely removed the use_uuid flag for files.

59. By Natalia Bidart

Fixed test failures.

60. By Natalia Bidart

More tests fixes.

61. By Natalia Bidart

Fixed remaining failing tests.

Preview Diff

[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1=== modified file 'src/backends/db/errors.py'
2--- src/backends/db/errors.py 2016-02-25 03:34:42 +0000
3+++ src/backends/db/errors.py 2016-03-06 23:21:42 +0000
4@@ -20,6 +20,8 @@
5
6 from __future__ import unicode_literals
7
8+from django.db import IntegrityError # NOQA
9+
10
11 class RetryLimitReached(Exception):
12 """Raised when there have been to many retries."""
13@@ -29,9 +31,5 @@
14 self.extra_info = extra_info
15
16
17-class IntegrityError(Exception):
18- """Raised when there has been an integrity error."""
19-
20-
21 class NoTimeoutTracer(Exception):
22 """No timeout tracer was registered"""
23
24=== modified file 'src/backends/filesync/admin.py'
25--- src/backends/filesync/admin.py 2015-09-27 21:25:27 +0000
26+++ src/backends/filesync/admin.py 2016-03-06 23:21:42 +0000
27@@ -36,7 +36,7 @@
28 self.filter = filter
29
30 def _find_users(self):
31- """Perform storm query based on current filter."""
32+ """Perform query based on current filter."""
33 store = get_filesync_store()
34 conditions = []
35 if self.filter is not None:
36@@ -67,7 +67,7 @@
37
38 @fsync_readonly
39 def is_empty(self):
40- """Return True if there are any results."""
41+ """Return True if there are no results."""
42 return self._find_users().is_empty()
43
44 @fsync_readonly
45
46=== modified file 'src/backends/filesync/models.py'
47--- src/backends/filesync/models.py 2015-10-16 11:24:51 +0000
48+++ src/backends/filesync/models.py 2016-03-06 23:21:42 +0000
49@@ -20,13 +20,13 @@
50
51 from __future__ import unicode_literals
52
53-import datetime
54 import os
55 import re
56 import uuid
57
58 from types import NoneType
59
60+from django.utils.timezone import now
61 from storm.locals import (Int, DateTime, Unicode, RawStr, Reference, SQL,
62 Storm, Store, Bool, ReferenceSet)
63 from storm.expr import Or, Sum, Desc
64@@ -135,7 +135,7 @@
65 for d in deleted[:limit]:
66 leaf = parent.build_tree_from_path(d.path)
67 d.undelete(leaf)
68- root.when_last_modified = datetime.datetime.utcnow()
69+ root.when_last_modified = now()
70 return parent
71
72
73@@ -325,7 +325,7 @@
74 when_created = DateTime(allow_none=True)
75
76 def __init__(self):
77- self.when_created = datetime.datetime.utcnow()
78+ self.when_created = now()
79
80 @classmethod
81 def make_empty(cls, store):
82@@ -334,7 +334,7 @@
83 o.hash = EMPTY_CONTENT_HASH
84 o.crc32 = 0
85 o.size = 0
86- o.when_created = datetime.datetime.utcnow()
87+ o.when_created = now()
88 store.add(o)
89 return o
90
91@@ -564,7 +564,7 @@
92 curr_size = getattr(self.content, 'size', 0)
93 self._update_used_bytes(new_content.size - curr_size)
94 self._content = new_content
95- self.when_last_modified = datetime.datetime.utcnow()
96+ self.when_last_modified = now()
97 self.update_generation()
98
99 from backends.txlog.models import TransactionLog
100@@ -639,10 +639,10 @@
101 self.parent_id = new_parent_id
102 self.path = pypath.join(new_parent_path, new_parent_name)
103 self.name = new_name
104- now = datetime.datetime.utcnow()
105+ right_now = now()
106 if old_parent.id != new_parent_id:
107- old_parent.when_last_modified = now
108- new_parent.when_last_modified = now
109+ old_parent.when_last_modified = right_now
110+ new_parent.when_last_modified = right_now
111
112 from backends.txlog.models import TransactionLog
113 TransactionLog.record_move(self, old_name, old_parent)
114@@ -731,14 +731,14 @@
115 if new_parent:
116 # if we have a suitable parent, update the parent
117 self.parent = new_parent
118- self.parent.when_last_modified = datetime.datetime.utcnow()
119+ self.parent.when_last_modified = now()
120 else:
121 # if we can't find a suitable parent, we need to restore the
122 # old one.
123 self.parent.undelete()
124 else:
125 # if the parent was live, we just need to update the timestamp
126- self.parent.when_last_modified = datetime.datetime.utcnow()
127+ self.parent.when_last_modified = now()
128
129 def unlink(self):
130 """Mark the node as Dead."""
131@@ -758,7 +758,7 @@
132 # case unnecessary) implicit flush.
133 with implicit_flushes_blocked_on(Store.of(self)):
134 self.status = STATUS_DEAD
135- self.when_last_modified = datetime.datetime.utcnow()
136+ self.when_last_modified = now()
137 self.update_generation()
138
139 from backends.txlog.models import TransactionLog
140@@ -767,7 +767,7 @@
141 if self.kind == StorageObject.FILE:
142 self._update_used_bytes(0 - getattr(self.content, 'size', 0))
143 if self.parent_id != ROOT_PARENTID:
144- self.parent.when_last_modified = datetime.datetime.utcnow()
145+ self.parent.when_last_modified = now()
146
147 def unlink_tree(self):
148 """Unlink and entire directory and it's subdirectories"""
149@@ -791,13 +791,13 @@
150 TransactionLog.record_unlink_tree(self)
151
152 self.descendants.set(status=STATUS_DEAD,
153- when_last_modified=datetime.datetime.utcnow())
154+ when_last_modified=now())
155
156 self.status = STATUS_DEAD
157- self.when_last_modified = datetime.datetime.utcnow()
158+ self.when_last_modified = now()
159
160 if self.parent_id != ROOT_PARENTID:
161- self.parent.when_last_modified = datetime.datetime.utcnow()
162+ self.parent.when_last_modified = now()
163
164 @property
165 def tree_size(self):
166@@ -844,7 +844,7 @@
167 kind=StorageObject.DIRECTORY,
168 parent=self)
169 store.add(node)
170- self.when_last_modified = datetime.datetime.utcnow()
171+ self.when_last_modified = now()
172 return node
173
174 def make_file(self, name):
175@@ -864,7 +864,7 @@
176 kind=StorageObject.FILE,
177 parent=self)
178 store.add(node)
179- self.when_last_modified = datetime.datetime.utcnow()
180+ self.when_last_modified = now()
181 return node
182
183 @staticmethod
184@@ -1127,7 +1127,7 @@
185 self.chunk_count = 0
186 self.uploaded_bytes = 0
187 self.multipart_key = multipart_key
188- self.when_last_active = datetime.datetime.utcnow()
189+ self.when_last_active = now()
190 self.status = STATUS_LIVE
191
192 @classmethod
193@@ -1195,7 +1195,7 @@
194 self.path = path.strip('/')
195 self.status = STATUS_LIVE
196 self.generation = 0
197- self.when_created = datetime.datetime.utcnow()
198+ self.when_created = now()
199
200 def delete(self):
201 """Delete the UDF."""
202@@ -1337,7 +1337,7 @@
203 def set_status(self, status):
204 """Set the status of the download, and update the change date."""
205 self._status = status
206- self.status_change_date = datetime.datetime.utcnow()
207+ self.status_change_date = now()
208 status = property(get_status, set_status)
209
210
211@@ -1392,4 +1392,4 @@
212 self.hash_context = hash_context
213 self.magic_hash_context = magic_hash_context
214 self.crc_context = crc_context
215- self.when_last_active = datetime.datetime.utcnow()
216+ self.when_last_active = now()
217
218=== modified file 'src/backends/filesync/services.py'
219--- src/backends/filesync/services.py 2016-02-25 03:26:13 +0000
220+++ src/backends/filesync/services.py 2016-03-06 23:21:42 +0000
221@@ -25,7 +25,6 @@
222
223 from __future__ import unicode_literals
224
225-import datetime
226 import mimetypes
227 import os
228 import posixpath as pypath
229@@ -36,6 +35,7 @@
230 from weakref import WeakValueDictionary
231
232 from django.conf import settings
233+from django.utils.timezone import now
234 from storm.locals import SQL
235 from storm.expr import Or, LeftJoin, Desc, And
236 from storm.info import ClassAlias
237@@ -555,10 +555,7 @@
238 @property
239 def public_key(self):
240 """Return the public key for this node."""
241- if self.public_uuid:
242- return utils.get_node_public_key(self, True)
243- if self.public_id:
244- return utils.get_node_public_key(self)
245+ return utils.get_node_public_key(self)
246
247 @property
248 def owner(self):
249@@ -817,16 +814,14 @@
250 """Add part info to this uploadjob."""
251 self._gateway.add_uploadjob_part(self.id, size)
252 # also update the when_last_active value.
253- self._gateway.set_uploadjob_when_last_active(
254- self.id, datetime.datetime.utcnow())
255+ self._gateway.set_uploadjob_when_last_active(self.id, now())
256 self._load()
257
258 @retryable_transaction()
259 @fsync_commit
260 def touch(self):
261 """Update the when_last_active attribute."""
262- self._gateway.set_uploadjob_when_last_active(
263- self.id, datetime.datetime.utcnow())
264+ self._gateway.set_uploadjob_when_last_active(self.id, now())
265 self._load()
266
267
268@@ -1236,11 +1231,7 @@
269
270 def create_or_update_user(self, user_id, username, visible_name,
271 max_storage_bytes):
272- """Create or update a StorageUser and related data.
273-
274- This happens when a user subscribes to the service. If they upgrade or
275- reactivate a subscription, this would get called again.
276- """
277+ """Create or update a StorageUser and related data."""
278 user = self.store.get(StorageUser, user_id)
279 if user is None:
280 user = StorageUser.new(
281@@ -1292,11 +1283,7 @@
282
283 def claim_shareoffer(self, user_id, username, visible_name,
284 shareoffer_id):
285- """Claim a share offer sent to an email.
286-
287- This is a strange function in that the user may not have subscribed yet
288- and may not have a storage user record.
289- """
290+ """Claim a share offer sent to an email."""
291 # A anonymous share offer is a share sent to an email address but not
292 # to a specific user. We also don't let user's claim their own share
293 share, byuser = self._get_shareoffer(shareoffer_id)
294@@ -1316,7 +1303,7 @@
295 self.queue_share_accepted(share_dao)
296 return share_dao
297
298- def _get_public_node(self, public_key, use_uuid=False):
299+ def _get_public_node(self, public_key):
300 """Get a node from a public key."""
301 if public_key is None:
302 raise errors.DoesNotExist(self.publicfile_dne_error)
303@@ -1325,14 +1312,9 @@
304 except utils.Base62Error:
305 raise errors.DoesNotExist(self.publicfile_dne_error)
306
307- if use_uuid:
308- public_id = uuid.UUID(int=public_id)
309- publicnode = self.store.find(
310- PublicNode, PublicNode.public_uuid == public_id).one()
311- else:
312- publicnode = self.store.find(
313- PublicNode, PublicNode.id == public_id,
314- PublicNode.public_uuid == None).one() # NOQA
315+ public_id = uuid.UUID(int=public_id)
316+ publicnode = self.store.find(
317+ PublicNode, PublicNode.public_uuid == public_id).one()
318 if publicnode is None:
319 raise errors.DoesNotExist(self.publicfile_dne_error)
320
321@@ -1350,14 +1332,14 @@
322 def get_public_directory(self, public_key, mimetypes=None):
323 """Get a public directory."""
324 # Use UUIDs instead of the old method
325- node = self._get_public_node(public_key, use_uuid=True)
326+ node = self._get_public_node(public_key)
327 if node.kind != StorageObject.DIRECTORY:
328 raise errors.DoesNotExist(self.publicfile_dne_error)
329 return node
330
331- def get_public_file(self, public_key, use_uuid=False):
332+ def get_public_file(self, public_key):
333 """Get a public file."""
334- node = self._get_public_node(public_key, use_uuid)
335+ node = self._get_public_node(public_key)
336 if (node.content is None or node.content.storage_key is None or
337 node.kind != StorageObject.FILE):
338 # if the file has no content, we should not be able to get it
339@@ -3105,10 +3087,10 @@
340 return DAOUploadJob(job, gateway=self)
341
342 @timing_metric
343- def set_uploadjob_when_last_active(self, job_id, datetime):
344- """Set when_last_active to datetime.utcnow()."""
345+ def set_uploadjob_when_last_active(self, job_id, timestamp):
346+ """Set when_last_active to timestamp."""
347 job = self._get_uploadjob(job_id)
348- job.when_last_active = datetime
349+ job.when_last_active = timestamp
350 return DAOUploadJob(job, gateway=self)
351
352 def _make_public(self, fnode):
353@@ -3123,7 +3105,7 @@
354 # Flush the store to ensure the new PublicNode has
355 # a database ID.
356 self.store.flush()
357- if (utils.set_public_uuid and fnode.public_uuid is None):
358+ if fnode.public_uuid is None:
359 fnode.public_uuid = uuid.uuid4()
360 publicfile.public_uuid = fnode.public_uuid
361 fnode.publicfile_id = publicfile.id
362@@ -3279,10 +3261,10 @@
363
364
365 @fsync_readonly
366-def get_public_file(public_key, use_uuid=False):
367+def get_public_file(public_key):
368 """Get a public file."""
369 gw = SystemGateway()
370- return gw.get_public_file(public_key, use_uuid=use_uuid)
371+ return gw.get_public_file(public_key)
372
373
374 @fsync_readonly
375
376=== modified file 'src/backends/filesync/tests/test_dao.py'
377--- src/backends/filesync/tests/test_dao.py 2016-02-25 03:26:13 +0000
378+++ src/backends/filesync/tests/test_dao.py 2016-03-06 23:21:42 +0000
379@@ -23,10 +23,10 @@
380 import re
381 import uuid
382
383-from datetime import datetime
384 from operator import attrgetter
385
386 from django.conf import settings
387+from django.utils.timezone import now
388 from mocker import Mocker, expect
389 from storm.database import Connection
390 from storm.tracer import install_tracer, remove_tracer_type
391@@ -111,7 +111,7 @@
392 ['id', 'kind', 'parent_id', 'owner_id', 'status', 'when_created',
393 'when_last_modified', 'generation', 'generation_created',
394 'mimetype', 'public_uuid'])
395- self.assertTrue(isinstance(node_dao, services.FileNode))
396+ self.assertIsInstance(node_dao, services.FileNode)
397 # mimetype object will not be directly accessible
398 self.assertEqual(node_dao.nodekey, utils.make_nodekey(None, node.id))
399 self.assertEqual(node_dao.content, content)
400@@ -121,11 +121,11 @@
401 self.assertEqual(node_dao.can_delete, True)
402 node_dao.public_id = 1
403 # test public_key property
404- self.assertEqual(node_dao.public_key,
405- utils.get_node_public_key(node_dao, True))
406+ self.assertEqual(
407+ node_dao.public_key, utils.get_node_public_key(node_dao))
408 node_dao.public_uuid = None
409- self.assertEqual(node_dao.public_key,
410- utils.get_node_public_key(node_dao, False))
411+ self.assertEqual(
412+ node_dao.public_key, utils.get_node_public_key(node_dao))
413 node_dao.public_id = None
414 self.assertEqual(node_dao.public_key, None)
415 node.generation = None
416@@ -138,7 +138,7 @@
417 node.kind = StorageObject.DIRECTORY
418 dir_dao = services.StorageNode.factory(
419 None, node, owner=owner, content=content, permissions={})
420- self.assertTrue(isinstance(dir_dao, services.DirectoryNode))
421+ self.assertIsInstance(dir_dao, services.DirectoryNode)
422 # content for Directories is ignored
423 self.assertEqual(dir_dao.content, None)
424 self.assertEqual(dir_dao.can_read, False)
425@@ -196,8 +196,8 @@
426 upload = UploadJob(uuid.uuid4())
427 upload.hash_hint = b'fake hash hint'
428 upload.crc32_hint = 1234
429- upload.when_started = datetime.utcnow()
430- upload.when_last_active = datetime.utcnow()
431+ upload.when_started = now()
432+ upload.when_last_active = now()
433 upload_dao = services.DAOUploadJob(upload)
434 self._compare_props(upload, upload_dao,
435 ['storage_object_id', 'chunk_count',
436@@ -1128,24 +1128,7 @@
437 self.assertEqual(5, len(nodes))
438
439 def test_change_public_access_file(self):
440- """Test the basics of changing public access to a file."""
441- utils.set_public_uuid = False
442- user = self.create_user()
443- f1 = user.root.make_file('a-file.txt')
444- # It has no public ID
445- self.assertEqual(f1.public_uuid, None)
446- self.assertEqual(f1.public_url, None)
447- # It now has a public ID
448- f1.change_public_access(True)
449- self.assertEqual(f1.public_uuid, None)
450- self.assertNotEqual(f1.public_url, None)
451- f1.change_public_access(False)
452- self.assertEqual(f1.public_uuid, None)
453- self.assertEqual(f1.public_url, None)
454-
455- def test_change_public_access_file_uuid(self):
456 """Test the basics of changing public access to a file using uuid."""
457- utils.set_public_uuid = True
458 user = self.create_user()
459 f1 = user.root.make_file('a-file.txt')
460 # It has no public ID
461
462=== modified file 'src/backends/filesync/tests/test_gateway.py'
463--- src/backends/filesync/tests/test_gateway.py 2016-02-25 03:34:42 +0000
464+++ src/backends/filesync/tests/test_gateway.py 2016-03-06 23:21:42 +0000
465@@ -20,15 +20,16 @@
466
467 from __future__ import unicode_literals
468
469-import datetime
470 import logging
471 import posixpath
472 import os
473 import types
474 import uuid
475
476+from datetime import timedelta
477 from operator import attrgetter
478
479+from django.utils.timezone import now
480 from mock import patch
481 from psycopg2 import IntegrityError
482 from storm.locals import Store
483@@ -320,7 +321,8 @@
484 self.setup_shares()
485 node = get_filesync_store().get(StorageObject, self.d3.id)
486 share = self.user1.get_share(self.share1.id)
487- vgw = ReadWriteVolumeGateway(self.user1, share=share)
488+ vgw = ReadWriteVolumeGateway(
489+ self.user1, share=share, notifier=self.dummy_notifier)
490 vgw.handle_node_change(node)
491 transaction.commit()
492
493@@ -968,12 +970,11 @@
494 # change the when_started date for the test.
495 store = get_filesync_store()
496 uploadjob = store.get(UploadJob, up1.id)
497- uploadjob.when_last_active = (
498- datetime.datetime.now() - datetime.timedelta(uid))
499+ uploadjob.when_last_active = now() - timedelta(uid)
500 transaction.commit()
501 # check that filtering by date works as expected.
502 for idx in range(0, 10):
503- date = datetime.datetime.now() - datetime.timedelta(idx)
504+ date = now() - timedelta(idx)
505 jobs = list(self.gw.get_abandoned_uploadjobs(date))
506 self.assertEqual(len(jobs), 10 - idx)
507
508@@ -996,11 +997,10 @@
509 # change the when_started date for the test.
510 store = get_filesync_store()
511 uploadjob = store.get(UploadJob, up1.id)
512- uploadjob.when_last_active = (
513- datetime.datetime.now() - datetime.timedelta(10))
514+ uploadjob.when_last_active = now() - timedelta(10)
515 transaction.commit()
516 # check that filtering by date works as expected.
517- date = datetime.datetime.now() - datetime.timedelta(9)
518+ date = now() - timedelta(9)
519 jobs = self.gw.get_abandoned_uploadjobs(date)
520 self.assertEqual(len(jobs), 10)
521 self.gw.cleanup_uploadjobs(jobs)
522@@ -1026,11 +1026,6 @@
523 self.root.id, name, hash, crc, size, deflated_size, storage_key,
524 mimetype='fakemime')
525 self.file = self.vgw._get_node_simple(file1.id)
526- self.save_flag = utils.set_public_uuid
527-
528- def tearDown(self):
529- utils.set_public_uuid = self.save_flag
530- super(SystemGatewayPublicFileTestCase, self).tearDown()
531
532 def get_get_public_file_DoesNotExist(self):
533 """Get get_public_file with unknown key."""
534@@ -1040,7 +1035,6 @@
535
536 def test_get_public_file_no_content(self):
537 """Test get_public_file when file has no content."""
538- utils.set_public_uuid = False
539 file_dao = self.vgw.change_public_access(self.file.id, True)
540 self.file._content_hash = None
541 self.assertRaises(errors.DoesNotExist,
542@@ -1048,7 +1042,6 @@
543
544 def test_get_public_file_no_storage_key(self):
545 """Test get_public_file when file has no storage_key."""
546- utils.set_public_uuid = False
547 file_dao = self.vgw.change_public_access(self.file.id, True)
548 self.file.content.storage_key = None
549 self.assertRaises(errors.DoesNotExist,
550@@ -1056,57 +1049,23 @@
551
552 def test_get_public_file(self):
553 """Tests for get_public_file."""
554- utils.set_public_uuid = False
555 file_dao = self.vgw.change_public_access(self.file.id, True)
556- self.assertNotEquals(file_dao.public_id, None)
557+ self.assertIsNotNone(file_dao.public_id)
558 self.assertIsNotNone(file_dao.public_uuid)
559 public_id = file_dao.public_id
560- public_key = file_dao.public_key
561- # Once a file has been made public, it can be looked up by its ID.
562- file2 = self.gw.get_public_file(public_key)
563- self.assertEqual(file2.id, self.file.id)
564- self.assertEqual(file2.mimetype, 'fakemime')
565- # this file was created with content, the content must be returned
566- self.assertNotEquals(file2.content, None)
567-
568- # DoesNotExist is raised if that file is made private.
569- file_dao = self.vgw.change_public_access(self.file.id, False)
570- self.assertRaises(errors.DoesNotExist,
571- self.gw.get_public_file, public_key)
572-
573- # public stays the same when set back to public
574- self.assertEqual(file_dao.public_id, None)
575- file_dao = self.vgw.change_public_access(file_dao.id, True)
576- self.assertEqual(file_dao.public_id, public_id)
577-
578- # DoesNotExist is raised if the underlying file is deleted.
579- self.vgw.delete_node(self.file.id)
580- self.assertRaises(errors.DoesNotExist,
581- self.gw.get_public_file, public_key)
582-
583- def test_get_public_file_public_uuid(self):
584- """Tests for get_public_file."""
585- utils.set_public_uuid = True
586- file_dao = self.vgw.change_public_access(self.file.id, True)
587- self.assertNotEquals(file_dao.public_id, None)
588- self.assertNotEquals(file_dao.public_uuid, None)
589- public_id = file_dao.public_id
590 public_uuid = file_dao.public_uuid
591 public_key = file_dao.public_key
592 # Once a file has been made public, it can be looked up by its UUID.
593- file2 = self.gw.get_public_file(public_key, use_uuid=True)
594+ file2 = self.gw.get_public_file(public_key)
595 self.assertEqual(file2.id, self.file.id)
596 self.assertEqual(file2.mimetype, self.file.mimetype)
597 # this file was created with content, the content must be returned
598 self.assertNotEquals(file2.content, None)
599- # but not it's public_id since the config is set to use uuid
600- self.assertRaises(errors.DoesNotExist,
601- self.gw.get_public_file, public_key)
602
603 # DoesNotExist is raised if that file is made private.
604 file_dao = self.vgw.change_public_access(self.file.id, False)
605 self.assertRaises(errors.DoesNotExist,
606- self.gw.get_public_file, public_key, True)
607+ self.gw.get_public_file, public_key)
608
609 # public_id stays the same when set back to public
610 self.assertEqual(file_dao.public_id, None)
611@@ -1116,15 +1075,13 @@
612
613 # DoesNotExist is raised if the underlying file is deleted.
614 self.vgw.delete_node(self.file.id)
615- self.assertRaises(errors.DoesNotExist,
616- self.gw.get_public_file, public_key, use_uuid=True)
617+ self.assertRaises(
618+ errors.DoesNotExist, self.gw.get_public_file, public_key)
619
620 def test_get_public_file_user_locked(self):
621 """get_public_file works with a locked user."""
622- utils.set_public_uuid = False
623 file_dao = self.vgw.change_public_access(self.file.id, True)
624- self.assertNotEquals(file_dao.public_id, None)
625- self.assertEqual(file_dao.public_uuid, None)
626+ self.assertIsNotNone(file_dao.public_uuid)
627 public_key = file_dao.public_key
628 # lock the user
629 suser = self.store.get(StorageUser, self.user1.id)
630@@ -1146,12 +1103,6 @@
631 self.root = self.vgw.get_root()
632 dir1 = self.vgw.make_subdirectory(self.root.id, 'a-folder')
633 self.dir = self.vgw._get_node_simple(dir1.id)
634- self.save_flag = utils.set_public_uuid
635- utils.set_public_uuid = True
636-
637- def tearDown(self):
638- utils.set_public_uuid = self.save_flag
639- super(SystemGatewayPublicDirectoryTestCase, self).tearDown()
640
641 def get_get_public_directory_DoesNotExist(self):
642 """Get get_public_file with unknown key."""
643@@ -2932,7 +2883,7 @@
644 job = self.vgw.make_uploadjob(
645 file1.id, file1.content_hash, new_hash, crc, size,
646 multipart_key=uuid.uuid4())
647- new_last_active = datetime.datetime.utcnow() + datetime.timedelta(1)
648+ new_last_active = now() + timedelta(1)
649 self.vgw.set_uploadjob_when_last_active(job.id, new_last_active)
650 up_job = self.vgw.get_uploadjob(job.id)
651 self.assertEqual(up_job.when_last_active, new_last_active)
652@@ -4289,22 +4240,8 @@
653 self.assertEqual(f1.generation, 2)
654 self.assertEqual(f1.generation_created, 1)
655
656- def test_change_public_access_without_public_uuid(self):
657- """Test change public access."""
658- saved_flag = utils.set_public_uuid
659- utils.set_public_uuid = False
660- a_file = self.vgw.make_file(self.vgw.get_root().id, 'the file name')
661- a_file = self.vgw.change_public_access(a_file.id, True)
662- f1 = self.storage_store.get(StorageObject, a_file.id)
663- self.assertEqual(f1.generation, 2)
664- self.assertEqual(f1.generation_created, 1)
665- self.assertEqual(f1.public_uuid, None)
666- utils.set_public_uuid = saved_flag
667-
668- def test_change_public_access_with_public_uuid(self):
669- """Test change public access."""
670- saved_flag = utils.set_public_uuid
671- utils.set_public_uuid = True
672+ def test_change_public_access(self):
673+ """Test change public access."""
674 a_file = self.vgw.make_file(self.vgw.get_root().id, 'the file name')
675 a_file = self.vgw.change_public_access(a_file.id, True)
676 f1 = self.storage_store.get(StorageObject, a_file.id)
677@@ -4321,7 +4258,6 @@
678 a_file = self.vgw.change_public_access(a_file.id, True)
679 f1 = self.storage_store.get(StorageObject, a_file.id)
680 self.assertEqual(f1.public_uuid, public_uuid)
681- utils.set_public_uuid = saved_flag
682
683 def test_deltas_across_volumes(self):
684 """Test deltas across volumes to make sure they don't intermingle."""
685
686=== modified file 'src/backends/filesync/tests/test_models.py'
687--- src/backends/filesync/tests/test_models.py 2015-10-16 11:24:51 +0000
688+++ src/backends/filesync/tests/test_models.py 2016-03-06 23:21:42 +0000
689@@ -24,8 +24,10 @@
690 import threading
691 import unittest
692 import uuid
693-import datetime
694-
695+
696+from datetime import datetime
697+
698+from django.utils.timezone import now
699 from mock import patch
700 from storm.expr import Or, Max
701
702@@ -1372,9 +1374,9 @@
703 self.store.add(content)
704 root = StorageObject.get_root(self.store, user.id)
705 file = root.make_file('a_File')
706- before = datetime.datetime.utcnow()
707+ before = now()
708 file.content = content
709- after = datetime.datetime.utcnow()
710+ after = now()
711 self.assertTrue(after > file.when_last_modified > before)
712
713 def test_update_last_modified_on_make(self):
714@@ -1382,14 +1384,14 @@
715 user = self.make_user(1, 'a_test_user')
716 root = StorageObject.get_root(self.store, user.id)
717 subdir = root.make_subdirectory('subdir')
718- before_file = datetime.datetime.utcnow()
719+ before_file = now()
720 subdir.make_file('a_File')
721- after_file = datetime.datetime.utcnow()
722+ after_file = now()
723 self.assertTrue(after_file > subdir.when_last_modified > before_file)
724
725- before_dir = datetime.datetime.utcnow()
726+ before_dir = now()
727 subdir.make_subdirectory('subsubdir')
728- after_dir = datetime.datetime.utcnow()
729+ after_dir = now()
730 self.assertTrue(after_dir > subdir.when_last_modified > before_dir)
731
732 def test_max_used_bytes(self):
733@@ -2198,8 +2200,7 @@
734 self.assertEqual(download.file_path, 'file_path')
735 self.assertEqual(download.download_url, 'http://download/url')
736 self.assertEqual(download.status, Download.STATUS_QUEUED)
737- self.assertTrue(
738- isinstance(download.status_change_date, datetime.datetime))
739+ self.assertIsInstance(download.status_change_date, datetime)
740 self.assertEqual(download.node_id, None)
741 self.assertEqual(download.error_message, None)
742
743
744=== modified file 'src/backends/filesync/tests/test_resthelper.py'
745--- src/backends/filesync/tests/test_resthelper.py 2016-02-20 17:03:19 +0000
746+++ src/backends/filesync/tests/test_resthelper.py 2016-03-06 23:21:42 +0000
747@@ -26,9 +26,8 @@
748 import unittest
749 import uuid
750
751-from datetime import datetime
752-
753 from django.conf import settings
754+from django.utils.timezone import now
755
756 from metrics.tests import FakeMetrics
757 from backends.filesync import errors
758@@ -64,7 +63,7 @@
759 id = uuid.uuid4()
760 is_root = False
761 path = "~/Documents"
762- when_created = datetime.utcnow()
763+ when_created = now()
764
765
766 class MockNode(object):
767@@ -76,8 +75,8 @@
768 full_path = '/a/b/c/d/file.txt'
769 name = 'file.txt'
770 content_hash = 'abcdefg'
771- when_created = datetime.utcnow()
772- when_last_modified = datetime.utcnow()
773+ when_created = now()
774+ when_last_modified = now()
775 generation = 1
776 generation_created = 1
777 mimetype = 'text'
778
779=== modified file 'src/backends/filesync/tests/test_services.py'
780--- src/backends/filesync/tests/test_services.py 2015-09-27 21:25:27 +0000
781+++ src/backends/filesync/tests/test_services.py 2016-03-06 23:21:42 +0000
782@@ -21,7 +21,8 @@
783 from __future__ import unicode_literals
784
785 import uuid
786-import datetime
787+
788+from django.utils.timezone import now
789
790 from backends.filesync import errors, utils
791 from backends.filesync.models import StorageUser
792@@ -107,13 +108,11 @@
793 def test_get_abandoned_uploadjobs(self):
794 """Test the get_abandoned_uploadjobs function."""
795 self.assertRaises(TypeError, get_abandoned_uploadjobs)
796- jobs = get_abandoned_uploadjobs(datetime.datetime.now(), 100)
797+ jobs = get_abandoned_uploadjobs(now(), 100)
798 self.assertTrue(isinstance(jobs, list))
799
800 def test_get_public_file(self):
801 """Test the get_public_file function."""
802- save_setting = utils.set_public_uuid
803- utils.set_public_uuid = False
804 user = self.factory.make_user(
805 1, "Cool UserName", "Visible Name", 10)
806 a_file = user.volume().root.make_file_with_content(
807@@ -123,9 +122,7 @@
808 f1 = get_public_file(public_key)
809 self.assertEqual(f1, a_file)
810 a_file.change_public_access(False)
811- self.assertRaises(errors.DoesNotExist,
812- get_public_file, public_key, use_uuid=False)
813- utils.set_public_uuid = save_setting
814+ self.assertRaises(errors.DoesNotExist, get_public_file, public_key)
815
816 def test_get_public_directory(self):
817 """Test the get_public_directory function."""
818@@ -144,8 +141,6 @@
819
820 def test_get_public_file_public_uuid(self):
821 """Test the get_public_file function."""
822- save_setting = utils.set_public_uuid
823- utils.set_public_uuid = True
824 user = self.factory.make_user(
825 1, "Cool UserName", "Visible Name", 10)
826 a_file = user.volume().root.make_file_with_content(
827@@ -153,12 +148,7 @@
828 a_file.change_public_access(True)
829 public_key = a_file.public_key
830 # get the file using the public uuid
831- f1 = get_public_file(public_key, use_uuid=True)
832+ f1 = get_public_file(public_key)
833 self.assertEqual(f1, a_file)
834- # can't get the file using the old id
835- self.assertRaises(errors.DoesNotExist,
836- get_public_file, public_key)
837 a_file.change_public_access(False)
838- self.assertRaises(errors.DoesNotExist,
839- get_public_file, public_key, use_uuid=True)
840- utils.set_public_uuid = save_setting
841+ self.assertRaises(errors.DoesNotExist, get_public_file, public_key)
842
843=== modified file 'src/backends/filesync/tests/test_utils.py'
844--- src/backends/filesync/tests/test_utils.py 2016-02-25 03:26:13 +0000
845+++ src/backends/filesync/tests/test_utils.py 2016-03-06 23:21:42 +0000
846@@ -122,24 +122,21 @@
847 def test_get_node_public_key(self):
848 """Test get_node_public_key."""
849 node = FakeNode()
850- node.public_id = 1
851 node.public_uuid = uuid.UUID(int=12)
852- self.assertTrue(get_node_public_key(node, False),
853- encode_base62(node.public_id))
854- self.assertTrue(get_node_public_key(node, True),
855- encode_base62(node.public_uuid.int, padded_to=22))
856+ self.assertEqual(
857+ get_node_public_key(node),
858+ encode_base62(node.public_uuid.int, padded_to=22))
859
860 def test_get_public_url(self):
861 """Test get_public_url function."""
862 node = FakeNode()
863- self.assertTrue(get_public_file_url(node) is None)
864- node.public_id = 1
865- self.assertTrue(get_public_file_url(node).endswith(
866- '/%s/' % encode_base62(node.public_id)))
867+ self.assertIsNone(get_public_file_url(node))
868 # using a short value here to make sure padding works
869 node.public_uuid = uuid.UUID(int=12)
870- self.assertTrue(get_public_file_url(node).endswith(
871- '/%s' % encode_base62(node.public_uuid.int, padded_to=22)))
872+ key = encode_base62(node.public_uuid.int, padded_to=22)
873+ self.assertEqual(
874+ get_public_file_url(node),
875+ '%s/%s' % (settings.PUBLIC_URL_PREFIX, key))
876
877
878 class Base62Tests(unittest.TestCase):
879
880=== modified file 'src/backends/filesync/tests/testcase.py'
881--- src/backends/filesync/tests/testcase.py 2015-09-27 21:25:27 +0000
882+++ src/backends/filesync/tests/testcase.py 2016-03-06 23:21:42 +0000
883@@ -52,12 +52,6 @@
884 super(StorageDALTestCase, self).setUp()
885 self.factory = Factory()
886 self.store = get_filesync_store()
887- self.save_utils_set_public_uuid = utils.set_public_uuid
888-
889- def tearDown(self):
890- """Tear down."""
891- utils.set_public_uuid = self.save_utils_set_public_uuid
892- super(StorageDALTestCase, self).tearDown()
893
894 def patch(self, obj, attr_name, new_val):
895 """Patch!"""
896@@ -168,7 +162,7 @@
897 """Return a hashkey."""
898 return b'sha1:' + hashlib.sha1(key or str(uuid.uuid4())).hexdigest()
899
900- def get_test_contentblob(self, content=None):
901+ def get_test_contentblob(self, content=None, magic_hash=None):
902 """Get a content blob."""
903 if content:
904 content = content.encode('utf-8')
905@@ -180,6 +174,12 @@
906 cb.storage_key = uuid.uuid4()
907 cb.content = content
908 cb.status = STATUS_LIVE
909+ cb.magic_hash = magic_hash
910+ return cb
911+
912+ def make_content_blob(self, content=None, magic_hash=None):
913+ cb = self.get_test_contentblob(content, magic_hash)
914+ get_filesync_store().add(cb)
915 return cb
916
917 def content_blob_args(self):
918
919=== modified file 'src/backends/filesync/utils.py'
920--- src/backends/filesync/utils.py 2015-09-29 03:06:37 +0000
921+++ src/backends/filesync/utils.py 2016-03-06 23:21:42 +0000
922@@ -161,29 +161,17 @@
923 return value
924
925
926-# This is a setting used during testing and transition to the new public files
927-# urls and will be removed once we enable the new public URL feature.
928-# Since there is no way to go back once this setting is enabled,
929-# it is not used in a configuration file.
930-set_public_uuid = True
931-
932-
933-def get_node_public_key(node, from_uuid=False):
934+def get_node_public_key(node):
935 """Get a node's public_key."""
936- if from_uuid:
937+ if node.public_uuid is not None:
938 return encode_base62(node.public_uuid.int, padded_to=22)
939- else:
940- return encode_base62(node.publicfile_id)
941
942
943 def get_public_file_url(node):
944 """Return the url to a public file."""
945- if settings.UPDOWN_PUBLIC_URL_PREFIX_2 and node.public_uuid:
946- return '%s%s' % (settings.UPDOWN_PUBLIC_URL_PREFIX_2,
947- get_node_public_key(node, True))
948- elif node.publicfile_id:
949- return '%s%s/' % (settings.UPDOWN_PUBLIC_URL_PREFIX,
950- get_node_public_key(node, False))
951+ public_key = get_node_public_key(node)
952+ if public_key is not None:
953+ return '%s/%s' % (settings.PUBLIC_URL_PREFIX.rstrip('/'), public_key)
954
955
956 def get_keywords_from_path(volume_path):
957
958=== modified file 'src/backends/txlog/testing/__init__.py'
959--- src/backends/txlog/testing/__init__.py 2015-09-27 17:01:04 +0000
960+++ src/backends/txlog/testing/__init__.py 2016-03-06 23:21:42 +0000
961@@ -18,9 +18,9 @@
962
963 """Testing helpers for txlog."""
964
965-import datetime
966 import uuid
967
968+from django.utils.timezone import now
969 from backends.txlog.models import TransactionLog
970
971
972@@ -38,7 +38,7 @@
973 if path is None:
974 path = u'/%s' % unicode(uuid.uuid4())
975 if timestamp is None:
976- timestamp = datetime.datetime.utcnow()
977+ timestamp = now()
978 return dict(
979 txn_id=txn_id, node_id=node_id, owner_id=owner_id, volume_id=volume_id,
980 op_type=op_type, path=path, generation=generation, timestamp=timestamp,
981
982=== modified file 'src/backends/txlog/tests/test_utils.py'
983--- src/backends/txlog/tests/test_utils.py 2015-09-27 19:24:27 +0000
984+++ src/backends/txlog/tests/test_utils.py 2016-03-06 23:21:42 +0000
985@@ -20,8 +20,9 @@
986
987 from __future__ import unicode_literals
988
989-import datetime
990+from datetime import timedelta
991
992+from django.utils.timezone import now
993 from mock import patch
994
995 from backends.filesync import dbmanager
996@@ -142,7 +143,7 @@
997 self.assertRaises(
998 RuntimeError, utils.update_last_row,
999 worker_name=b'test_worker_name', row_id=1,
1000- timestamp=datetime.datetime.utcnow())
1001+ timestamp=now())
1002
1003 def _convert_txlogs_to_dicts(self, txlogs):
1004 """Convert a list of TransactionLog objects into dictionaries.
1005@@ -315,10 +316,10 @@
1006 """Test that txnlogs not old enough are maintained, instead of being
1007 deleted."""
1008
1009- now = datetime.datetime.utcnow()
1010- limit_datetime = now - datetime.timedelta(days=7)
1011+ right_now = now()
1012+ limit_datetime = right_now - timedelta(days=7)
1013 # Not so old
1014- old_datetime = limit_datetime + datetime.timedelta(seconds=1)
1015+ old_datetime = limit_datetime + timedelta(seconds=1)
1016
1017 self.factory.make_transaction_log(tx_id=1)
1018 self.factory.make_transaction_log(tx_id=2, timestamp=old_datetime)
1019@@ -339,8 +340,8 @@
1020 def test_deletes_old_enough_txlogs(self):
1021 """Test that txnlogs old enough are deleted."""
1022
1023- now = datetime.datetime.utcnow()
1024- timestamp_limit = now - datetime.timedelta(days=7)
1025+ right_now = now()
1026+ timestamp_limit = right_now - timedelta(days=7)
1027 # Old enough
1028 old_datetime = timestamp_limit
1029
1030@@ -368,8 +369,8 @@
1031 """Test that txnlogs old enough are deleted and are within the quantity
1032 limit given."""
1033
1034- now = datetime.datetime.utcnow()
1035- timestamp_limit = now - datetime.timedelta(days=7)
1036+ right_now = now()
1037+ timestamp_limit = right_now - timedelta(days=7)
1038 # Old enough
1039 old_datetime = timestamp_limit
1040 quantity_limit = 2
1041@@ -400,8 +401,8 @@
1042 def test_deletes_txlogs_slice(self):
1043 """Delete a txlog slice by date and quantity."""
1044
1045- now = datetime.datetime.utcnow()
1046- timestamp_limit = now - datetime.timedelta(days=7)
1047+ right_now = now()
1048+ timestamp_limit = right_now - timedelta(days=7)
1049 # Old enough
1050 old_dt = timestamp_limit
1051 quantity_limit = 2
1052@@ -415,7 +416,7 @@
1053 ]
1054 self.store.commit()
1055
1056- removed = utils.delete_txlogs_slice(date=now.date(),
1057+ removed = utils.delete_txlogs_slice(date=right_now.date(),
1058 quantity_limit=quantity_limit)
1059
1060 self.store.rollback() # Shouldn't affect the deletion result
1061@@ -429,15 +430,15 @@
1062
1063 def test_get_row_by_time_with_no_data(self):
1064 """Test the get_row_by_time function when no data is present."""
1065- txid, _ = utils.get_row_by_time(datetime.datetime.utcnow())
1066+ txid, _ = utils.get_row_by_time(now())
1067 self.assertEqual(txid, None)
1068
1069 def test_get_row_by_time_with_data(self):
1070 """Test get_row_by_time function when data is present."""
1071- ts = datetime.datetime.utcnow()
1072+ ts = now()
1073 txlogs = [
1074- self.factory.make_transaction_log(
1075- timestamp=ts + datetime.timedelta(i, 0)) for i in range(5)]
1076+ self.factory.make_transaction_log(timestamp=ts + timedelta(i, 0))
1077+ for i in range(5)]
1078 tstamp = txlogs[2].timestamp
1079 txid, newtstamp = utils.get_row_by_time(tstamp)
1080 self.assertEqual(txid, txlogs[2].id)
1081@@ -445,10 +446,10 @@
1082
1083 def test_get_row_by_time_timestamp_twice(self):
1084 """Test get_row_by_time having two lines with same timestamp."""
1085- ts = datetime.datetime.utcnow()
1086+ ts = now()
1087 txlogs = [
1088- self.factory.make_transaction_log(
1089- timestamp=ts + datetime.timedelta(i, 0)) for i in range(5)]
1090+ self.factory.make_transaction_log(timestamp=ts + timedelta(i, 0))
1091+ for i in range(5)]
1092 # put the timestamp of [3] into [1], the function should return the
1093 # id of [1]
1094 tstamp = txlogs[1].timestamp = txlogs[3].timestamp
1095@@ -459,10 +460,10 @@
1096
1097 def test_get_row_by_time_not_exact(self):
1098 """Test get_row_by_time not giving an exact timestamp."""
1099- ts = datetime.datetime.utcnow()
1100+ ts = now()
1101 txlogs = [
1102- self.factory.make_transaction_log(
1103- timestamp=ts + datetime.timedelta(i, 0)) for i in range(5)]
1104+ self.factory.make_transaction_log(timestamp=ts + timedelta(i, 0))
1105+ for i in range(5)]
1106
1107 # get a timestamp in the middle of [2] and [3], the function should
1108 # return the id of [3]
1109@@ -477,7 +478,7 @@
1110 def test_get_row_by_time_nothing_found(self):
1111 """Test get_row_by_time with a big enough timestamp."""
1112 txlogs = [self.factory.make_transaction_log() for i in range(2)]
1113- tstamp = txlogs[-1].timestamp + datetime.timedelta(seconds=1)
1114+ tstamp = txlogs[-1].timestamp + timedelta(seconds=1)
1115 txid, newtstamp = utils.get_row_by_time(tstamp)
1116 self.assertEqual(txid, None)
1117 self.assertEqual(newtstamp, None)
1118
1119=== modified file 'src/backends/utils.py'
1120--- src/backends/utils.py 2015-09-27 16:59:15 +0000
1121+++ src/backends/utils.py 2016-03-06 23:21:42 +0000
1122@@ -18,24 +18,21 @@
1123
1124 """Some generic utilities."""
1125
1126-from datetime import datetime
1127-
1128+from __future__ import unicode_literals
1129 from django.contrib.auth.models import User
1130
1131 from backends.filesync import services
1132
1133
1134-def create_test_user(username=u"fred", email=u"fred@bedrock.com",
1135- first_name=u"Fredrick", last_name=u"Flintsone",
1136- id=None, password=None):
1137+def create_test_user(
1138+ username='fred', email='fred@bedrock.com', first_name='Fredrick',
1139+ last_name='Flintsone', password=None, id=None):
1140 """Create a user used for testing."""
1141 try:
1142 user = User.objects.get(username=username)
1143 except User.DoesNotExist:
1144- now = datetime.utcnow()
1145 user = User(id=id, username=unicode(username), email=unicode(email),
1146- is_staff=False, is_active=True, is_superuser=False,
1147- last_login=now, date_joined=now)
1148+ is_staff=False, is_active=True, is_superuser=False)
1149 user.set_password(password)
1150 user.save()
1151 user.first_name = unicode(first_name)
1152
1153=== modified file 'src/magicicada/settings/__init__.py'
1154--- src/magicicada/settings/__init__.py 2016-02-20 16:11:52 +0000
1155+++ src/magicicada/settings/__init__.py 2016-03-06 23:21:42 +0000
1156@@ -184,6 +184,7 @@
1157 LOG_LEVEL = 5
1158 LOG_TO_SYSLOG = False
1159 OOPS_PATH = os.path.join(BASE_DIR, 'tmp', 'oops')
1160+PUBLIC_URL_PREFIX = 'http://some_url'
1161 ROOT_USERVOLUME_NAME = 'Magicicada'
1162 ROOT_USERVOLUME_PATH = '~/' + ROOT_USERVOLUME_NAME
1163 SERVICE_GROUP = 'filesync'
1164@@ -192,9 +193,6 @@
1165 STORAGE_PROXY_PORT = None
1166 SYSLOG_FORMAT = (
1167 '%(processName)-13s %(levelname)-8s %(name)s[%(process)d]: %(message)s')
1168-UPDOWN_PUBLIC_URL_PREFIX = 'http://some_url/p/'
1169-UPDOWN_PUBLIC_URL_PREFIX_2 = 'http://some_url/'
1170-
1171
1172 from . import api_server # noqa
1173 from . import ssl_proxy # noqa
1174
1175=== modified file 'src/magicicada/wsgi.py'
1176--- src/magicicada/wsgi.py 2015-08-17 04:52:41 +0000
1177+++ src/magicicada/wsgi.py 2016-03-06 23:21:42 +0000
1178@@ -11,6 +11,6 @@
1179
1180 from django.core.wsgi import get_wsgi_application
1181
1182-os.environ.setdefault("DJANGO_SETTINGS_MODULE", "filesync.settings")
1183+os.environ.setdefault("DJANGO_SETTINGS_MODULE", "magicicada.settings")
1184
1185 application = get_wsgi_application()
1186
1187=== modified file 'src/monitoring/dump.py'
1188--- src/monitoring/dump.py 2015-09-19 16:32:27 +0000
1189+++ src/monitoring/dump.py 2016-03-06 23:21:42 +0000
1190@@ -20,9 +20,9 @@
1191
1192 import os
1193 import gc
1194-import datetime
1195
1196 from django.conf import settings
1197+from django.utils.timezone import now
1198
1199
1200 SIGMELIAE = 44
1201@@ -35,7 +35,7 @@
1202
1203 dump_dir = settings.LOG_FOLDER
1204 filename = os.path.join(dump_dir, 'meliae-%s.json' % (
1205- datetime.datetime.utcnow().strftime("%Y%m%d%H%M%S",)))
1206+ now().strftime("%Y%m%d%H%M%S",)))
1207 gc.collect()
1208 scanner.dump_all_objects(filename)
1209 except ImportError as e:
1210@@ -50,7 +50,7 @@
1211 """Dump GC usage."""
1212 try:
1213 dump_dir = settings.LOG_FOLDER
1214- tstamp = datetime.datetime.utcnow().strftime("%Y%m%d%H%M%S")
1215+ tstamp = now().strftime("%Y%m%d%H%M%S")
1216 fname = os.path.join(dump_dir, 'gcdump-%s.txt' % (tstamp,))
1217 fh = open(fname, "w")
1218
1219
1220=== modified file 'src/monitoring/reactor.py'
1221--- src/monitoring/reactor.py 2015-09-19 16:32:27 +0000
1222+++ src/monitoring/reactor.py 2016-03-06 23:21:42 +0000
1223@@ -18,6 +18,8 @@
1224
1225 """A thread that measures responsiveness of the twisted reactor."""
1226
1227+from __future__ import unicode_literals
1228+
1229 import os
1230 import sys
1231 import time
1232
1233=== modified file 'src/monitoring/stats_worker.py'
1234--- src/monitoring/stats_worker.py 2015-09-19 16:32:27 +0000
1235+++ src/monitoring/stats_worker.py 2016-03-06 23:21:42 +0000
1236@@ -18,6 +18,7 @@
1237
1238 """Process that inform metrics about other processes and the machine."""
1239
1240+from __future__ import unicode_literals
1241 from __future__ import with_statement
1242
1243 import os
1244
1245=== modified file 'src/monitoring/tests/test_dump.py'
1246--- src/monitoring/tests/test_dump.py 2015-09-19 16:32:27 +0000
1247+++ src/monitoring/tests/test_dump.py 2016-03-06 23:21:42 +0000
1248@@ -18,6 +18,8 @@
1249
1250 """Tests for monitoring dump."""
1251
1252+from __future__ import unicode_literals
1253+
1254 import gc
1255 import shutil
1256 import tempfile
1257
1258=== modified file 'src/monitoring/tests/test_reactor.py'
1259--- src/monitoring/tests/test_reactor.py 2015-09-19 16:32:27 +0000
1260+++ src/monitoring/tests/test_reactor.py 2016-03-06 23:21:42 +0000
1261@@ -18,6 +18,8 @@
1262
1263 """Tests for the ReactorInspector."""
1264
1265+from __future__ import unicode_literals
1266+
1267 import time
1268 import logging
1269 import threading
1270
1271=== modified file 'src/monitoring/tests/test_stats_worker.py'
1272--- src/monitoring/tests/test_stats_worker.py 2015-09-19 16:32:27 +0000
1273+++ src/monitoring/tests/test_stats_worker.py 2016-03-06 23:21:42 +0000
1274@@ -18,6 +18,8 @@
1275
1276 """Tests for the stats worker."""
1277
1278+from __future__ import unicode_literals
1279+
1280 import logging
1281
1282 from mocker import Mocker, expect
1283
1284=== modified file 'src/server/tests/test_server.py'
1285--- src/server/tests/test_server.py 2016-02-20 17:03:19 +0000
1286+++ src/server/tests/test_server.py 2016-03-06 23:21:42 +0000
1287@@ -21,7 +21,6 @@
1288 """Test Storage Server requests/responses."""
1289
1290 import collections
1291-import datetime
1292 import logging
1293 import os
1294 import types
1295@@ -29,6 +28,7 @@
1296 import uuid
1297 import weakref
1298
1299+from django.utils.timezone import now
1300 from metrics.metricsconnector import MetricsConnector
1301 from mocker import expect, Mocker, MockerTestCase, ARGS, KWARGS, ANY
1302 from twisted.python.failure import Failure
1303@@ -2433,7 +2433,7 @@
1304 self.patch(settings.api_server, 'MAX_DELTA_INFO', 5)
1305 # create a few fake nodes
1306 nodes = []
1307- now = datetime.datetime.utcnow()
1308+ right_now = now()
1309 for i in range(10):
1310 node = FakeNode()
1311 node.id = str(uuid.uuid4())
1312@@ -2446,7 +2446,7 @@
1313 node.content_hash = 'sha1:foo'
1314 node.crc32 = 10
1315 node.size = 1024
1316- node.last_modified = int(time.mktime(now.timetuple()))
1317+ node.last_modified = int(time.mktime(right_now.timetuple()))
1318 nodes.append(node)
1319 gen = self.response._send_delta_info(nodes, 'share_id')
1320 gen.next()
1321@@ -2508,7 +2508,7 @@
1322 self.patch(settings.api_server, 'GET_FROM_SCRATCH_LIMIT', 5)
1323 # build fake nodes
1324 nodes = []
1325- now = datetime.datetime.now()
1326+ right_now = now()
1327 for i in range(20):
1328 node = FakeNode()
1329 node.id = str(uuid.uuid4())
1330@@ -2522,7 +2522,7 @@
1331 node.content_hash = 'sha1:foo'
1332 node.crc32 = 10
1333 node.size = 1024
1334- node.last_modified = int(time.mktime(now.timetuple()))
1335+ node.last_modified = int(time.mktime(right_now.timetuple()))
1336 nodes.append(node)
1337 # set required caps
1338 self.response.protocol.working_caps = server.PREFERRED_CAP

Subscribers

People subscribed via source and target branches

to all changes: