Merge lp:~canonical-ca-hackers/ubuntu-webcatalog/fix-scroll-backwards-error into lp:ubuntu-webcatalog

Proposed by Łukasz Czyżykowski
Status: Merged
Approved by: Anthony Lenton
Approved revision: 174
Merged at revision: 176
Proposed branch: lp:~canonical-ca-hackers/ubuntu-webcatalog/fix-scroll-backwards-error
Merge into: lp:ubuntu-webcatalog
Diff against target: 87 lines (+20/-9)
3 files modified
src/webcatalog/management/commands/cleanup.py (+4/-3)
src/webcatalog/tasks.py (+0/-1)
src/webcatalog/tests/test_commands.py (+16/-5)
To merge this branch: bzr merge lp:~canonical-ca-hackers/ubuntu-webcatalog/fix-scroll-backwards-error
Reviewer Review Type Date Requested Status
Anthony Lenton (community) Approve
Review via email: mp+123764@code.launchpad.net

Commit message

Fix 'cursor can only scan forward' error on cleanup tasks.

Description of the change

Overview
========
This branch fixes the 'cursor can only scan forward' error raised by the cleanup tasks.

Details
=======
If the removal code runs longer than the specified (or default) batch time, the recalculation of batch_size can drive it down to 0 or -1, and passing such a value to the SQL FETCH statement results in an error.
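
For illustration only, here is a rough sketch of that failure mode; the function name and the exact scaling formula below are assumptions, since the recalculation code itself is not part of this diff:

    # Hypothetical sketch of the batch size adjustment, not the code in
    # cleanup.py.
    def next_batch_size(batch_size, batch_secs, actual_batch_time):
        # Shrink the batch in proportion to how badly the last batch
        # overran its time budget.
        adjusted = int(batch_size * batch_secs / actual_batch_time)
        # Without a lower bound, a large overrun (or batch_secs=0) drives
        # the value to zero or below, and that is what FETCH receives.
        return max(adjusted, 1)

The final max(adjusted, 1) is the clamp this branch describes; everything above it shows how the value goes bad in the first place.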

The simplest fix is to clamp batch_size to 1 in that case. The PostgreSQL documentation states that when the cursor is exhausted, FETCH simply returns 0 rows rather than raising an error.
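
As a rough sketch of why the error surfaces at the SQL level (the cleaner classes are not part of this diff, so the table, cursor and function names below are assumptions):

    # Illustrative only: a forward-only cursor declared by the cleaner.
    def declare_expired_keys_cursor(cursor):
        cursor.execute(
            "DECLARE expired_keys NO SCROLL CURSOR FOR "
            "SELECT session_key FROM django_session "
            "WHERE expire_date < now()")

    def remove_batch(cursor, batch_size):
        # FETCH FORWARD with a negative count means "scan backwards", which
        # a NO SCROLL cursor rejects with "cursor can only scan forward";
        # with a positive count on an exhausted cursor it simply returns
        # zero rows, so clamping batch_size to 1 is safe.
        cursor.execute("FETCH FORWARD %d FROM expired_keys" % batch_size)
        keys = [row[0] for row in cursor.fetchall()]
        if keys:
            cursor.execute(
                "DELETE FROM django_session "
                "WHERE session_key = ANY(%s)", (keys,))
        return len(keys)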

Revision history for this message
Anthony Lenton (elachuni) :
review: Approve

Preview Diff

=== modified file 'src/webcatalog/management/commands/cleanup.py'
--- src/webcatalog/management/commands/cleanup.py 2012-07-02 13:51:23 +0000
+++ src/webcatalog/management/commands/cleanup.py 2012-09-11 14:43:24 +0000
@@ -131,7 +131,7 @@
                 "Supported tables are: %s" % (table, CLEANERS.keys()))
             raise CommandError(msg)
         cleaner = cleaner_cls(self)
-        cur = connection.cursor()
+        cursor = connection.cursor()
 
         removed = -1
         total_removed = 0
@@ -142,13 +142,14 @@
             # We don't want to hold the cursor open too long to allow
             # vacuum to reclaim rows. 1 hour default before it is reopened.
             if time.time() - cursor_opened_time > cursor_secs:
-                cleaner.declare_expired_keys_cursor(cur)
+                cleaner.declare_expired_keys_cursor(cursor)
                 cursor_opened_time = time.time()
 
             # Remove a batch of session rows.
             batch_start = time.time()
-            removed = cleaner.remove_batch(cur, batch_size)
+            removed = cleaner.remove_batch(cursor, batch_size)
             actual_batch_time = time.time() - batch_start
+
             total_removed += removed
             message = "Removed %d rows (%d total removed). Batch size %d\n"
             self.output(message % (removed, total_removed, batch_size), 1)
 
=== modified file 'src/webcatalog/tasks.py'
--- src/webcatalog/tasks.py 2012-06-29 19:51:06 +0000
+++ src/webcatalog/tasks.py 2012-09-11 14:43:24 +0000
@@ -1,7 +1,6 @@
 from time import sleep
 
 from celery.task import task
-from django.conf import settings
 from django.core.management import call_command
 
 
 
=== modified file 'src/webcatalog/tests/test_commands.py'
--- src/webcatalog/tests/test_commands.py 2012-08-29 21:46:54 +0000
+++ src/webcatalog/tests/test_commands.py 2012-09-11 14:43:24 +0000
@@ -765,7 +765,7 @@
         # relevant stats.
         onion = self.factory.make_distroseries(code_name='onion')
         orig_timestamp = datetime.utcnow() - timedelta(2)
-        import_record = ReviewStatsImport.objects.create(
+        ReviewStatsImport.objects.create(
             distroseries=onion, last_import=orig_timestamp)
 
         call_command('import_ratings_stats', 'onion')
@@ -1078,10 +1078,7 @@
 
     @skipUnless(connection.vendor == 'postgresql', "Requires postgresql")
     def test_cleanup_sessions(self):
-        now = datetime.now()
-        for i in range(8):
-            expire_date = now + timedelta(seconds=5000 * (4 - i))
-            self.factory.make_session(expire_date=expire_date)
+        self.create_django_sessions()
         self.assertEqual(8, Session.objects.count())
 
         call_command('cleanup', 'django_session')
@@ -1089,6 +1086,20 @@
         self.assertEqual(4, Session.objects.count())
         self.check_expected_output()
 
+    def create_django_sessions(self):
+        now = datetime.now()
+        for i in range(8):
+            expire_date = now + timedelta(seconds=5000 * (4 - i))
+            self.factory.make_session(expire_date=expire_date)
+
+    @skipUnless(connection.vendor == 'postgresql', "Requires postgresql")
+    def test_cleanup_session_when_it_takes_too_long(self):
+        self.create_django_sessions()
+
+        call_command('cleanup', 'django_session', batch_secs=0)
+
+        self.assertEqual(0, Session.objects.count())
+
 
 class ImportAllAppInstallDataTestCase(TestCaseWithFactory):
     @patch('sys.stdout')
