Merge lp:~brian-murray/daisy/no-pycassa into lp:daisy
Proposed by: Brian Murray
Status: Merged
Merged at revision: 898
Proposed branch: lp:~brian-murray/daisy/no-pycassa
Merge into: lp:daisy
Diff against target: 239 lines (+99/-62) (has conflicts), 2 files modified
  daisy/submit.py (+64/-1)
  daisy/submit_core.py (+35/-61)
  Text conflict in daisy/submit.py
To merge this branch: bzr merge lp:~brian-murray/daisy/no-pycassa
Related bugs: none
Reviewer | Review Type | Date Requested | Status
---|---|---|---
Steve Langasek | | | Needs Fixing
Review via email: mp+373484@code.launchpad.net
Commit message
Description of the change
Revision history for this message
Steve Langasek (vorlon):
review: Needs Fixing
Preview Diff
=== modified file 'daisy/submit.py'
--- daisy/submit.py 2019-08-12 16:09:19 +0000
+++ daisy/submit.py 2019-10-01 22:54:14 +0000
@@ -27,12 +27,19 @@

from oopsrepository import config as oopsconfig
from oopsrepository import oopses
+<<<<<<< TREE
import pycassa
from pycassa.cassandra.ttypes import NotFoundException, InvalidRequestException
from pycassa.pool import MaximumRetryException
+=======
+
+from apport import Report
+from binascii import hexlify, unhexlify
+from cassandra import WriteTimeout
+from cassandra.query import SimpleStatement
+>>>>>>> MERGE-SOURCE

from daisy import config
-import apport
from daisy import utils
from daisy.metrics import get_metrics

@@ -47,6 +54,7 @@
metrics = get_metrics('daisy.%s' % socket.gethostname())
logger = logging.getLogger('gunicorn.error')

+<<<<<<< TREE

def update_release_pkg_counter(column_family, release, src_package, date):
column_family.insert('%s:%s' % (release, src_package), {date: 1})
@@ -56,6 +64,32 @@

def create_report_from_bson(data):
report = apport.Report()
+=======
+counters_update = None
+proposed_counters_update = None
+
+def update_counters(_session, release, src_package, date, src_version=None):
+ cql_release = release.replace("'", "''")
+ if src_version:
+ key = '%s:%s:%s' % (cql_release, src_package, src_version)
+ else:
+ key = '%s:%s' % (cql_release, src_package)
+ _session.execute(counters_update, [key, date])
+
+
+def update_proposed_counters(_session, release, src_package, date,
+ src_version=None):
+ cql_release = release.replace("'", "''")
+ if src_version:
+ key = '%s:%s:%s' % (cql_release, src_package, src_version)
+ else:
+ key = '%s:%s' % (cql_release, src_package)
+ _session.execute(proposed_counters_update, [key, date])
+
+
+def create_minimal_report_from_bson(data):
+ report = Report()
+>>>>>>> MERGE-SOURCE
for key in data:
try:
report[key.encode('UTF-8')] = data[key].encode('UTF-8')
@@ -159,7 +193,15 @@
proc_status = data.get('ProcStatus', '')
if date and exec_path and proc_status and system_token:
try:
+<<<<<<< TREE
reported_crash_ids = systemoopshashes_cf.get(system_token)
+=======
+ cql_system_token = '0x' + hexlify(system_token)
+ results = _session.execute(
+ SimpleStatement("SELECT column1 FROM \"%s\" WHERE key = %s LIMIT 1"
+ % ('SystemOOPSHashes', cql_system_token)))
+ reported_crash_ids = (row[0] for row in results)
+>>>>>>> MERGE-SOURCE
crash_id = '%s:%s:%s' % (date, exec_path, proc_status)
if type(crash_id) == unicode:
crash_id = crash_id.encode('utf-8')
@@ -300,6 +342,17 @@
If a core file is to be requested, return (True, 'UUID CORE')
If no further action is needed, return (True, 'UUID OOPSID')
'''
+<<<<<<< TREE
+=======
+ indexes_select = None
+ if not indexes_select:
+ indexes_select = \
+ _session.prepare('SELECT value FROM "Indexes" WHERE key = ? and column1 = ? LIMIT 1')
+ stacktrace_select = None
+ if not stacktrace_select:
+ stacktrace_select = \
+ _session.prepare('SELECT value FROM "Stacktrace" WHERE key = ? and column1 = ? LIMIT 1')
+>>>>>>> MERGE-SOURCE

indexes_fam = pycassa.ColumnFamily(_pool, 'Indexes')
oops_cf = pycassa.ColumnFamily(_pool, 'OOPS')
@@ -369,10 +422,20 @@
if crash_sig and not retry:
# The crash is a duplicate so we don't need this data.
# Stacktrace, and ThreadStacktrace were already not accepted
+<<<<<<< TREE
if 'ProcMaps' in report:
unneeded_columns = ['Disassembly', 'ProcMaps', 'ProcStatus',
'Registers', 'StacktraceTop']
oops_cf.remove(oops_id, columns=unneeded_columns)
+=======
+ if 'ProcMaps' in data:
+ oops_delete = _session.prepare('DELETE FROM "OOPS" WHERE key = ? AND column1 = ?')
+ unneeded_columns = ('Disassembly', 'ProcMaps', 'ProcStatus',
+ 'Registers', 'StacktraceTop')
+ for unneeded_column in unneeded_columns:
+ _session.execute(oops_delete,
+ [oops_id, unneeded_column])
+>>>>>>> MERGE-SOURCE
# We have already retraced for this address signature, so this
# crash can be immediately bucketed.
utils.bucket(oops_config, oops_id, crash_sig, data)

=== modified file 'daisy/submit_core.py'
--- daisy/submit_core.py 2019-08-12 16:09:19 +0000
+++ daisy/submit_core.py 2019-10-01 22:54:14 +0000
@@ -54,53 +54,31 @@
return False
return True

-def swift_delete_ignoring_error(conn, bucket, oops_id):
- global _cached_swift
- import swiftclient
+def swift_delete_ignoring_error(swift_cmd, bucket, oops_id):
+ from subprocess import check_call, CalledProcessError
+ swift_delete_cmd = swift_cmd + ['delete', bucket, oops_id]
try:
- conn.delete_object(bucket, oops_id)
- except swiftclient.ClientException:
- metrics.meter('swift_delete_error')
+ check_call(swift_delete_cmd)
+ except CalledProcessError:
+ pass

def write_to_swift(environ, fileobj, oops_id, provider_data):
'''Write the core file to OpenStack Swift.'''
- global _cached_swift
- import swiftclient
- opts = {'tenant_name': provider_data['os_tenant_name'],
- 'region_name': provider_data['os_region_name']}
- if not _cached_swift:
- _cached_swift = swiftclient.client.Connection(
- provider_data['os_auth_url'],
- provider_data['os_username'],
- provider_data['os_password'],
- os_options=opts,
- auth_version='2.0')
- # if there is no token we should recreate the connection
- if not _cached_swift.token:
- _cached_swift = swiftclient.client.Connection(
- provider_data['os_auth_url'],
- provider_data['os_username'],
- provider_data['os_password'],
- os_options=opts,
- auth_version='2.0')
- # it seems to still be None sometimes
- msg = 'swift_token: %s' % (_cached_swift.token)
- logger.info(msg)
+ from subprocess import check_call, CalledProcessError
+
+ swift_cmd = ['/usr/bin/swift',
+ '--os-auth-url', '%s' % provider_data['os_auth_url'],
+ '--os-username', '%s' % provider_data['os_username'],
+ '--os-password', '%s' % provider_data['os_password'],
+ '--os-tenant-name', '%s' % provider_data['os_tenant_name'],
+ '--os-region-name', '%s' % provider_data['os_region_name'],
+ '--auth-version', '2.0']
bucket = provider_data['bucket']
- _cached_swift.http_conn = None
- if (provider_data.get('usage_max_mb')):
- headers = _cached_swift.head_account()
- bytes_used = int(headers.get('x-account-bytes-used', 0))
- # Keep a reference to the number of bytes used by swift in a possible
- # future OOPS report. We may find that a series of OOPSes are actually
- # related to heavy load on Swift, as has been the case before.
- environ['swift.bytes_used'] = bytes_used
- if (not write_policy_allow(oops_id, bytes_used, provider_data)):
- msg = '%s bytes_used issue' % (oops_id)
- logger.info(msg)
- return False
-
- _cached_swift.put_container(bucket)
+ swift_post_cmd = swift_cmd + ['post', bucket]
+ try:
+ check_call(swift_post_cmd)
+ except CalledProcessError:
+ return False
try:
coredir = '/tmp/cores-%s' % os.getpid()
if not os.path.exists(coredir):
@@ -117,25 +95,21 @@
t_size = os.path.getsize(t.name)
msg = '%s has a %i byte core file' % (oops_id, t_size)
logger.info(msg)
- # Don't set a content_length (that we don't have) to force a chunked
- # transfer.
- _cached_swift.put_object(bucket, oops_id, t, content_length=t_size)
- except IOError, e:
- swift_delete_ignoring_error(_cached_swift, bucket, oops_id)
- if e.message == 'request data read error':
- msg = 'IOError when trying to add (%s) to bucket: %s' % (oops_id, str(e))
- logger.info(msg)
- return False
- else:
- msg = 'IOError when trying to add (%s) to bucket: %s' % (oops_id, str(e))
- logger.info(msg)
- metrics.meter('swift_ioerror')
- raise
- except swiftclient.ClientException as e:
- msg = 'ClientException when trying to add (%s) to bucket: %s' % (oops_id, str(e))
- logger.info(msg)
- metrics.meter('swift_client_exception')
- swift_delete_ignoring_error(_cached_swift, bucket, oops_id)
+ swift_upload_cmd = swift_cmd + ['upload', '--object-name',
+ oops_id, bucket,
+ os.path.join(coredir, t.name)]
+ check_call(swift_upload_cmd)
+ except CalledProcessError as e:
+ swift_delete_ignoring_error(swift_cmd, bucket, oops_id)
+ msg = 'CalledProcessError when trying to add (%s) to bucket: %s' % \
+ (oops_id, str(e.returncode))
+ logger.info(msg)
+ return False
+ except IOError as e:
+ swift_delete_ignoring_error(swift_cmd, bucket, oops_id)
+ msg = 'IOError when trying to add (%s) to bucket: %s' % \
+ (oops_id, str(e))
+ logger.info(msg)
return False
msg = 'CORE for (%s) written to bucket' % (oops_id)
logger.info(msg)
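
As background for the cassandra-driver half of the conflict in daisy/submit.py: update_counters() and update_proposed_counters() execute module-level prepared statements (counters_update, proposed_counters_update) that are left as None in this hunk and presumably prepared during application setup, which is not part of this diff. The following is a minimal sketch of how such a prepared counter update could be wired up with the DataStax cassandra driver; the contact point, keyspace, and the "Counters" table name are placeholders, not values taken from daisy.

from cassandra.cluster import Cluster

# Placeholder contact point and keyspace; daisy reads these from its config.
cluster = Cluster(['127.0.0.1'])
session = cluster.connect('crashdb')

# Prepared once at startup and then reused on every submission, standing in
# for the counters_update / proposed_counters_update globals in the diff.
# "Counters" is a hypothetical counter table
# (key text, column1 text, value counter, PRIMARY KEY (key, column1)).
counters_update = session.prepare(
    'UPDATE "Counters" SET value = value + 1 WHERE key = ? AND column1 = ?')

def update_counters(_session, release, src_package, date, src_version=None):
    # Mirror the branch: escape single quotes and build the thrift-era row key.
    cql_release = release.replace("'", "''")
    if src_version:
        key = '%s:%s:%s' % (cql_release, src_package, src_version)
    else:
        key = '%s:%s' % (cql_release, src_package)
    _session.execute(counters_update, [key, date])

# Example call with made-up values:
update_counters(session, 'Ubuntu 19.10', 'bash', '20191001', '5.0-4ubuntu1')

Preparing the statement once and rebinding the key and date per submission is the usual driver idiom, and it replaces the old update_release_pkg_counter() call to pycassa's ColumnFamily.insert() on a counter column family.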
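
The reported-crash lookup takes a different route: SystemOOPSHashes keys are blobs in the thrift-era schema, so the branch hexlifies the system token and inlines it as a 0x literal in a SimpleStatement rather than binding it. Below is a rough sketch of that query shape, with a made-up token and placeholder keyspace; note that the LIMIT 1 here returns at most one row, while the old systemoopshashes_cf.get() returned a batch of columns.

from binascii import hexlify
from cassandra.cluster import Cluster
from cassandra.query import SimpleStatement

cluster = Cluster(['127.0.0.1'])
session = cluster.connect('crashdb')  # placeholder keyspace

system_token = 'deadbeefcafe'  # placeholder whoopsie identifier
# Blob keys can be written inline in CQL as 0x-prefixed hex literals;
# hexlify() here follows the branch's Python 2 usage (str in, str out).
cql_system_token = '0x' + hexlify(system_token)
results = session.execute(SimpleStatement(
    'SELECT column1 FROM "SystemOOPSHashes" WHERE key = %s LIMIT 1'
    % cql_system_token))
reported_crash_ids = (row[0] for row in results)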
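
On the daisy/submit_core.py side, the branch drops python-swiftclient and its cached-connection and token-refresh handling in favour of shelling out to the swift CLI for container creation, upload, and cleanup. A minimal sketch of that flow, with placeholder credentials, container, and paths standing in for the values daisy builds from provider_data:

from subprocess import check_call, CalledProcessError

# All of these values are placeholders; daisy builds swift_cmd from
# provider_data (auth URL, username, password, tenant, region).
swift_cmd = ['/usr/bin/swift',
             '--os-auth-url', 'https://keystone.example.com/v2.0/',
             '--os-username', 'daisy',
             '--os-password', 'secret',
             '--os-tenant-name', 'daisy_project',
             '--os-region-name', 'region-1',
             '--auth-version', '2.0']
bucket = 'cores'
oops_id = 'example-oops-id'
core_path = '/tmp/cores-1234/example.core'

try:
    # 'swift post <container>' creates the container if it does not exist.
    check_call(swift_cmd + ['post', bucket])
    # Upload the core file under the OOPS id as the object name.
    check_call(swift_cmd + ['upload', '--object-name', oops_id, bucket,
                            core_path])
except CalledProcessError:
    # Mirror swift_delete_ignoring_error(): best-effort cleanup, then give up.
    try:
        check_call(swift_cmd + ['delete', bucket, oops_id])
    except CalledProcessError:
        pass

Invoking the CLI per request sidesteps the stale-token reconnection logic the old swiftclient code needed, at the cost of spawning a subprocess for each post, upload, and delete.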