Merge lp:~bigwhale/gwibber/threading into lp:gwibber
- threading
- Merge into trunk
Proposed by
David Klasinc
Status: | Merged |
---|---|
Merged at revision: | 1254 |
Proposed branch: | lp:~bigwhale/gwibber/threading |
Merge into: | lp:gwibber |
Diff against target: |
1912 lines (+447/-381) 24 files modified
bin/gwibber-service (+35/-40) gwibber/client.py (+8/-6) gwibber/microblog/dispatcher.py (+187/-153) gwibber/microblog/network.py (+36/-61) gwibber/microblog/plugins/buzz/__init__.py (+4/-3) gwibber/microblog/plugins/digg/__init__.py (+4/-2) gwibber/microblog/plugins/facebook/__init__.py (+16/-14) gwibber/microblog/plugins/flickr/__init__.py (+6/-2) gwibber/microblog/plugins/foursquare/__init__.py (+12/-6) gwibber/microblog/plugins/friendfeed/__init__.py (+5/-2) gwibber/microblog/plugins/identica/__init__.py (+17/-11) gwibber/microblog/plugins/pingfm/__init__.py (+4/-0) gwibber/microblog/plugins/qaiku/__init__.py (+3/-1) gwibber/microblog/plugins/statusnet/__init__.py (+12/-10) gwibber/microblog/plugins/twitter/__init__.py (+18/-17) gwibber/microblog/storage.py (+18/-16) gwibber/microblog/util/__init__.py (+5/-4) gwibber/microblog/util/couchmigrate.py (+7/-4) gwibber/microblog/util/exceptions.py (+6/-4) gwibber/microblog/util/keyring.py (+2/-3) gwibber/microblog/util/log.py (+21/-7) gwibber/microblog/util/resources.py (+7/-9) po/Makefile.in.in (+9/-4) tests/plugins/test/__init__.py (+5/-2) |
To merge this branch: | bzr merge lp:~bigwhale/gwibber/threading |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
Ken VanDine | Approve | ||
Review via email: mp+92523@code.launchpad.net |
Commit message
Description of the change
Finished threading and logging.
To post a comment you must log in.
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1 | === modified file 'bin/gwibber-service' |
2 | --- bin/gwibber-service 2011-11-01 13:54:06 +0000 |
3 | +++ bin/gwibber-service 2012-02-10 17:26:15 +0000 |
4 | @@ -1,9 +1,8 @@ |
5 | #!/usr/bin/python |
6 | |
7 | from gi.repository import Gio |
8 | -import sys, optparse, gobject, dbus |
9 | +import sys, gobject, dbus |
10 | from os.path import join, dirname, exists, realpath, abspath |
11 | -from os import popen, getpid |
12 | from dbus.mainloop.glib import DBusGMainLoop |
13 | |
14 | DBusGMainLoop(set_as_default=True) |
15 | @@ -13,31 +12,11 @@ |
16 | SOURCE_DIR = join(LAUNCH_DIR, "..", "gwibber") |
17 | DISPATCHER = join(SOURCE_DIR, "microblog", "dispatcher.py") |
18 | |
19 | -###################################################################### |
20 | -# Setup path |
21 | -if exists(DISPATCHER): |
22 | - sys.path.insert(0, realpath(dirname(SOURCE_DIR))) |
23 | - try: |
24 | - from gwibber.microblog.util import log |
25 | - log.logger.name = "Gwibber Service" |
26 | - log.logger.info("Running from the source tree") |
27 | - from gwibber.microblog import dispatcher |
28 | - finally: |
29 | - del sys.path[0] |
30 | - |
31 | -else: |
32 | - from gwibber.microblog.util import log |
33 | - log.logger.name = "Gwibber Service" |
34 | - log.logger.info("Running from the system path") |
35 | - from gwibber.microblog import dispatcher |
36 | - |
37 | gsettings = Gio.Settings.new("org.gwibber.preferences") |
38 | debug = gsettings.get_boolean("debug") |
39 | -if debug: |
40 | - log.logger.setLevel(log.logging.DEBUG) |
41 | |
42 | ###################################################################### |
43 | -# Options |
44 | +# Options |
45 | from optparse import OptionParser |
46 | parser = OptionParser() |
47 | parser.add_option("-d", "--debug", action="store_true", |
48 | @@ -48,26 +27,42 @@ |
49 | help="Log to stdout") |
50 | (options, args) = parser.parse_args() |
51 | |
52 | +if options.stdout: |
53 | + console = True |
54 | +else: |
55 | + console = False |
56 | if options.debug or debug: |
57 | - log.logger.setLevel(log.logging.DEBUG) |
58 | -else: |
59 | - log.logger.setLevel(log.logging.INFO) |
60 | - |
61 | -if options.stdout: |
62 | - # define a Handler which writes INFO messages or higher to the sys.stderr |
63 | - console = log.logging.StreamHandler() |
64 | - if options.debug or debug: |
65 | - console.setLevel(log.logging.DEBUG) |
66 | - else: |
67 | - console.setLevel(log.logging.INFO) |
68 | - # set a format which is simpler for console use |
69 | - formatter = log.logging.Formatter('%(name)-12s: %(levelname)-8s %(message)s') |
70 | - console.setFormatter(formatter) |
71 | - log.logger.addHandler(console) |
72 | + debuglog = True |
73 | +else: |
74 | + debuglog = False |
75 | + |
76 | +###################################################################### |
77 | +# Setup path |
78 | +if exists(DISPATCHER): |
79 | + sys.path.insert(0, realpath(dirname(SOURCE_DIR))) |
80 | + try: |
81 | + from gwibber.microblog.util.log import setup_logging |
82 | + setup_logging(console, debuglog) |
83 | + import logging |
84 | + logger = logging.getLogger("Service") |
85 | + logger.info("Service starting") |
86 | + logger.info("Running from the source tree") |
87 | + from gwibber.microblog import dispatcher |
88 | + finally: |
89 | + del sys.path[0] |
90 | + |
91 | +else: |
92 | + from gwibber.microblog.util.log import setup_logging |
93 | + import logging |
94 | + setup_logging(console, debuglog) |
95 | + logger = logging.getLogger("Service") |
96 | + logger.info("Service starting") |
97 | + logger.info("Running from the source tree") |
98 | + from gwibber.microblog import dispatcher |
99 | |
100 | # if gwibber-service is already running, don't start |
101 | if "com.Gwibber.Connection" in dbus.SessionBus().list_names(): |
102 | - log.logger.info("Found gwibber-service already running, exiting") |
103 | + logger.info("Found gwibber-service already running, exiting") |
104 | quit() |
105 | |
106 | """ |
107 | @@ -79,7 +74,7 @@ |
108 | message_monitor = dispatcher.MessagesMonitor() |
109 | """ |
110 | |
111 | -log.logger.debug("Setting up monitors") |
112 | +logger.debug("Setting up monitors") |
113 | connection_monitor = dispatcher.ConnectionMonitor() |
114 | urlshortener = dispatcher.URLShorten() |
115 | translator = dispatcher.Translate() |
116 | |
117 | === modified file 'gwibber/client.py' |
118 | --- gwibber/client.py 2011-06-07 18:29:53 +0000 |
119 | +++ gwibber/client.py 2012-02-10 17:26:15 +0000 |
120 | @@ -6,13 +6,15 @@ |
121 | import microblog.util |
122 | from microblog.util import resources |
123 | |
124 | +import logging |
125 | +logger = logging.getLogger("Client") |
126 | + |
127 | import gettext |
128 | from gettext import lgettext as _ |
129 | if hasattr(gettext, 'bind_textdomain_codeset'): |
130 | gettext.bind_textdomain_codeset('gwibber','UTF-8') |
131 | gettext.textdomain('gwibber') |
132 | |
133 | -from gwibber.microblog.util import log |
134 | from microblog.util.const import * |
135 | # Try to import * from custom, install custom.py to include packaging |
136 | # customizations like distro API keys, etc |
137 | @@ -104,7 +106,7 @@ |
138 | |
139 | # set state online/offline |
140 | if not self.connection.isConnected(): |
141 | - log.logger.info("Setting to Offline") |
142 | + logger.info("Setting to Offline") |
143 | self.actions.get_action("refresh").set_sensitive(False) |
144 | |
145 | # Delay resizing input area or else it doesn't work |
146 | @@ -477,13 +479,13 @@ |
147 | return True |
148 | else: |
149 | self.save_window_settings() |
150 | - log.logger.info("Gwibber Client closed") |
151 | + logger.info("Gwibber Client closed") |
152 | gtk.main_quit() |
153 | |
154 | def on_quit(self, *args): |
155 | self.service.Quit() |
156 | self.save_window_settings() |
157 | - log.logger.info("Gwibber Client quit") |
158 | + logger.info("Gwibber Client quit") |
159 | gtk.main_quit() |
160 | |
161 | def on_search(self, *args): |
162 | @@ -676,11 +678,11 @@ |
163 | self.update_view() |
164 | |
165 | def on_connection_online(self, *args): |
166 | - log.logger.info("Setting to Online") |
167 | + logger.info("Setting to Online") |
168 | self.actions.get_action("refresh").set_sensitive(True) |
169 | |
170 | def on_connection_offline(self, *args): |
171 | - log.logger.info("Setting to Offline") |
172 | + logger.info("Setting to Offline") |
173 | self.actions.get_action("refresh").set_sensitive(False) |
174 | |
175 | class Client(dbus.service.Object): |
176 | |
177 | === modified file 'gwibber/microblog/dispatcher.py' |
178 | --- gwibber/microblog/dispatcher.py 2012-01-11 17:06:18 +0000 |
179 | +++ gwibber/microblog/dispatcher.py 2012-02-10 17:26:15 +0000 |
180 | @@ -1,20 +1,20 @@ |
181 | #!/usr/bin/env python |
182 | # -*- coding: utf-8 -*- |
183 | |
184 | -import multiprocessing, traceback, json |
185 | +import traceback, json, random, string |
186 | import gobject, dbus, dbus.service |
187 | import sqlite3, mx.DateTime, re, uuid |
188 | import urlshorter, storage, network, util, uploader |
189 | from gettext import lgettext as _ |
190 | -import signal |
191 | - |
192 | -from util import log |
193 | -from util import resources |
194 | -from util import exceptions |
195 | +from threading import Thread |
196 | +from datetime import datetime, timedelta |
197 | + |
198 | +import logging |
199 | +logger = logging.getLogger("Dispatcher") |
200 | + |
201 | from util.const import * |
202 | import subprocess |
203 | |
204 | - |
205 | try: |
206 | from gi.repository import Unity, Dbusmenu |
207 | except: |
208 | @@ -41,10 +41,10 @@ |
209 | PROTOCOLS = {} |
210 | for p in util.resources.get_plugin_dirs()[0]: |
211 | PROTOCOLS[str(p)] = __import__("%s" % p, fromlist='*') |
212 | - print "Loading plugin %s version %s" % (PROTOCOLS[str(p)].PROTOCOL_INFO["name"], PROTOCOLS[str(p)].PROTOCOL_INFO["version"]) |
213 | + # print "Loading plugin %s version %s" % (PROTOCOLS[str(p)].PROTOCOL_INFO["name"], PROTOCOLS[str(p)].PROTOCOL_INFO["version"]) |
214 | #print "Path %s" % str(PROTOCOLS[str(p)].__file__) |
215 | # FIXME: Figure out why the logger doesn't log here |
216 | - #log.logger.info("Loading plugin for %s", p) |
217 | + #logger.info("Loading plugin for %s", p) |
218 | |
219 | FEATURES = json.loads(GWIBBER_OPERATIONS) |
220 | SERVICES = dict([(k, v.PROTOCOL_INFO) for k, v in PROTOCOLS.items()]) |
221 | @@ -54,71 +54,85 @@ |
222 | if gsettings.get_int("interval") < 5: |
223 | gsettings.set_int("interval", 5) |
224 | |
225 | -log.logger.name = "Gwibber Dispatcher" |
226 | - |
227 | -def perform_operation((account, opname, args, transient)): |
228 | - try: |
229 | - stream = FEATURES[opname]["stream"] or opname |
230 | - logtext = "<%s:%s>" % (account["service"], opname) |
231 | - |
232 | - logtext = "<%s:%s>" % (account["service"], opname) |
233 | - log.logger.debug("%s Performing operation", logtext) |
234 | - |
235 | - args = dict((str(k), v) for k, v in args.items()) |
236 | - message_data = PROTOCOLS[account["service"]].Client(account)(opname, **args) |
237 | - text_cleaner = re.compile(u"[: \n\t\r♻♺]+|@[^ ]+|![^ ]+|#[^ ]+") # signs, @nickname, !group, #tag |
238 | - new_messages = [] |
239 | - |
240 | - if message_data is not None: |
241 | - for m in message_data: |
242 | - try: |
243 | - if isinstance(m, dict) and m.has_key("mid"): |
244 | - m["id"] = uuid.uuid1().hex |
245 | - m["operation"] = opname |
246 | - m["stream"] = m.get("stream", stream) |
247 | - m["transient"] = transient |
248 | - m["time"] = m.get("time", 0) |
249 | - if not m["text"]: m["text"] = "" |
250 | - m["rtl"] = util.isRTL(re.sub(text_cleaner, "", m["text"].decode('utf-8'))) |
251 | - if m.has_key("type"): |
252 | - if m["type"] == "link": m["stream"] = "links" |
253 | - if m["type"] == "video": m["stream"] = "videos" |
254 | - if m["type"] == "photo": m["stream"] = "images" |
255 | - |
256 | - log.logger.debug("%s Adding record", logtext) |
257 | - |
258 | - new_messages.insert(0, ( |
259 | - m["id"], |
260 | - m["mid"], |
261 | - m["account"], |
262 | - account["service"], |
263 | - opname, |
264 | - transient, |
265 | - m["stream"] or stream, |
266 | - m["time"], |
267 | - m["text"], |
268 | - m.get("sender", {}).get("is_me", None), |
269 | - m.get("to_me", None), |
270 | - m.get("sender", {}).get("nick", None), |
271 | - m.get("reply", {}).get("nick", None), |
272 | - json.dumps(m) |
273 | - )) |
274 | - elif isinstance(m, dict) and m.has_key("error"): |
275 | - new_messages.insert(0, ( |
276 | - "error", |
277 | - json.dumps(m) |
278 | - )) |
279 | - except Exception as e: |
280 | - if not "logtext" in locals(): logtext = "<UNKNOWN>" |
281 | - log.logger.error("%s Operation failed: %s", logtext, e) |
282 | - |
283 | - log.logger.debug("%s Finished operation", logtext) |
284 | - return ("Success", new_messages) |
285 | - except Exception as e: |
286 | - if not "logtext" in locals(): logtext = "<UNKNOWN>" |
287 | - log.logger.error("%s Operation failed", logtext) |
288 | - log.logger.debug("Traceback:\n%s", traceback.format_exc()) |
289 | - return ("Failure", traceback.format_exc()) |
290 | +class perform_operation(Thread): |
291 | + def __init__(self, job, callback_success, callback_failure): |
292 | + Thread.__init__(self) |
293 | + self.job = job |
294 | + self.t_start = None |
295 | + self.t_end = None |
296 | + self.t_runtime = timedelta(0) |
297 | + self.callback_success = callback_success |
298 | + self.callback_failure = callback_failure |
299 | + |
300 | + def run(self): |
301 | + self.id = ''.join(random.choice(string.letters) for i in xrange(5)) |
302 | + self.t_start = datetime.now() |
303 | + (account, opname, args, transient) = self.job |
304 | + try: |
305 | + stream = FEATURES[opname]["stream"] or opname |
306 | + logtext = "<%s:%s>" % (account["service"], opname) |
307 | + |
308 | + logtext = "<%s:%s>" % (account["service"], opname) |
309 | + logger.debug("%s Performing operation", logtext) |
310 | + |
311 | + args = dict((str(k), v) for k, v in args.items()) |
312 | + message_data = PROTOCOLS[account["service"]].Client(account)(opname, **args) |
313 | + text_cleaner = re.compile(u"[: \n\t\r♻♺]+|@[^ ]+|![^ ]+|#[^ ]+") # signs, @nickname, !group, #tag |
314 | + new_messages = [] |
315 | + |
316 | + if message_data is not None: |
317 | + for m in message_data: |
318 | + try: |
319 | + if isinstance(m, dict) and m.has_key("mid"): |
320 | + m["id"] = uuid.uuid1().hex |
321 | + m["operation"] = opname |
322 | + m["stream"] = m.get("stream", stream) |
323 | + m["transient"] = transient |
324 | + m["time"] = m.get("time", 0) |
325 | + if not m["text"]: m["text"] = "" |
326 | + m["rtl"] = util.isRTL(re.sub(text_cleaner, "", m["text"].decode('utf-8'))) |
327 | + if m.has_key("type"): |
328 | + if m["type"] == "link": m["stream"] = "links" |
329 | + if m["type"] == "video": m["stream"] = "videos" |
330 | + if m["type"] == "photo": m["stream"] = "images" |
331 | + |
332 | + logger.debug("%s Adding record", logtext) |
333 | + new_messages.insert(0, ( |
334 | + m["id"], |
335 | + m["mid"], |
336 | + m["account"], |
337 | + account["service"], |
338 | + opname, |
339 | + transient, |
340 | + m["stream"] or stream, |
341 | + m["time"], |
342 | + m["text"], |
343 | + m.get("sender", {}).get("is_me", None), |
344 | + m.get("to_me", None), |
345 | + m.get("sender", {}).get("nick", None), |
346 | + m.get("reply", {}).get("nick", None), |
347 | + json.dumps(m) |
348 | + )) |
349 | + elif isinstance(m, dict) and m.has_key("error"): |
350 | + new_messages.insert(0, ( |
351 | + "error", |
352 | + json.dumps(m) |
353 | + )) |
354 | + except Exception as e: |
355 | + if not "logtext" in locals(): logtext = "<UNKNOWN>" |
356 | + logger.error("%s Operation failed: %s", logtext, e) |
357 | + self.callback_success(new_messages) |
358 | + |
359 | + except Exception as e: |
360 | + if not "logtext" in locals(): logtext = "<UNKNOWN>" |
361 | + logger.error("%s Operation failed", logtext) |
362 | + logger.debug("Traceback:\n%s", traceback.format_exc()) |
363 | + self.callback_failure("Error") |
364 | + |
365 | + self.t_end = datetime.now() |
366 | + self.t_runtime = self.t_end - self.t_start |
367 | + logger.debug("%s Finished operation (%s)" % (logtext, str(self.t_runtime))) |
368 | + |
369 | |
370 | class OperationCollector: |
371 | def __init__(self, dispatcher): |
372 | @@ -266,7 +280,9 @@ |
373 | self.unseen_counts[s] = 0 |
374 | |
375 | self.indicate = None |
376 | - self.launcher = None |
377 | + self.launcher = None |
378 | + |
379 | + self.job_list = [] |
380 | |
381 | if indicate and util.resources.get_desktop_file(): |
382 | self.indicate = indicate.Server.ref_default () |
383 | @@ -281,7 +297,7 @@ |
384 | menu_server = Dbusmenu.Server.new("/messaging/commands") |
385 | root = Dbusmenu.Menuitem.new () |
386 | root.child_append (post_menu) |
387 | - menu_server.set_root(root); |
388 | + menu_server.set_root(root) |
389 | self.indicate.set_menu (menu_server) |
390 | self.indicate.connect("server-display", self.on_indicator_server_activate) |
391 | self.indicate.connect("interest-added", self.on_indicator_interest_added) |
392 | @@ -383,7 +399,7 @@ |
393 | return False |
394 | |
395 | def on_connection_online(self, *args): |
396 | - log.logger.info("Dispatcher Online, initiating a refresh") |
397 | + logger.info("Dispatcher Online, initiating a refresh") |
398 | if self.refresh_timer_id: |
399 | gobject.source_remove(self.refresh_timer_id) |
400 | # wait a few seconds before alerting the world we are online |
401 | @@ -391,7 +407,7 @@ |
402 | |
403 | def on_connection_offline(self, *args): |
404 | self.refreshRunning = False |
405 | - log.logger.info("Dispatcher Offline, suspending operations") |
406 | + logger.info("Dispatcher Offline, suspending operations") |
407 | if self.refresh_timer_id: |
408 | gobject.source_remove(self.refresh_timer_id) |
409 | |
410 | @@ -445,7 +461,7 @@ |
411 | try: o = json.loads(opdata) |
412 | except: return |
413 | |
414 | - log.logger.debug("** Starting Single Operation **") |
415 | + logger.debug("** Starting Single Operation **") |
416 | self.LoadingStarted() |
417 | |
418 | params = ["account", "operation", "args", "transient"] |
419 | @@ -490,6 +506,7 @@ |
420 | @dbus.service.method("com.Gwibber.Service", in_signature="s") |
421 | def Send(self, opdata): |
422 | try: |
423 | + operations = [] |
424 | o = json.loads(opdata) |
425 | if "target" in o: |
426 | args = {"message": o["message"], "target": o["target"]} |
427 | @@ -501,11 +518,11 @@ |
428 | operations = [(self.collector.get_account(a), "send", {"message": o["message"]}, None) for a in o["accounts"]] |
429 | self.send(operations) |
430 | except: |
431 | - log.logger.error("Sending failed:\n%s", traceback.format_exc()) |
432 | + logger.error("Sending failed:\n%s", traceback.format_exc()) |
433 | |
434 | @dbus.service.method("com.Gwibber.Service", in_signature="ss") |
435 | def Retweet(self, mid, account): |
436 | - log.logger.debug("Retweeting %s", mid) |
437 | + logger.debug("Retweeting %s", mid) |
438 | self.PerformOp(json.dumps({ |
439 | "account": account, |
440 | "operation": "retweet", |
441 | @@ -515,7 +532,7 @@ |
442 | |
443 | @dbus.service.method("com.Gwibber.Service", in_signature="ss") |
444 | def Like(self, mid, account): |
445 | - log.logger.debug("Liking %s", mid) |
446 | + logger.debug("Liking %s", mid) |
447 | self.PerformOp(json.dumps({ |
448 | "account": account, |
449 | "operation": "like", |
450 | @@ -582,7 +599,7 @@ |
451 | service = dbus.Interface(obj, "com.Gwibber.Service") |
452 | service.Start() |
453 | """ |
454 | - log.logger.info("Gwibber Service is starting") |
455 | + logger.info("Gwibber Service is starting") |
456 | |
457 | @dbus.service.method("com.Gwibber.Service") |
458 | def Quit(self): |
459 | @@ -594,7 +611,7 @@ |
460 | service = dbus.Interface(obj, "com.Gwibber.Service") |
461 | service.Quit() |
462 | """ |
463 | - log.logger.info("Gwibber Service is being shutdown") |
464 | + logger.info("Gwibber Service is being shutdown") |
465 | self.mainloop.quit() |
466 | |
467 | @dbus.service.method("com.Gwibber.Service", out_signature="b") |
468 | @@ -629,7 +646,7 @@ |
469 | |
470 | def send_error_notify(self, error): |
471 | if not isinstance(error, dict): |
472 | - log.logger.error("Failed to parse error message: %s", error) |
473 | + logger.error("Failed to parse error message: %s", error) |
474 | return |
475 | if error.has_key("error"): |
476 | error = json.loads(error)["error"] |
477 | @@ -646,35 +663,54 @@ |
478 | util.notify(error["account"]["service"], error["message"], icon, 2000) |
479 | self.notified_errors[error["account"]["service"]] = error["message"] |
480 | |
481 | - def perform_async_operation (self, iterable): |
482 | - pool = multiprocessing.Pool () |
483 | - try: |
484 | - pool.map_async (perform_operation, iterable, callback = self.loading_complete) |
485 | - except Exception as e: |
486 | - self.loading_failed (e, traceback.format_exc()) |
487 | - pool.close () |
488 | - pool.join () |
489 | - |
490 | - def loading_complete(self, output): |
491 | + def perform_async_operation (self, jobs): |
492 | + # |
493 | + # Clean old, not running jobs ... |
494 | + # |
495 | + logger.info("Running Jobs: %s", len(self.job_list)) |
496 | + |
497 | + for job in reversed(self.job_list): |
498 | + if not job.isAlive(): |
499 | + job.join() |
500 | + self.job_list.remove(job) |
501 | + |
502 | + logger.info("Running Jobs: %s", len(self.job_list)) |
503 | + |
504 | + # |
505 | + # Start all jobs |
506 | + # |
507 | + # |
508 | + for job in jobs: |
509 | + thread = perform_operation(job, self.cb_loading_complete, self.cb_loading_failed) |
510 | + self.job_list.append(thread) |
511 | + thread.start () |
512 | + |
513 | + logger.info("Running Jobs: %s", len(self.job_list)) |
514 | + |
515 | + # |
516 | + # TODO: Make me prettier |
517 | + # |
518 | + def cb_loading_failed(self, error): |
519 | + logger.info("Loading Error: %s - %s", self.refresh_count, error) |
520 | + |
521 | + def cb_loading_complete(self, messages): |
522 | self.refresh_count += 1 |
523 | - |
524 | items = [] |
525 | errors = [] |
526 | - for o in output: |
527 | - for o2 in o[1]: |
528 | - if len(o2) > 1: |
529 | - if o2[0] != "error": |
530 | - with sqlite3.connect(SQLITE_DB_FILENAME) as db: |
531 | - if len(db.execute("""select * from messages where mid = '%s' and account = '%s' and stream = '%s'""" % (o2[1], o2[2], o2[6])).fetchall()) > 0: |
532 | - self.messages.Message("update", o2[-1]) |
533 | - else: |
534 | - self.messages.Message("new", o2[-1]) |
535 | - for s in "messages", "replies", "private": |
536 | - if o2[6] == s and o2[9] != 1: |
537 | - self.unseen_counts[s] = self.unseen_counts[s] + 1 |
538 | - items.append(o2) |
539 | - else: |
540 | - errors.append(o2) |
541 | + for m in messages: |
542 | + if len(m) > 1: |
543 | + if m[0] != "error": |
544 | + with sqlite3.connect(SQLITE_DB_FILENAME) as db: |
545 | + if len(db.execute("""select * from messages where mid = '%s' and account = '%s' and stream = '%s'""" % (m[1], m[2], m[6])).fetchall()) > 0: |
546 | + self.messages.Message("update", m[-1]) |
547 | + else: |
548 | + self.messages.Message("new", m[-1]) |
549 | + for s in "messages", "replies", "private": |
550 | + if m[6] == s and m[9] != 1: |
551 | + self.unseen_counts[s] += 1 |
552 | + items.append(m) |
553 | + else: |
554 | + errors.append(m) |
555 | with sqlite3.connect(SQLITE_DB_FILENAME) as db: |
556 | oldid = db.execute("select max(ROWID) from messages").fetchone()[0] or 0 |
557 | |
558 | @@ -683,11 +719,13 @@ |
559 | ",".join("?" * len(self.messages.columns))), items) |
560 | |
561 | gobject.idle_add (self.update_indicators, self.unseen_counts) |
562 | - |
563 | - new_items = db.execute(""" |
564 | - select * from (select * from messages where operation == "receive" and ROWID > %s and to_me = 0 ORDER BY time DESC LIMIT 10) as a union |
565 | - select * from (select * from messages where operation IN ("receive","private") and ROWID > %s and to_me != 0 ORDER BY time DESC LIMIT 10) as b |
566 | - ORDER BY time ASC""" % (oldid, oldid)).fetchall() |
567 | + query = """ |
568 | + SELECT * FROM messages WHERE rowid > {0} AND |
569 | + ((operation == "receive" AND to_me = 0) OR |
570 | + (operation IN ("receive", "private") AND to_me != 0)) |
571 | + ORDER BY time ASC LIMIT 10 |
572 | + """.format(oldid) |
573 | + new_items = db.execute(query) |
574 | |
575 | for i in new_items: |
576 | self.new_message(i) |
577 | @@ -706,7 +744,7 @@ |
578 | self.send_error_notify(error[1]) |
579 | |
580 | self.LoadingComplete() |
581 | - log.logger.info("Loading complete: %s - %s", self.refresh_count, [o[0] for o in output]) |
582 | + logger.info("Loading complete: %s - %s", self.refresh_count, output.rowcount if output.rowcount > 0 else 0) |
583 | |
584 | def update_indicators(self, counts): |
585 | total_unseen = 0 |
586 | @@ -722,7 +760,6 @@ |
587 | total_unseen += counts["messages"] |
588 | if self.messages_indicator not in self.indicator_items: |
589 | self.indicator_items["messages"] = self.messages_indicator |
590 | - log.logger.debug("Messages Indicator count updated to %s", counts["messages"]) |
591 | if counts.has_key("replies"): |
592 | if not self.replies_indicator: |
593 | self.replies_indicator = indicate.Indicator() if hasattr(indicate, "Indicator") else indicate.IndicatorMessage() |
594 | @@ -734,7 +771,6 @@ |
595 | total_unseen += counts["replies"] |
596 | if self.replies_indicator not in self.indicator_items: |
597 | self.indicator_items["replies"] = self.replies_indicator |
598 | - log.logger.debug("Replies Indicator count updated to %s", counts["replies"]) |
599 | if counts.has_key("private"): |
600 | if not self.private_indicator: |
601 | self.private_indicator = indicate.Indicator() if hasattr(indicate, "Indicator") else indicate.IndicatorMessage() |
602 | @@ -743,13 +779,11 @@ |
603 | self.private_indicator.set_property("stream", "private") |
604 | self.private_indicator.show() |
605 | self.private_indicator.set_property("count", str(counts["private"])) |
606 | - total_unseen +- counts["private"] |
607 | + total_unseen += counts["private"] |
608 | if counts["private"] > 0: |
609 | self.private_indicator.set_property_bool("draw-attention", True) |
610 | if self.private_indicator not in self.indicator_items: |
611 | self.indicator_items["private"] = self.private_indicator |
612 | - log.logger.debug("Private Messages Indicator count updated to %s", counts["private"]) |
613 | - log.logger.debug ("Total unseen count: %d", total_unseen) |
614 | if Unity and self.launcher: |
615 | self.launcher.set_property("count", total_unseen) |
616 | if total_unseen < 1: |
617 | @@ -767,7 +801,7 @@ |
618 | def on_indicator_server_activate(self, indicator, timestamp=None): |
619 | dbus.mainloop.glib.DBusGMainLoop(set_as_default=True) |
620 | client_bus = dbus.SessionBus() |
621 | - log.logger.debug("Raising gwibber client") |
622 | + logger.debug("Raising gwibber client") |
623 | try: |
624 | self.handle_indicator_counts() |
625 | except: |
626 | @@ -777,7 +811,7 @@ |
627 | def on_indicator_activate(self, indicator, timestamp=None): |
628 | if not indicate: return |
629 | stream = indicator.get_property("stream") |
630 | - log.logger.debug("Raising gwibber client, focusing %s stream", stream) |
631 | + logger.debug("Raising gwibber client, focusing %s stream", stream) |
632 | try: |
633 | self.handle_indicator_counts(stream) |
634 | except: |
635 | @@ -785,10 +819,10 @@ |
636 | self.show_client(stream=stream) |
637 | |
638 | def handle_focus_reply(self, *args): |
639 | - log.logger.debug("Gwibber Client raised") |
640 | + logger.debug("Gwibber Client raised") |
641 | |
642 | def handle_focus_error(self, *args): |
643 | - log.logger.error("Failed to raise client %s", args) |
644 | + logger.error("Failed to raise client %s", args) |
645 | |
646 | def handle_indicator_counts(self, stream=None): |
647 | if indicate: |
648 | @@ -798,8 +832,9 @@ |
649 | self.unseen_counts[s] = 0 |
650 | if s == "private": |
651 | self.private_indicator.set_property_bool("draw-attention", False) |
652 | - self.launcher.set_property("count", 0) |
653 | - self.launcher.set_property("count_visible", False) |
654 | + if self.launcher: |
655 | + self.launcher.set_property("count", 0) |
656 | + self.launcher.set_property("count_visible", False) |
657 | return |
658 | if self.indicator_items.has_key(stream): |
659 | self.indicator_items[stream].set_property("count", str(0)) |
660 | @@ -811,12 +846,13 @@ |
661 | total_unseen = 0 |
662 | for s in self.unseen_counts.keys(): |
663 | total_unseen += self.unseen_counts[s] |
664 | - log.logger.debug ("handle_indicator_counts total_unseen is %d", total_unseen) |
665 | - self.launcher.set_property("count", total_unseen) |
666 | - if total_unseen < 1: |
667 | - self.launcher.set_property("count_visible", False) |
668 | - else: |
669 | - self.launcher.set_property("count_visible", True) |
670 | + logger.debug ("handle_indicator_counts total_unseen is %d", total_unseen) |
671 | + if self.launcher: |
672 | + self.launcher.set_property("count", total_unseen) |
673 | + if total_unseen < 1: |
674 | + self.launcher.set_property("count_visible", False) |
675 | + else: |
676 | + self.launcher.set_property("count_visible", True) |
677 | |
678 | |
679 | def new_search_message(self, data): |
680 | @@ -826,16 +862,13 @@ |
681 | def new_message(self, data): |
682 | message = json.loads(data[-1]) |
683 | if message["transient"]: |
684 | - log.logger.debug("Message %s is transient, not notifying", message["id"]) |
685 | return |
686 | |
687 | if util.can_notify and str(message["mid"]) not in self.notified_items: |
688 | self.notified_items.append(message["mid"]) |
689 | if gsettings.get_boolean("notify-mentions-only") and message["to_me"]: |
690 | - log.logger.debug("%s is a mention and notify_mentions_only is true", message["mid"]) |
691 | gobject.idle_add(self.handle_notify_item, message) |
692 | elif gsettings.get_boolean("show-notifications") and not gsettings.get_boolean("notify-mentions-only"): |
693 | - log.logger.debug("%s - show_notifications is true and notify_mentions_only is false", message["mid"]) |
694 | gobject.idle_add(self.handle_notify_item, message) |
695 | gobject.idle_add(self.cache_avatar, message) |
696 | |
697 | @@ -873,23 +906,24 @@ |
698 | |
699 | def loading_failed(self, exception, tb): |
700 | self.LoadingComplete() |
701 | - log.logger.error("Loading failed: %s - %s", exception, tb) |
702 | + logger.error("Loading failed: %s - %s", exception, tb) |
703 | |
704 | def send(self, operations): |
705 | operations = util.compact(operations) |
706 | if operations: |
707 | self.LoadingStarted() |
708 | - log.logger.debug("*** Sending Message ***") |
709 | + logger.debug("*** Sending Message ***") |
710 | self.perform_async_operation(operations) |
711 | |
712 | def refresh(self, *args): |
713 | + |
714 | if self.refresh_timer_id: |
715 | gobject.source_remove(self.refresh_timer_id) |
716 | |
717 | refresh_interval = gsettings.get_int("interval") |
718 | |
719 | if not self.maintRunning and not self.refreshRunning: |
720 | - log.logger.debug("Refresh interval is set to %s", refresh_interval) |
721 | + logger.debug("Refresh interval is set to %s", refresh_interval) |
722 | operations = [] |
723 | |
724 | for o in self.collector.get_operations(): |
725 | @@ -898,12 +932,12 @@ |
726 | operations.append(o) |
727 | |
728 | if operations: |
729 | - log.logger.debug("** Starting Refresh - %s **", mx.DateTime.now()) |
730 | + logger.debug("** Starting Refresh - %s **", mx.DateTime.now()) |
731 | self.LoadingStarted() |
732 | self.perform_async_operation(operations) |
733 | |
734 | |
735 | - self.refresh_timer_id = gobject.timeout_add_seconds(int(60 * refresh_interval), self.refresh) |
736 | + self.refresh_timer_id = gobject.timeout_add_seconds(int(60 * refresh_interval), self.refresh) |
737 | else: |
738 | self.refresh_timer_id = gobject.timeout_add_seconds(int(30), self.refresh) |
739 | |
740 | @@ -938,15 +972,15 @@ |
741 | |
742 | |
743 | try: |
744 | - log.logger.debug("NM Version is %s", str(self.nm.Get(NM_DBUS_INTERFACE, "Version"))) |
745 | + logger.debug("NM Version is %s", str(self.nm.Get(NM_DBUS_INTERFACE, "Version"))) |
746 | if str(self.nm.Get(NM_DBUS_INTERFACE, "Version")) >= "0.8.998": |
747 | - log.logger.debug("NM Version is greater than 0.8.997") |
748 | + logger.debug("NM Version is greater than 0.8.997") |
749 | self.NM_STATE_ASLEEP = 10 |
750 | self.NM_STATE_DISCONNECTED = 20 |
751 | self.NM_STATE_CONNECTING = 40 |
752 | self.NM_STATE_CONNECTED = 70 |
753 | else: |
754 | - log.logger.debug("NM Version is less than 0.8.998") |
755 | + logger.debug("NM Version is less than 0.8.998") |
756 | self.NM_STATE_ASLEEP = 1 |
757 | self.NM_STATE_CONNECTING = 2 |
758 | self.NM_STATE_CONNECTED = 3 |
759 | @@ -957,13 +991,13 @@ |
760 | |
761 | |
762 | def on_connection_changed(self, state): |
763 | - log.logger.debug("Network state changed, new state is %d", state) |
764 | + logger.debug("Network state changed, new state is %d", state) |
765 | |
766 | if state == self.NM_STATE_CONNECTED: |
767 | - log.logger.info("Network state changed to Online") |
768 | + logger.info("Network state changed to Online") |
769 | self.ConnectionOnline() |
770 | elif state == self.NM_STATE_DISCONNECTED: |
771 | - log.logger.info("Network state changed to Offline") |
772 | + logger.info("Network state changed to Offline") |
773 | self.ConnectionOffline() |
774 | |
775 | @dbus.service.signal("com.Gwibber.Connection") |
776 | @@ -975,7 +1009,7 @@ |
777 | @dbus.service.method("com.Gwibber.Connection") |
778 | def isConnected(self): |
779 | if not self.has_nm: |
780 | - log.logger.info("Can't determine network state, assuming online") |
781 | + logger.info("Can't determine network state, assuming online") |
782 | return True |
783 | try: |
784 | if self.nm.state() == self.NM_STATE_CONNECTED: |
785 | @@ -1006,7 +1040,7 @@ |
786 | """ |
787 | |
788 | service = gsettings.get_string("urlshorter") or "is.gd" |
789 | - log.logger.info("Shortening URL %s with %s", url, service) |
790 | + logger.info("Shortening URL %s with %s", url, service) |
791 | if self.IsShort(url): return url |
792 | try: |
793 | s = urlshorter.PROTOCOLS[service].URLShorter() |
794 | @@ -1053,16 +1087,16 @@ |
795 | sig_failed = uploader.connect_to_signal("UploadFailed", failed) |
796 | uploader.Upload(filepath) |
797 | """ |
798 | - log.logger.info("Uploading image %s", filepath) |
799 | + logger.info("Uploading image %s", filepath) |
800 | url = uploader.upload(filepath, self.UploadComplete, self.UploadFailed) |
801 | |
802 | @dbus.service.signal(dbus_interface="com.Gwibber.Uploader", signature="ss") |
803 | def UploadComplete(self, filepath, public_url): |
804 | - log.logger.info("Image %s uploaded as %s", filepath, public_url) |
805 | + logger.info("Image %s uploaded as %s", filepath, public_url) |
806 | |
807 | @dbus.service.signal(dbus_interface="com.Gwibber.Uploader", signature="ss") |
808 | def UploadFailed(self, filepath, error_message): |
809 | - log.logger.info("Image %s failed to upload with message '%s'", filepath, error_message) |
810 | + logger.info("Image %s failed to upload with message '%s'", filepath, error_message) |
811 | |
812 | class Translate(dbus.service.Object): |
813 | __dbus_object_path__ = "/com/gwibber/Translate" |
814 | |
815 | === modified file 'gwibber/microblog/network.py' |
816 | --- gwibber/microblog/network.py 2011-11-01 13:24:26 +0000 |
817 | +++ gwibber/microblog/network.py 2012-02-10 17:26:15 +0000 |
818 | @@ -1,78 +1,53 @@ |
819 | #!/usr/bin/env python |
820 | |
821 | -import urllib, pycurl, json, StringIO |
822 | -from util import log |
823 | - |
824 | -try: |
825 | - import libproxy |
826 | -except: |
827 | - libproxy = None |
828 | - |
829 | -# Completely disable libproxy support for now, it causes crashes on amd64 |
830 | -libproxy = None |
831 | - |
832 | -class CurlDownloader: |
833 | - def __init__(self, url, params=None, post=False, username=None, password=None, header=None, body=None): |
834 | - self.curl = pycurl.Curl() |
835 | - |
836 | - # store url for logging later |
837 | - self.url = url |
838 | - |
839 | - if header: |
840 | - self.curl.setopt(pycurl.HTTPHEADER, header) |
841 | - |
842 | - if body: |
843 | - self.curl.setopt(pycurl.POST, 1) |
844 | - self.curl.setopt(pycurl.POSTFIELDS, body) |
845 | - |
846 | +import urllib, json |
847 | +import urllib2, base64 |
848 | +import logging |
849 | +logger = logging.getLogger("Network") |
850 | + |
851 | +class UrlLib2Downloader: |
852 | + def __init__(self, url, params=None, post=False, username=None, |
853 | + password=None, header=None, body=None, proxy=None): |
854 | + |
855 | + data = None |
856 | + |
857 | if params: |
858 | if post: |
859 | - self.curl.setopt(pycurl.HTTPPOST, [(x, str(y)) for x,y in params.items()]) |
860 | + data = urllib.urlencode(params) |
861 | else: |
862 | url = "?".join((url, urllib.urlencode(params))) |
863 | - |
864 | - self.curl.setopt(pycurl.URL, str(url)) |
865 | - #log.logger.debug("URL: %s", str(url)) |
866 | - |
867 | + |
868 | + self.url = url |
869 | + |
870 | + if header: |
871 | + req = urllib2.Request(url, data, headers=header) |
872 | + else: |
873 | + req = urllib2.Request(url, data) |
874 | + |
875 | if username and password: |
876 | - self.curl.setopt(pycurl.USERPWD, "%s:%s" % (str(username), str(password))) |
877 | - |
878 | - self.curl.setopt(pycurl.FOLLOWLOCATION, 1) |
879 | - self.curl.setopt(pycurl.MAXREDIRS, 5) |
880 | - self.curl.setopt(pycurl.TIMEOUT, 150) |
881 | - self.curl.setopt(pycurl.HTTP_VERSION, pycurl.CURL_HTTP_VERSION_1_0) |
882 | - |
883 | - self.content = StringIO.StringIO() |
884 | - self.curl.setopt(pycurl.WRITEFUNCTION, self.content.write) |
885 | - |
886 | - if libproxy: |
887 | - proxy_factory = libproxy.ProxyFactory() |
888 | - log.logger.debug("libproxy: getting proxies") |
889 | - proxylist = proxy_factory.getProxies(str(url)) |
890 | - |
891 | - if proxylist: |
892 | - proxy = proxylist[0] |
893 | - if (proxy.find("@") != -1): |
894 | - self.curl.setopt(pycurl.PROXYAUTH, ["CURLAUTH_ANY"]) |
895 | - if (proxy.find("direct://") != 0): |
896 | - log.logger.debug("using proxy %s", proxy) |
897 | - self.curl.setopt(pycurl.PROXY, proxy) |
898 | + base64string = base64.encodestring('%s:%s' % (username, password)).replace('\n', '') |
899 | + req.add_header("Authorization", "Basic %s" % base64string) |
900 | + if proxy and len(proxy) > 1: # If didn't get a list, we're screwed |
901 | + req.set_proxy(proxy[1], proxy[0]) |
902 | |
903 | try: |
904 | - self.curl.perform() |
905 | - except pycurl.error, e: |
906 | - log.logger.error("Network failure for %s - error: %d - %s", self.url.split("?")[0], e[0], e[1]) |
907 | + self.res = urllib2.urlopen(req) |
908 | + except urllib2.HTTPError, e: |
909 | + logger.error("Network failure for %s - error code: %s", e.geturl().split("?")[0], e.getcode()) |
910 | + self.res = "Network error: {0}".format(e.getcode()) |
911 | |
912 | def get_json(self): |
913 | try: |
914 | return json.loads(self.get_string()) |
915 | except ValueError as e: |
916 | - log.logger.debug("Failed to parse the response for %s, error was: %s", self.url.split("?")[0], str(e)) |
917 | + logger.debug("Failed to parse the response for %s, error was: %s", self.url.split("?")[0], str(e)) |
918 | return [] |
919 | |
920 | def get_string(self): |
921 | - return self.content.getvalue() |
922 | - |
923 | -Download = CurlDownloader |
924 | - |
925 | - |
926 | + try: |
927 | + return self.res.read() |
928 | + except AttributeError: |
929 | + return self.res |
930 | + |
931 | + |
932 | +Download = UrlLib2Downloader |
933 | |
934 | === modified file 'gwibber/microblog/plugins/buzz/__init__.py' |
935 | --- gwibber/microblog/plugins/buzz/__init__.py 2011-04-14 18:39:56 +0000 |
936 | +++ gwibber/microblog/plugins/buzz/__init__.py 2012-02-10 17:26:15 +0000 |
937 | @@ -1,8 +1,11 @@ |
938 | from gwibber.microblog import network, util |
939 | -from gwibber.microblog.util import resources |
940 | import json |
941 | from oauth import oauth |
942 | |
943 | +import logging |
944 | +logger = logging.getLogger("Buzz") |
945 | +logger.debug("Initializing.") |
946 | + |
947 | PROTOCOL_INFO = { |
948 | "name": "Buzz", |
949 | "version": "1.0", |
950 | @@ -134,5 +137,3 @@ |
951 | path = "activities/%s/@self/%s/@comments" % (target["sender"]["id"], target["mid"]) |
952 | self._get(path, post=True, single=True, body=text) |
953 | return [] |
954 | - |
955 | - |
956 | |
957 | === modified file 'gwibber/microblog/plugins/digg/__init__.py' |
958 | --- gwibber/microblog/plugins/digg/__init__.py 2010-11-19 23:50:20 +0000 |
959 | +++ gwibber/microblog/plugins/digg/__init__.py 2012-02-10 17:26:15 +0000 |
960 | @@ -1,6 +1,8 @@ |
961 | from gwibber.microblog import network, util |
962 | -from gwibber.microblog.util import resources |
963 | -from gettext import lgettext as _ |
964 | + |
965 | +import logging |
966 | +logger = logging.getLogger("Digg") |
967 | +logger.debug("Initializing.") |
968 | |
969 | PROTOCOL_INFO = { |
970 | "name": "Digg", |
971 | |
972 | === modified file 'gwibber/microblog/plugins/facebook/__init__.py' |
973 | --- gwibber/microblog/plugins/facebook/__init__.py 2011-08-22 16:11:15 +0000 |
974 | +++ gwibber/microblog/plugins/facebook/__init__.py 2012-02-10 17:26:15 +0000 |
975 | @@ -1,7 +1,7 @@ |
976 | #!/usr/bin/env python |
977 | |
978 | from gwibber.microblog import network, util |
979 | -from gwibber.microblog.util import log, resources |
980 | +from gwibber.microblog.util import resources |
981 | import hashlib, mx.DateTime, time |
982 | from os.path import join, getmtime, exists |
983 | from gettext import lgettext as _ |
984 | @@ -13,7 +13,9 @@ |
985 | except: |
986 | pass |
987 | |
988 | -log.logger.name = "Facebook" |
989 | +import logging |
990 | +logger = logging.getLogger("Facebook") |
991 | +logger.debug("Initializing.") |
992 | |
993 | PROTOCOL_INFO = { |
994 | "name": "Facebook", |
995 | @@ -61,7 +63,7 @@ |
996 | def _check_error(self, data): |
997 | if isinstance(data, dict): |
998 | if data.has_key("error"): |
999 | - log.logger.info("Facebook error %s - %s", data["error"]["type"], data["error"]["message"]) |
1000 | + logger.info("Facebook error %s - %s", data["error"]["type"], data["error"]["message"]) |
1001 | return True |
1002 | else: |
1003 | return False |
1004 | @@ -70,7 +72,7 @@ |
1005 | def _get(self, operation, post=False, single=False, **args): |
1006 | if not self.user_id or "access_token" not in self.account: |
1007 | logstr = """%s: %s - %s""" % (PROTOCOL_INFO["name"], _("Authentication failed"), "Auth needs updating") |
1008 | - log.logger.error("%s", logstr) |
1009 | + logger.error("%s", logstr) |
1010 | return [{"error": {"type": "auth", "account": self.account, "message": _("Authentication failed, please re-authorize")}}] |
1011 | |
1012 | args.update({ |
1013 | @@ -86,15 +88,15 @@ |
1014 | if isinstance(data, dict) and data.get("error_msg", 0): |
1015 | if "permission" in data["error_msg"]: |
1016 | logstr = """%s: %s - %s""" % (PROTOCOL_INFO["name"], _("Authentication failed"), data["error_msg"]) |
1017 | - log.logger.error("%s", logstr) |
1018 | + logger.error("%s", logstr) |
1019 | return [{"error": {"type": "auth", "account": self.account, "message": data["error_msg"]}}] |
1020 | elif data["error_code"] == 102: |
1021 | logstr = """%s: %s - %s""" % (PROTOCOL_INFO["name"], _("Session invalid"), data["error_msg"]) |
1022 | - log.logger.error("%s", logstr) |
1023 | + logger.error("%s", logstr) |
1024 | return [{"error": {"type": "auth", "account": self.account, "message": data["error_msg"]}}] |
1025 | else: |
1026 | logstr = """%s: %s - %s""" % (PROTOCOL_INFO["name"], _("Unknown failure"), data["error_msg"]) |
1027 | - log.logger.error("%s", logstr) |
1028 | + logger.error("%s", logstr) |
1029 | return [{"error": {"type": "unknown", "account": self.account, "message": data["error_msg"]}}] |
1030 | |
1031 | return data |
1032 | @@ -111,7 +113,7 @@ |
1033 | |
1034 | def _message(self, data): |
1035 | if type(data) != dict: |
1036 | - log.logger.error("Cannot parse message data: %s", str(data)) |
1037 | + logger.error("Cannot parse message data: %s", str(data)) |
1038 | return {} |
1039 | |
1040 | m = {} |
1041 | @@ -211,41 +213,41 @@ |
1042 | |
1043 | data = self._get("me/home") |
1044 | |
1045 | - log.logger.debug("<STATS> facebook:receive account:%s since:%s size:%s", |
1046 | + logger.debug("<STATS> facebook:receive account:%s since:%s size:%s", |
1047 | self.account["id"], mx.DateTime.DateTimeFromTicks(since), len(str(data))) |
1048 | |
1049 | if not self._check_error(data): |
1050 | try: |
1051 | return [self._message(post) for post in data["data"]] |
1052 | except TypeError: |
1053 | - log.logger.error("<facebook:receive> failed to parse message data") |
1054 | + logger.error("<facebook:receive> failed to parse message data") |
1055 | return data |
1056 | else: return |
1057 | |
1058 | def delete(self, message): |
1059 | result = self._get("stream.remove", post_id=message["mid"]) |
1060 | if not result: |
1061 | - log.logger.error("<facebook:delete> failed") |
1062 | + logger.error("<facebook:delete> failed") |
1063 | return |
1064 | return [] |
1065 | |
1066 | def like(self, message): |
1067 | result = self._get(message["mid"] + "/likes", post=True) |
1068 | if not result: |
1069 | - log.logger.error("<facebook:like> failed") |
1070 | + logger.error("<facebook:like> failed") |
1071 | return |
1072 | return [] |
1073 | |
1074 | def send(self, message): |
1075 | result = self._get("me/feed", message=message, status_includes_verb=True, post=True) |
1076 | if not result: |
1077 | - log.logger.error("<facebook:send> failed") |
1078 | + logger.error("<facebook:send> failed") |
1079 | return |
1080 | return [] |
1081 | |
1082 | def send_thread(self, message, target): |
1083 | result = self._get(target["mid"] + "/comments", message=message, post=True) |
1084 | if not result: |
1085 | - log.logger.error("<facebook:send_thread> failed") |
1086 | + logger.error("<facebook:send_thread> failed") |
1087 | return |
1088 | return [] |
1089 | |
1090 | === modified file 'gwibber/microblog/plugins/flickr/__init__.py' |
1091 | --- gwibber/microblog/plugins/flickr/__init__.py 2011-03-09 19:49:53 +0000 |
1092 | +++ gwibber/microblog/plugins/flickr/__init__.py 2012-02-10 17:26:15 +0000 |
1093 | @@ -1,5 +1,9 @@ |
1094 | from gwibber.microblog import network, util |
1095 | -from gwibber.microblog.util import log, resources |
1096 | +from gwibber.microblog.util import resources |
1097 | + |
1098 | +import logging |
1099 | +logger = logging.getLogger("Flickr") |
1100 | +logger.debug("Initializing.") |
1101 | |
1102 | import re, mx.DateTime |
1103 | from gettext import lgettext as _ |
1104 | @@ -99,5 +103,5 @@ |
1105 | extras="date_upload,owner_name,icon_server") |
1106 | else: |
1107 | logstr = """%s: %s""" % (PROTOCOL_INFO["name"], _("Failed to find account")) |
1108 | - log.logger.error("%s", logstr) |
1109 | + logger.error("%s", logstr) |
1110 | return [{"error": {"type": "auth", "account": self.account, "message": _("Failed to find account")}}] |
1111 | |
1112 | === modified file 'gwibber/microblog/plugins/foursquare/__init__.py' |
1113 | --- gwibber/microblog/plugins/foursquare/__init__.py 2011-09-21 20:02:43 +0000 |
1114 | +++ gwibber/microblog/plugins/foursquare/__init__.py 2012-02-10 17:26:15 +0000 |
1115 | @@ -1,8 +1,11 @@ |
1116 | from gwibber.microblog import network, util |
1117 | -from gwibber.microblog.util import log, resources |
1118 | +from gwibber.microblog.util import resources |
1119 | import urllib, urllib2, json, htmllib, re |
1120 | from gettext import lgettext as _ |
1121 | -log.logger.name = "Foursquare" |
1122 | + |
1123 | +import logging |
1124 | +logger = logging.getLogger("FourSquare") |
1125 | +logger.debug("Initializing.") |
1126 | |
1127 | PROTOCOL_INFO = { |
1128 | "name": "Foursquare", |
1129 | @@ -124,19 +127,22 @@ |
1130 | if isinstance(data, dict) and "recent" in data: |
1131 | return True |
1132 | else: |
1133 | - log.logger.error("Foursquare error %s", data) |
1134 | + logger.error("Foursquare error %s", data) |
1135 | return False |
1136 | |
1137 | def _get(self, path, parse="message", post=False, single=False, **args): |
1138 | url = "/".join((URL_PREFIX, path)) |
1139 | |
1140 | url = url + "?oauth_token=" + self.token |
1141 | + # |
1142 | + # TODO: Proxy support. Where's Downloader? |
1143 | + # |
1144 | data = json.load(urllib2.urlopen(url))["response"] |
1145 | |
1146 | if isinstance(data, dict) and data.get("errors", 0): |
1147 | if "authenticate" in data["errors"][0]["message"]: |
1148 | logstr = """%s: %s - %s""" % (PROTOCOL_INFO["name"], _("Authentication failed"), error["message"]) |
1149 | - log.logger.error("%s", logstr) |
1150 | + logger.error("%s", logstr) |
1151 | return [{"error": {"type": "auth", "account": self.account, "message": data["errors"][0]["message"]}}] |
1152 | else: |
1153 | for error in data["errors"]: |
1154 | @@ -145,11 +151,11 @@ |
1155 | elif isinstance(data, dict) and data.get("error", 0): |
1156 | if "Incorrect signature" in data["error"]: |
1157 | logstr = """%s: %s - %s""" % (PROTOCOL_INFO["name"], _("Request failed"), data["error"]) |
1158 | - log.logger.error("%s", logstr) |
1159 | + logger.error("%s", logstr) |
1160 | return [{"error": {"type": "auth", "account": self.account, "message": data["error"]}}] |
1161 | elif isinstance(data, str): |
1162 | logstr = """%s: %s - %s""" % (PROTOCOL_INFO["name"], _("Request failed"), data) |
1163 | - log.logger.error("%s", logstr) |
1164 | + logger.error("%s", logstr) |
1165 | return [{"error": {"type": "request", "account": self.account, "message": data}}] |
1166 | if not self._check_error(data): |
1167 | return [] |
1168 | |
1169 | === modified file 'gwibber/microblog/plugins/friendfeed/__init__.py' |
1170 | --- gwibber/microblog/plugins/friendfeed/__init__.py 2010-11-19 23:50:20 +0000 |
1171 | +++ gwibber/microblog/plugins/friendfeed/__init__.py 2012-02-10 17:26:15 +0000 |
1172 | @@ -1,6 +1,9 @@ |
1173 | from gwibber.microblog import network, util |
1174 | -from gwibber.microblog.util import log, resources |
1175 | -log.logger.name = "FriendFeed" |
1176 | +from gwibber.microblog.util import resources |
1177 | + |
1178 | +import logging |
1179 | +logger = logging.getLogger("FriendFeed") |
1180 | +logger.debug("Initializing.") |
1181 | |
1182 | |
1183 | PROTOCOL_INFO = { |
1184 | |
1185 | === modified file 'gwibber/microblog/plugins/identica/__init__.py' |
1186 | --- gwibber/microblog/plugins/identica/__init__.py 2011-11-01 12:38:36 +0000 |
1187 | +++ gwibber/microblog/plugins/identica/__init__.py 2012-02-10 17:26:15 +0000 |
1188 | @@ -1,10 +1,11 @@ |
1189 | -import re |
1190 | from gwibber.microblog import network, util |
1191 | -import gnomekeyring |
1192 | from oauth import oauth |
1193 | -from gwibber.microblog.util import log, resources |
1194 | +from gwibber.microblog.util import resources |
1195 | from gettext import lgettext as _ |
1196 | -log.logger.name = "Identi.ca" |
1197 | + |
1198 | +import logging |
1199 | +logger = logging.getLogger("Identica") |
1200 | +logger.debug("Initializing.") |
1201 | |
1202 | PROTOCOL_INFO = { |
1203 | "name": "Identi.ca", |
1204 | @@ -51,6 +52,11 @@ |
1205 | ], |
1206 | } |
1207 | |
1208 | +# |
1209 | +# TODO: There is self.url_prefix and no global URL_PREFIX? |
1210 | +# |
1211 | +URL_PREFIX = "https://identi.ca" |
1212 | + |
1213 | class Client: |
1214 | def __init__(self, acct): |
1215 | self.url_prefix = "https://identi.ca" |
1216 | @@ -93,7 +99,7 @@ |
1217 | m["images"] = images |
1218 | m["type"] = "photo" |
1219 | except: |
1220 | - log.logger.error("%s failure - %s", PROTOCOL_INFO["name"], data) |
1221 | + logger.error("%s failure - %s", PROTOCOL_INFO["name"], data) |
1222 | |
1223 | return m |
1224 | |
1225 | @@ -117,7 +123,7 @@ |
1226 | |
1227 | def _message(self, data): |
1228 | if type(data) != dict: |
1229 | - log.logger.error("Cannot parse message data: %s", str(data)) |
1230 | + logger.error("Cannot parse message data: %s", str(data)) |
1231 | return {} |
1232 | |
1233 | n = {} |
1234 | @@ -208,7 +214,7 @@ |
1235 | |
1236 | def _get(self, path, parse="message", post=False, single=False, **args): |
1237 | if not self.account.has_key("access_token") and not self.account.has_key("secret_token"): |
1238 | - log.logger.error("%s unexpected result - %s", PROTOCOL_INFO["name"], _("Account needs to be re-authorized")) |
1239 | + logger.error("%s unexpected result - %s", PROTOCOL_INFO["name"], _("Account needs to be re-authorized")) |
1240 | return [{"error": {"type": "auth", "account": self.account, "message": _("Account needs to be re-authorized")}}] |
1241 | |
1242 | self.sigmethod = oauth.OAuthSignatureMethod_HMAC_SHA1() |
1243 | @@ -229,19 +235,19 @@ |
1244 | resources.dump(self.account["service"], self.account["id"], data) |
1245 | |
1246 | if isinstance(data, dict) and data.get("error", 0): |
1247 | - log.logger.error("%s failure - %s", PROTOCOL_INFO["name"], data["error"]) |
1248 | + logger.error("%s failure - %s", PROTOCOL_INFO["name"], data["error"]) |
1249 | if "authenticate" in data["error"]: |
1250 | return [{"error": {"type": "auth", "account": self.account, "message": data["error"]}}] |
1251 | else: |
1252 | return [{"error": {"type": "unknown", "account": self.account, "message": data["error"]}}] |
1253 | elif isinstance(data, str): |
1254 | - log.logger.error("%s unexpected result - %s", PROTOCOL_INFO["name"], data) |
1255 | + logger.error("%s unexpected result - %s", PROTOCOL_INFO["name"], data) |
1256 | return [{"error": {"type": "unknown", "account": self.account, "message": data}}] |
1257 | |
1258 | if parse == "follow" or parse == "unfollow": |
1259 | if isinstance(data, dict) and data.get("error", 0): |
1260 | logstr = """%s: %s - %s""" % (PROTOCOL_INFO["name"], _("%s failed" % parse), data["error"]) |
1261 | - log.logger.error("%s", logstr) |
1262 | + logger.error("%s", logstr) |
1263 | return [{"error": {"type": "auth", "account": self.account, "message": data["error"]}}] |
1264 | else: |
1265 | return [["friendships", {"type": parse, "account": self.account["id"], "service": self.account["service"],"user_id": data["id"], "nick": data["screen_name"]}]] |
1266 | @@ -260,7 +266,7 @@ |
1267 | data = data.get_json() |
1268 | |
1269 | if type(data) != dict: |
1270 | - log.logger.error("Cannot parse search data: %s", str(data)) |
1271 | + logger.error("Cannot parse search data: %s", str(data)) |
1272 | return [] |
1273 | |
1274 | return [self._result(m) for m in data["results"]] |
1275 | |
1276 | === modified file 'gwibber/microblog/plugins/pingfm/__init__.py' |
1277 | --- gwibber/microblog/plugins/pingfm/__init__.py 2010-10-18 20:24:12 +0000 |
1278 | +++ gwibber/microblog/plugins/pingfm/__init__.py 2012-02-10 17:26:15 +0000 |
1279 | @@ -8,6 +8,10 @@ |
1280 | |
1281 | from gwibber.microblog import network, util |
1282 | |
1283 | +import logging |
1284 | +logger = logging.getLogger("Ping.FM") |
1285 | +logger.debug("Initializing.") |
1286 | + |
1287 | PROTOCOL_INFO = { |
1288 | "name": "Ping.fm", |
1289 | "version": 0.1, |
1290 | |
1291 | === modified file 'gwibber/microblog/plugins/qaiku/__init__.py' |
1292 | --- gwibber/microblog/plugins/qaiku/__init__.py 2010-11-19 23:50:20 +0000 |
1293 | +++ gwibber/microblog/plugins/qaiku/__init__.py 2012-02-10 17:26:15 +0000 |
1294 | @@ -1,7 +1,9 @@ |
1295 | from gwibber.microblog import network, util |
1296 | from gwibber.microblog.util import resources |
1297 | -from gettext import lgettext as _ |
1298 | |
1299 | +import logging |
1300 | +logger = logging.getLogger("Qaiku") |
1301 | +logger.debug("Initializing.") |
1302 | PROTOCOL_INFO = { |
1303 | "name": "Qaiku", |
1304 | "version": "1.0", |
1305 | |
1306 | === modified file 'gwibber/microblog/plugins/statusnet/__init__.py' |
1307 | --- gwibber/microblog/plugins/statusnet/__init__.py 2012-02-02 22:35:38 +0000 |
1308 | +++ gwibber/microblog/plugins/statusnet/__init__.py 2012-02-10 17:26:15 +0000 |
1309 | @@ -1,10 +1,12 @@ |
1310 | import re |
1311 | from gwibber.microblog import network, util |
1312 | -import gnomekeyring |
1313 | from oauth import oauth |
1314 | -from gwibber.microblog.util import log, resources |
1315 | +from gwibber.microblog.util import resources |
1316 | from gettext import lgettext as _ |
1317 | -log.logger.name = "StatusNet" |
1318 | + |
1319 | +import logging |
1320 | +logger = logging.getLogger("StatusNet") |
1321 | +logger.debug("Initializing.") |
1322 | |
1323 | PROTOCOL_INFO = { |
1324 | "name": "StatusNet", |
1325 | @@ -97,7 +99,7 @@ |
1326 | m["images"] = images |
1327 | m["type"] = "photo" |
1328 | except: |
1329 | - log.logger.error("%s failure - %s", PROTOCOL_INFO["name"], data) |
1330 | + logger.error("%s failure - %s", PROTOCOL_INFO["name"], data) |
1331 | |
1332 | return m |
1333 | |
1334 | @@ -121,7 +123,7 @@ |
1335 | |
1336 | def _message(self, data): |
1337 | if type(data) != dict: |
1338 | - log.logger.error("Cannot parse message data: %s", str(data)) |
1339 | + logger.error("Cannot parse message data: %s", str(data)) |
1340 | return {} |
1341 | |
1342 | n = {} |
1343 | @@ -211,7 +213,7 @@ |
1344 | |
1345 | def _get(self, path, parse="message", post=False, single=False, **args): |
1346 | if not self.account.has_key("access_token") and not self.account.has_key("secret_token"): |
1347 | - log.logger.error("%s unexpected result - %s", PROTOCOL_INFO["name"], _("Account needs to be re-authorized")) |
1348 | + logger.error("%s unexpected result - %s", PROTOCOL_INFO["name"], _("Account needs to be re-authorized")) |
1349 | return [{"error": {"type": "auth", "account": self.account, "message": _("Account needs to be re-authorized")}}] |
1350 | |
1351 | url = "/".join((self.account["url_prefix"], "api", path)) |
1352 | @@ -233,19 +235,19 @@ |
1353 | resources.dump(self.account["service"], self.account["id"], data) |
1354 | |
1355 | if isinstance(data, dict) and data.get("error", 0): |
1356 | - log.logger.error("%s failure - %s", PROTOCOL_INFO["name"], data["error"]) |
1357 | + logger.error("%s failure - %s", PROTOCOL_INFO["name"], data["error"]) |
1358 | if "authenticate" in data["error"]: |
1359 | return [{"error": {"type": "auth", "account": self.account, "message": data["error"]}}] |
1360 | else: |
1361 | return [{"error": {"type": "unknown", "account": self.account, "message": data["error"]}}] |
1362 | elif isinstance(data, str): |
1363 | - log.logger.error("%s unexpected result - %s", PROTOCOL_INFO["name"], data) |
1364 | + logger.error("%s unexpected result - %s", PROTOCOL_INFO["name"], data) |
1365 | return [{"error": {"type": "unknown", "account": self.account, "message": data}}] |
1366 | |
1367 | if parse == "follow" or parse == "unfollow": |
1368 | if isinstance(data, dict) and data.get("error", 0): |
1369 | logstr = """%s: %s - %s""" % (PROTOCOL_INFO["name"], _("%s failed" % parse), data["error"]) |
1370 | - log.logger.error("%s", logstr) |
1371 | + logger.error("%s", logstr) |
1372 | return [{"error": {"type": "auth", "account": self.account, "message": data["error"]}}] |
1373 | else: |
1374 | return [["friendships", {"type": parse, "account": self.account["id"], "service": self.account["service"],"user_id": data["id"], "nick": data["screen_name"]}]] |
1375 | @@ -264,7 +266,7 @@ |
1376 | data = data.get_json() |
1377 | |
1378 | if type(data) != dict: |
1379 | - log.logger.error("Cannot parse search data: %s", str(data)) |
1380 | + logger.error("Cannot parse search data: %s", str(data)) |
1381 | return [] |
1382 | |
1383 | return [self._result(m) for m in data["results"]] |
1384 | |
1385 | === modified file 'gwibber/microblog/plugins/twitter/__init__.py' |
1386 | --- gwibber/microblog/plugins/twitter/__init__.py 2011-11-01 12:38:36 +0000 |
1387 | +++ gwibber/microblog/plugins/twitter/__init__.py 2012-02-10 17:26:15 +0000 |
1388 | @@ -1,10 +1,11 @@ |
1389 | from gwibber.microblog import network, util |
1390 | -import re |
1391 | -import gnomekeyring |
1392 | from oauth import oauth |
1393 | -from gwibber.microblog.util import log, resources |
1394 | +from gwibber.microblog.util import resources |
1395 | from gettext import lgettext as _ |
1396 | -log.logger.name = "Twitter" |
1397 | + |
1398 | +import logging |
1399 | +logger = logging.getLogger("Twitter") |
1400 | +logger.debug("Initializing.") |
1401 | |
1402 | PROTOCOL_INFO = { |
1403 | "name": "Twitter", |
1404 | @@ -57,7 +58,7 @@ |
1405 | URL_PREFIX = "https://twitter.com" |
1406 | API_PREFIX = "https://api.twitter.com/1" |
1407 | |
1408 | -class Client: |
1409 | +class Client (): |
1410 | def __init__(self, acct): |
1411 | self.service = util.getbus("Service") |
1412 | if acct.has_key("secret_token") and acct.has_key("password"): acct.pop("password") |
1413 | @@ -71,7 +72,7 @@ |
1414 | self.token = oauth.OAuthToken(acct["access_token"], acct["secret_token"]) |
1415 | |
1416 | def _common(self, data): |
1417 | - m = {}; |
1418 | + m = {} |
1419 | try: |
1420 | |
1421 | m["mid"] = str(data["id"]) |
1422 | @@ -97,7 +98,7 @@ |
1423 | m["images"] = images |
1424 | m["type"] = "photo" |
1425 | except: |
1426 | - log.logger.error("%s failure - %s", PROTOCOL_INFO["name"], data) |
1427 | + logger.error("%s failure - %s", PROTOCOL_INFO["name"], data) |
1428 | return {} |
1429 | |
1430 | return m |
1431 | @@ -122,7 +123,7 @@ |
1432 | |
1433 | def _message(self, data): |
1434 | if type(data) != dict: |
1435 | - log.logger.error("Cannot parse message data: %s", str(data)) |
1436 | + logger.error("Cannot parse message data: %s", str(data)) |
1437 | return {} |
1438 | |
1439 | n = {} |
1440 | @@ -247,18 +248,18 @@ |
1441 | request = oauth.OAuthRequest.from_consumer_and_token(self.consumer, self.token, |
1442 | http_method=post and "POST" or "GET", http_url=url, parameters=util.compact(args)) |
1443 | request.sign_request(self.sigmethod, self.consumer, self.token) |
1444 | - |
1445 | + |
1446 | if post: |
1447 | - data = network.Download(request.to_url(), util.compact(args), post).get_json() |
1448 | + headers = request.to_header() |
1449 | + data = network.Download(url, util.compact(args), post, header=headers).get_json() |
1450 | else: |
1451 | data = network.Download(request.to_url(), None, post).get_json() |
1452 | - |
1453 | resources.dump(self.account["service"], self.account["id"], data) |
1454 | |
1455 | if isinstance(data, dict) and data.get("errors", 0): |
1456 | if "authenticate" in data["errors"][0]["message"]: |
1457 | - logstr = """%s: %s - %s""" % (PROTOCOL_INFO["name"], _("Authentication failed"), error["message"]) |
1458 | - log.logger.error("%s", logstr) |
1459 | + logstr = """%s: %s - %s""" % (PROTOCOL_INFO["name"], _("Authentication failed"), data["errors"][0]["message"]) |
1460 | + logger.error("%s", logstr) |
1461 | return [{"error": {"type": "auth", "account": self.account, "message": data["errors"][0]["message"]}}] |
1462 | else: |
1463 | for error in data["errors"]: |
1464 | @@ -267,17 +268,17 @@ |
1465 | elif isinstance(data, dict) and data.get("error", 0): |
1466 | if "Incorrect signature" in data["error"]: |
1467 | logstr = """%s: %s - %s""" % (PROTOCOL_INFO["name"], _("Request failed"), data["error"]) |
1468 | - log.logger.error("%s", logstr) |
1469 | + logger.error("%s", logstr) |
1470 | return [{"error": {"type": "auth", "account": self.account, "message": data["error"]}}] |
1471 | elif isinstance(data, str): |
1472 | logstr = """%s: %s - %s""" % (PROTOCOL_INFO["name"], _("Request failed"), data) |
1473 | - log.logger.error("%s", logstr) |
1474 | + logger.error("%s", logstr) |
1475 | return [{"error": {"type": "request", "account": self.account, "message": data}}] |
1476 | |
1477 | if parse == "follow" or parse == "unfollow": |
1478 | if isinstance(data, dict) and data.get("error", 0): |
1479 | logstr = """%s: %s - %s""" % (PROTOCOL_INFO["name"], _("%s failed" % parse), data["error"]) |
1480 | - log.logger.error("%s", logstr) |
1481 | + logger.error("%s", logstr) |
1482 | return [{"error": {"type": "auth", "account": self.account, "message": data["error"]}}] |
1483 | else: |
1484 | return [["friendships", {"type": parse, "account": self.account["id"], "service": self.account["service"],"user_id": data["id"], "nick": data["screen_name"]}]] |
1485 | @@ -297,7 +298,7 @@ |
1486 | data = data.get_json()["results"] |
1487 | |
1488 | if type(data) != list: |
1489 | - log.logger.error("Cannot parse search data: %s", str(data)) |
1490 | + logger.error("Cannot parse search data: %s", str(data)) |
1491 | return [] |
1492 | |
1493 | return [self._result(m) for m in data] |
1494 | |
1495 | === modified file 'gwibber/microblog/storage.py' |
1496 | --- gwibber/microblog/storage.py 2011-10-23 04:11:05 +0000 |
1497 | +++ gwibber/microblog/storage.py 2012-02-10 17:26:15 +0000 |
1498 | @@ -3,11 +3,13 @@ |
1499 | import json, sqlite3, uuid |
1500 | import gtk, gobject, dbus, dbus.service |
1501 | import util, util.keyring, atexit |
1502 | -from util import log |
1503 | from dbus.mainloop.glib import DBusGMainLoop |
1504 | |
1505 | DBusGMainLoop(set_as_default=True) |
1506 | |
1507 | +import logging |
1508 | +logger = logging.getLogger("Storage") |
1509 | + |
1510 | class MessageManager(dbus.service.Object): |
1511 | __dbus_object_path__ = "/com/gwibber/Messages" |
1512 | |
1513 | @@ -38,7 +40,7 @@ |
1514 | if not self.db.execute("PRAGMA table_info(messages)").fetchall(): |
1515 | self.setup_table() |
1516 | |
1517 | - log.logger.debug("Creating indexes") |
1518 | + logger.debug("Creating indexes") |
1519 | self.db.execute("create index IF NOT EXISTS idx2 on messages (stream, time, transient)") |
1520 | |
1521 | def setup_table(self): |
1522 | @@ -49,20 +51,20 @@ |
1523 | self.db.execute("create index IF NOT EXISTS idx2 on messages (stream, time, transient)") |
1524 | |
1525 | def maintenance(self): |
1526 | - log.logger.info("Cleaning up database...") |
1527 | + logger.info("Cleaning up database...") |
1528 | accounts = self.db.execute("SELECT distinct(account) FROM messages").fetchall() |
1529 | for acct in accounts: |
1530 | try: |
1531 | if not self.db.execute("SELECT count(id) FROM accounts WHERE id = '%s'" % acct[0]).fetchone()[0]: |
1532 | - log.logger.info("DB Maintenance: Found data for an unknown account %s, removing", acct[0]) |
1533 | + logger.info("DB Maintenance: Found data for an unknown account %s, removing", acct[0]) |
1534 | self.db.execute("DELETE FROM messages WHERE account = '%s'" % acct[0]) |
1535 | except: |
1536 | pass |
1537 | try: |
1538 | count = self.db.execute("SELECT count(data) FROM messages WHERE operation = 'receive' AND stream = 'messages' AND account = '%s'" % acct[0]).fetchone()[0] |
1539 | - log.logger.info("Found %d records in the messages stream for account %s", count, acct[0]) |
1540 | + logger.info("Found %d records in the messages stream for account %s", count, acct[0]) |
1541 | if count > 2000: |
1542 | - log.logger.info("Purging old data for %s", acct[0]) |
1543 | + logger.info("Purging old data for %s", acct[0]) |
1544 | self.db.execute("DELETE FROM messages WHERE account = ? AND operation = 'receive' AND stream = 'messages' AND time IN (SELECT CAST (time AS int) FROM (SELECT time FROM messages WHERE account = ? AND operation = 'receive' AND stream = 'messages' AND time != 0 ORDER BY time ASC LIMIT (SELECT COUNT(time) FROM messages WHERE operation = 'receive' AND stream = 'messages' AND account = ? AND time != 0) - 2000) ORDER BY time ASC)", (acct[0],acct[0],acct[0])) |
1545 | except: |
1546 | pass |
1547 | @@ -103,15 +105,15 @@ |
1548 | |
1549 | @dbus.service.signal("com.Gwibber.Searches", signature="s") |
1550 | def Updated(self, data): |
1551 | - log.logger.debug("Search Changed: %s", data) |
1552 | + logger.debug("Search Changed: %s", data) |
1553 | |
1554 | @dbus.service.signal("com.Gwibber.Searches", signature="s") |
1555 | def Deleted(self, data): |
1556 | - log.logger.debug("Search Deleted: %s", data) |
1557 | + logger.debug("Search Deleted: %s", data) |
1558 | |
1559 | @dbus.service.signal("com.Gwibber.Searches", signature="s") |
1560 | def Created(self, data): |
1561 | - log.logger.debug("Search Created: %s", data) |
1562 | + logger.debug("Search Created: %s", data) |
1563 | |
1564 | @dbus.service.method("com.Gwibber.Searches", in_signature="s", out_signature="s") |
1565 | def Get(self, id): |
1566 | @@ -169,15 +171,15 @@ |
1567 | |
1568 | @dbus.service.signal("com.Gwibber.Streams", signature="s") |
1569 | def Updated(self, data): |
1570 | - log.logger.debug("Stream Changed: %s", data) |
1571 | + logger.debug("Stream Changed: %s", data) |
1572 | |
1573 | @dbus.service.signal("com.Gwibber.Streams", signature="s") |
1574 | def Deleted(self, data): |
1575 | - log.logger.debug("Stream Deleted: %s", data) |
1576 | + logger.debug("Stream Deleted: %s", data) |
1577 | |
1578 | @dbus.service.signal("com.Gwibber.Streams", signature="s") |
1579 | def Created(self, data): |
1580 | - log.logger.debug("Stream Created: %s", data) |
1581 | + logger.debug("Stream Created: %s", data) |
1582 | |
1583 | @dbus.service.method("com.Gwibber.Streams", in_signature="s", out_signature="s") |
1584 | def Get(self, id): |
1585 | @@ -284,7 +286,7 @@ |
1586 | self.update_password_cache(acct) |
1587 | |
1588 | def unlock_password_cache(self): |
1589 | - log.logger.debug("Unlocking password cache!") |
1590 | + logger.debug("Unlocking password cache!") |
1591 | for id in self.passwords: |
1592 | util.keyring.munlock(self.passwords[id]) |
1593 | |
1594 | @@ -296,15 +298,15 @@ |
1595 | |
1596 | @dbus.service.signal("com.Gwibber.Accounts", signature="s") |
1597 | def Updated(self, data): |
1598 | - log.logger.info("Account Changed: %s", data) |
1599 | + logger.info("Account Changed: %s", data) |
1600 | |
1601 | @dbus.service.signal("com.Gwibber.Accounts", signature="s") |
1602 | def Deleted(self, data): |
1603 | - log.logger.debug("Account Deleted: %s", data) |
1604 | + logger.debug("Account Deleted: %s", data) |
1605 | |
1606 | @dbus.service.signal("com.Gwibber.Accounts", signature="s") |
1607 | def Created(self, data): |
1608 | - log.logger.debug("Account Created: %s", data) |
1609 | + logger.debug("Account Created: %s", data) |
1610 | |
1611 | @dbus.service.method("com.Gwibber.Accounts", in_signature="s", out_signature="s") |
1612 | def Get(self, id): |
1613 | |
1614 | === modified file 'gwibber/microblog/util/__init__.py' |
1615 | --- gwibber/microblog/util/__init__.py 2011-09-19 22:23:09 +0000 |
1616 | +++ gwibber/microblog/util/__init__.py 2012-02-10 17:26:15 +0000 |
1617 | @@ -1,10 +1,11 @@ |
1618 | - |
1619 | import os, locale, re, mx.DateTime, cgi |
1620 | -import log, resources |
1621 | +import resources |
1622 | import dbus |
1623 | from const import * |
1624 | from htmlentitydefs import name2codepoint |
1625 | |
1626 | +import logging |
1627 | +logger = logging.getLogger("Microblog Util") |
1628 | |
1629 | # Try to import * from custom, install custom.py to include packaging |
1630 | # customizations like distro API keys, etc |
1631 | @@ -179,7 +180,7 @@ |
1632 | pixbuf = gtk.gdk.pixbuf_new_from_file_at_size(icon, iconsize, iconsize) |
1633 | notification.set_icon_from_pixbuf(pixbuf) |
1634 | except glib.GError as e: |
1635 | - log.logger.error("Avatar failure - %s - %s", icon, e.message) |
1636 | + logger.error("Avatar failure - %s - %s", icon, e.message) |
1637 | resources.del_avatar(icon) |
1638 | |
1639 | if timeout: |
1640 | @@ -191,7 +192,7 @@ |
1641 | try: |
1642 | notification.show () |
1643 | except: |
1644 | - log.logger.error("Notification failed") |
1645 | + logger.error("Notification failed") |
1646 | return |
1647 | |
1648 | can_notify = True |
1649 | |
1650 | === modified file 'gwibber/microblog/util/couchmigrate.py' |
1651 | --- gwibber/microblog/util/couchmigrate.py 2010-12-10 05:12:11 +0000 |
1652 | +++ gwibber/microblog/util/couchmigrate.py 2012-02-10 17:26:15 +0000 |
1653 | @@ -7,9 +7,12 @@ |
1654 | CouchDatabase = None |
1655 | |
1656 | from const import SQLITE_DB_FILENAME |
1657 | -import sqlite3, uuid, json, log |
1658 | +import sqlite3, uuid, json |
1659 | import resources |
1660 | |
1661 | +import logging |
1662 | +logger = logging.getLogger("Couch Migrate") |
1663 | + |
1664 | COUCH_DB_ACCOUNTS = "gwibber_accounts" |
1665 | |
1666 | # Dynamically build a list of available service plugins |
1667 | @@ -30,12 +33,12 @@ |
1668 | accounts = CouchDatabase(COUCH_DB_ACCOUNTS, create=False) |
1669 | records = accounts.get_records() |
1670 | except NoSuchDatabase: |
1671 | - log.logger.info("Nothing to migrate from desktopcouch") |
1672 | + logger.info("Nothing to migrate from desktopcouch") |
1673 | return |
1674 | |
1675 | migrate = {} |
1676 | |
1677 | - log.logger.info("Looking for accounts to migrate from desktopcouch to sqlite") |
1678 | + logger.info("Looking for accounts to migrate from desktopcouch to sqlite") |
1679 | |
1680 | for record in records: |
1681 | id = str(record["value"]["protocol"] + "-" + record["value"]["username"]) |
1682 | @@ -52,7 +55,7 @@ |
1683 | for param in SERVICES[record["value"]["protocol"]]["config"]: |
1684 | key = param.replace('private:','') |
1685 | new_account[key] = record["value"][key] |
1686 | - log.logger.info("Found account %s - %s that needs to be migrated", new_account["service"], new_account["username"]) |
1687 | + logger.info("Found account %s - %s that needs to be migrated", new_account["service"], new_account["username"]) |
1688 | self.Create(json.dumps(new_account)) |
1689 | |
1690 | def Create(self, account): |
1691 | |
1692 | === modified file 'gwibber/microblog/util/exceptions.py' |
1693 | --- gwibber/microblog/util/exceptions.py 2010-10-06 23:33:22 +0000 |
1694 | +++ gwibber/microblog/util/exceptions.py 2012-02-10 17:26:15 +0000 |
1695 | @@ -1,7 +1,9 @@ |
1696 | -import log |
1697 | import os, subprocess |
1698 | import xdg, time |
1699 | |
1700 | +import logging |
1701 | +logger = logging.getLogger("Exceptions") |
1702 | + |
1703 | import gettext |
1704 | from gettext import lgettext as _ |
1705 | if hasattr(gettext, 'bind_textdomain_codeset'): |
1706 | @@ -23,15 +25,15 @@ |
1707 | """ |
1708 | def __init__(self, kind="UNKNOWN", service="UNKNOWN", username="UNKNOWN", message="UNKNOWN"): |
1709 | if kind == "keyring" or kind == "auth": |
1710 | - log.logger.error("Failed to find credentials in the keyring") |
1711 | + logger.error("Failed to find credentials in the keyring") |
1712 | accounts_error = os.path.join(xdg.BaseDirectory.xdg_cache_home, "gwibber", ".accounts_error") |
1713 | if os.path.exists(accounts_error) and os.path.getmtime(accounts_error) > time.time()-600: |
1714 | - log.logger.info("gwibber-accounts was raised less than 600 seconds") |
1715 | + logger.info("gwibber-accounts was raised less than 600 seconds") |
1716 | return |
1717 | else: |
1718 | open(accounts_error, 'w').close() |
1719 | else: |
1720 | - log.logger.error("%s failure: %s:%s - %s", kind, service, username, message) |
1721 | + logger.error("%s failure: %s:%s - %s", kind, service, username, message) |
1722 | |
1723 | display_message = _("There was an %(kind)s failure from %(service)s for account %(account)s, error was %(error)s") % { |
1724 | "kind": kind, |
1725 | |
1726 | === modified file 'gwibber/microblog/util/keyring.py' |
1727 | --- gwibber/microblog/util/keyring.py 2010-10-22 16:59:03 +0000 |
1728 | +++ gwibber/microblog/util/keyring.py 2012-02-10 17:26:15 +0000 |
1729 | @@ -8,11 +8,10 @@ |
1730 | value = "" |
1731 | try: |
1732 | value = gnomekeyring.find_items_sync( |
1733 | - gnomekeyring.ITEM_GENERIC_SECRET, |
1734 | - {"id": str(id)})[0].secret |
1735 | + gnomekeyring.ITEM_GENERIC_SECRET, |
1736 | + {"id": str(id)})[0].secret |
1737 | mlock(value) |
1738 | except gnomekeyring.NoMatchError: |
1739 | - print id |
1740 | raise exceptions.GwibberServiceError("keyring") |
1741 | |
1742 | return value |
1743 | |
1744 | === modified file 'gwibber/microblog/util/log.py' |
1745 | --- gwibber/microblog/util/log.py 2010-10-19 03:09:16 +0000 |
1746 | +++ gwibber/microblog/util/log.py 2012-02-10 17:26:15 +0000 |
1747 | @@ -6,14 +6,28 @@ |
1748 | import xdg.BaseDirectory |
1749 | |
1750 | cache_dir = realpath(join(xdg.BaseDirectory.xdg_cache_home, "gwibber")) |
1751 | + |
1752 | if not isdir(cache_dir): |
1753 | makedirs(cache_dir) |
1754 | |
1755 | LOG_FILENAME = join(cache_dir, "gwibber.log") |
1756 | -loghandler = logging.handlers.RotatingFileHandler( |
1757 | - LOG_FILENAME, maxBytes=20971520, backupCount=5) |
1758 | -logger = logging.getLogger('Gwibber') |
1759 | -logformatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s") |
1760 | -loghandler.setFormatter(logformatter) |
1761 | -logger.setLevel(logging.INFO) |
1762 | -logger.addHandler(loghandler) |
1763 | + |
1764 | +def setup_logging(console = False, debuglog = False): |
1765 | + log = logging.getLogger() |
1766 | + if debuglog: |
1767 | + log.setLevel(logging.DEBUG) |
1768 | + else: |
1769 | + log.setLevel(logging.INFO) |
1770 | + |
1771 | + if console: |
1772 | + console_handler = logging.StreamHandler() |
1773 | + console_formatter = logging.Formatter('%(name)-12s %(threadName)-12s: %(levelname)-8s %(message)s') |
1774 | + console_handler.setFormatter(console_formatter) |
1775 | + log.addHandler(console_handler) |
1776 | + |
1777 | + text_handler = logging.handlers.RotatingFileHandler(LOG_FILENAME, maxBytes=20971520, backupCount=5) |
1778 | + text_formatter = logging.Formatter("%(asctime)s - %(name)-12s %(threadName)-14s: %(levelname)-8s - %(message)s") |
1779 | + text_handler.setFormatter(text_formatter) |
1780 | + log.addHandler(text_handler) |
1781 | + |
1782 | + log.info("Logger initialized") |
1783 | |
1784 | === modified file 'gwibber/microblog/util/resources.py' |
1785 | --- gwibber/microblog/util/resources.py 2011-11-04 12:40:19 +0000 |
1786 | +++ gwibber/microblog/util/resources.py 2012-02-10 17:26:15 +0000 |
1787 | @@ -5,16 +5,16 @@ |
1788 | """ |
1789 | |
1790 | import os, sys |
1791 | -from os.path import join, isdir, realpath, exists |
1792 | from os import makedirs, remove |
1793 | -from os import environ |
1794 | import Image |
1795 | -import log |
1796 | import mx.DateTime |
1797 | from gwibber.microblog import network |
1798 | from const import * |
1799 | import inspect |
1800 | |
1801 | +import logging |
1802 | +logger = logging.getLogger("Resources") |
1803 | + |
1804 | # Try to import * from custom, install custom.py to include packaging |
1805 | # customizations like distro API keys, etc |
1806 | try: |
1807 | @@ -22,8 +22,6 @@ |
1808 | except: |
1809 | pass |
1810 | |
1811 | -log.logger.name = "Gwibber Dispatcher Resources" |
1812 | - |
1813 | PROGRAM_NAME = "gwibber" |
1814 | UI_DIR_NAME = "ui" |
1815 | THEME_DIR_NAME = os.path.join(UI_DIR_NAME, "themes") |
1816 | @@ -90,7 +88,7 @@ |
1817 | avatar_cache_image = join(avatar_cache_dir, url.replace("/","")) |
1818 | |
1819 | if not exists(avatar_cache_image) or len(open(avatar_cache_image, "r").read()) < 1: |
1820 | - log.logger.debug("Downloading avatar %s", url) |
1821 | + logger.debug("Downloading avatar %s", url) |
1822 | f = file(avatar_cache_image, "w") |
1823 | data = network.Download(url) |
1824 | f.write(data.get_string()) |
1825 | @@ -106,7 +104,7 @@ |
1826 | try: |
1827 | remove(avatar) |
1828 | except: |
1829 | - log.logger.error("Failed to remove avatar from cache: %s", avatar) |
1830 | + logger.error("Failed to remove avatar from cache: %s", avatar) |
1831 | |
1832 | def img_resize(img_path, size): |
1833 | try: |
1834 | @@ -118,7 +116,7 @@ |
1835 | image.resize((size, size), Image.ANTIALIAS).save(img_path, format="jpeg") |
1836 | except Exception, e: |
1837 | from traceback import format_exc |
1838 | - log.logger.error("Image resizing failed:\n%s", format_exc()) |
1839 | + logger.error("Image resizing failed:\n%s", format_exc()) |
1840 | |
1841 | def get_desktop_file(): |
1842 | p = os.path.join(LAUNCH_DIR, "gwibber.desktop") |
1843 | @@ -206,7 +204,7 @@ |
1844 | dump_cache_file = join(dump_cache_dir, (aid + "." + str(mx.DateTime.now()) + "." + operation)) |
1845 | |
1846 | if not exists(dump_cache_file) or len(open(dump_cache_file, "r").read()) < 1: |
1847 | - log.logger.debug("Dumping test data %s - %s - %s", service, aid, operation) |
1848 | + logger.debug("Dumping test data %s - %s - %s", service, aid, operation) |
1849 | f = file(dump_cache_file, "w") |
1850 | f.write(str(data)) |
1851 | f.close() |
1852 | |
1853 | === modified file 'po/Makefile.in.in' |
1854 | --- po/Makefile.in.in 2011-12-06 19:58:21 +0000 |
1855 | +++ po/Makefile.in.in 2012-02-10 17:26:15 +0000 |
1856 | @@ -49,8 +49,8 @@ |
1857 | XGETTEXT = @XGETTEXT@ |
1858 | INTLTOOL_UPDATE = @INTLTOOL_UPDATE@ |
1859 | INTLTOOL_EXTRACT = @INTLTOOL_EXTRACT@ |
1860 | -MSGMERGE = INTLTOOL_EXTRACT=$(INTLTOOL_EXTRACT) XGETTEXT="$(XGETTEXT)" srcdir=$(srcdir) $(INTLTOOL_UPDATE) --gettext-package $(GETTEXT_PACKAGE) --dist |
1861 | -GENPOT = INTLTOOL_EXTRACT=$(INTLTOOL_EXTRACT) XGETTEXT="$(XGETTEXT)" srcdir=$(srcdir) $(INTLTOOL_UPDATE) --gettext-package $(GETTEXT_PACKAGE) --pot |
1862 | +MSGMERGE = INTLTOOL_EXTRACT="$(INTLTOOL_EXTRACT)" XGETTEXT="$(XGETTEXT)" srcdir=$(srcdir) $(INTLTOOL_UPDATE) --gettext-package $(GETTEXT_PACKAGE) --dist |
1863 | +GENPOT = INTLTOOL_EXTRACT="$(INTLTOOL_EXTRACT)" XGETTEXT="$(XGETTEXT)" srcdir=$(srcdir) $(INTLTOOL_UPDATE) --gettext-package $(GETTEXT_PACKAGE) --pot |
1864 | |
1865 | ALL_LINGUAS = @ALL_LINGUAS@ |
1866 | |
1867 | @@ -73,15 +73,20 @@ |
1868 | .SUFFIXES: |
1869 | .SUFFIXES: .po .pox .gmo .mo .msg .cat |
1870 | |
1871 | +AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@ |
1872 | +INTLTOOL_V_MSGFMT = $(INTLTOOL__v_MSGFMT_$(V)) |
1873 | +INTLTOOL__v_MSGFMT_= $(INTLTOOL__v_MSGFMT_$(AM_DEFAULT_VERBOSITY)) |
1874 | +INTLTOOL__v_MSGFMT_0 = @echo " MSGFMT" $@; |
1875 | + |
1876 | .po.pox: |
1877 | $(MAKE) $(GETTEXT_PACKAGE).pot |
1878 | $(MSGMERGE) $< $(GETTEXT_PACKAGE).pot -o $*.pox |
1879 | |
1880 | .po.mo: |
1881 | - $(MSGFMT) -o $@ $< |
1882 | + $(INTLTOOL_V_MSGFMT)$(MSGFMT) -o $@ $< |
1883 | |
1884 | .po.gmo: |
1885 | - file=`echo $* | sed 's,.*/,,'`.gmo \ |
1886 | + $(INTLTOOL_V_MSGFMT)file=`echo $* | sed 's,.*/,,'`.gmo \ |
1887 | && rm -f $$file && $(GMSGFMT) -o $$file $< |
1888 | |
1889 | .po.cat: |
1890 | |
1891 | === modified file 'tests/plugins/test/__init__.py' |
1892 | --- tests/plugins/test/__init__.py 2012-01-10 18:20:28 +0000 |
1893 | +++ tests/plugins/test/__init__.py 2012-02-10 17:26:15 +0000 |
1894 | @@ -1,7 +1,10 @@ |
1895 | from gwibber.microblog import util |
1896 | -from gwibber.microblog.util import log |
1897 | import json |
1898 | |
1899 | +import logging |
1900 | +logger = logging.getLogger("Plugin Test") |
1901 | +logger.debug("Initializing.") |
1902 | + |
1903 | import re, mx.DateTime |
1904 | from gettext import lgettext as _ |
1905 | |
1906 | @@ -66,5 +69,5 @@ |
1907 | return getattr(self, opname)(**args) |
1908 | |
1909 | def receive(self, count=util.COUNT, since=None): |
1910 | - log.logger.debug ("HERE"); |
1911 | + logger.debug ("HERE"); |
1912 | return self._get("statuses/friends_timeline.json", count=count, since_id=since, source="Gwibber") |
I had a little feedback:
* start.in needed tweaking for the logging changes
* there was an old unused facelib.py which still had a curl import
* tests/service-
I went ahead and cleaned that up for the merge, thanks!