Merge lp:~zeitgeist/zeitgeist/bug695311 into lp:zeitgeist/0.1

Proposed by Siegfried Gevatter
Status: Merged
Merged at revision: 1658
Proposed branch: lp:~zeitgeist/zeitgeist/bug695311
Merge into: lp:zeitgeist/0.1
Diff against target: 194 lines (+48/-32)
4 files modified
_zeitgeist/engine/extension.py (+11/-1)
_zeitgeist/engine/extensions/datasource_registry.py (+21/-24)
test/remote-test.py (+2/-0)
zeitgeist-daemon.py (+14/-7)
To merge this branch: bzr merge lp:~zeitgeist/zeitgeist/bug695311
Reviewer  Status
Mikkel Kamstrup Erlandsen  Approve
Markus Korn  Needs Information
Seif Lotfy  Approve
Review via email: mp+44850@code.launchpad.net
Seif Lotfy (seif) wrote :

Changing to a dict is a big improvement for me and makes it easier to comprehend. All in all it's pretty straightforward and I like the unload method. I would suggest writing to disk every 5 minutes or every time a new application opens, instead of "on Zeitgeist exit", but this is just a suggestion.
However, given its current state, it's a +1 from me, since my suggestions can be discussed later.
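
(For illustration only, a rough Python sketch of that periodic-flush suggestion, assuming the gobject main loop the daemon already runs; the mixin and its method names are hypothetical, and only _write_to_disk corresponds to the registry code in the diff below.)

import gobject

FLUSH_INTERVAL_MS = 5 * 60 * 1000  # the suggested five minutes

class PeriodicFlushMixin(object):
	"""Hypothetical helper: write the data-source registry to disk
	periodically instead of only when the daemon exits."""

	def start_periodic_flush(self):
		# gobject keeps calling the callback as long as it returns True.
		self._flush_source = gobject.timeout_add(FLUSH_INTERVAL_MS,
			self._on_flush_timeout)

	def _on_flush_timeout(self):
		self._write_to_disk()  # provided by DataSourceRegistry
		return True

	def stop_periodic_flush(self):
		# To be called from unload(): cancel the timer, flush once more.
		gobject.source_remove(self._flush_source)
		self._write_to_disk()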

Seif Lotfy (seif) wrote :

Forgot the +1.

review: Approve
lp:~zeitgeist/zeitgeist/bug695311 updated
1660. By Siegfried Gevatter

Actually call the extension's unload method ^^.

Markus Korn (thekorn) wrote :

Siegfried, you added an additional handler for SIGTERM. Can you please elaborate on when we receive this kind of signal? I mean, we clearly documented SIGHUP as a tool for distributors, and I would like to see the same documentation for SIGTERM too.

review: Needs Information
Siegfried Gevatter (rainct) wrote :

I haven't checked, but Zeitgeist should be receiving a SIGTERM at system poweroff (and it's also the default signal sent by "kill"). This way we try to ensure that Zeitgeist always exits cleanly (i.e. in most cases, unless there's an uncaught exception or a SIGKILL).
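
(As an aside, a minimal self-contained Python sketch of the exit paths being covered, with simplified names; in the real daemon the handler calls interface.Quit() rather than stopping the loop directly.)

import signal
import gobject

mainloop = gobject.MainLoop()

def handle_exit(signum=None, frame=None):
	# SIGHUP (the documented distributor hook) and SIGTERM (the default
	# signal from "kill", and what we get at poweroff) both land here;
	# Ctrl+C surfaces as KeyboardInterrupt and is caught below.
	mainloop.quit()

signal.signal(signal.SIGHUP, handle_exit)
signal.signal(signal.SIGTERM, handle_exit)

try:
	mainloop.run()
except KeyboardInterrupt:
	handle_exit()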

Mikkel Kamstrup Erlandsen (kamstrup) wrote :

I have done any test runs, but this looks sane to me

review: Approve
Mikkel Kamstrup Erlandsen (kamstrup) wrote :

Sorry, I mean I have *not* done test runs... :-)

Preview Diff

=== modified file '_zeitgeist/engine/extension.py'
--- _zeitgeist/engine/extension.py 2010-12-26 09:18:11 +0000
+++ _zeitgeist/engine/extension.py 2010-12-29 16:56:27 +0000
@@ -62,6 +62,15 @@
 def __init__(self, engine):
 self.engine = weakref.proxy(engine)

+ def unload(self):
+ """
+ This method gets called before Zeitgeist stops.
+
+ Execution of this method isn't guaranteed, and you shouldn't do
+ anything slow in there.
+ """
+ pass
+
 def pre_insert_event(self, event, sender):
 """
 Hook applied to all events before they are inserted into the
@@ -278,8 +287,9 @@
 self.unload(ext)
 else:
 ext_name = get_extension_name(extension)
- log.debug("Unloading extension '%s'" %ext_name)
+ log.debug("Unloading extension '%s'" % ext_name)
 obj = self.__extensions[ext_name]
+ obj.unload()
 for method in obj.PUBLIC_METHODS:
 del self.methods[method]
 del self.__extensions[ext_name]

=== modified file '_zeitgeist/engine/extensions/datasource_registry.py'
--- _zeitgeist/engine/extensions/datasource_registry.py 2010-10-19 13:54:12 +0000
+++ _zeitgeist/engine/extensions/datasource_registry.py 2010-12-29 16:56:27 +0000
@@ -72,17 +72,18 @@
 dbus.service.Object.__init__(self, dbus.SessionBus(),
 REGISTRY_DBUS_OBJECT_PATH)

+ self._registry = {}
 if os.path.exists(DATA_FILE):
 try:
- self._registry = [DataSource.from_list(
- datasource) for datasource in pickle.load(open(DATA_FILE))]
+ with open(DATA_FILE) as data_file:
+ for datasource in pickle.load(data_file):
+ ds = DataSource.from_list(datasource)
+ self._registry[ds[DataSource.UniqueId]] = ds
 log.debug("Loaded data-source data from %s" % DATA_FILE)
 except Exception, e:
 log.warn("Failed to load data file %s: %s" % (DATA_FILE, e))
- self._registry = []
 else:
 log.debug("No existing data-source data found.")
- self._registry = []
 self._running = {}

 # Connect to client disconnection signals
@@ -93,49 +94,46 @@
 )

 def _write_to_disk(self):
- data = [DataSource.get_plain(datasource) for datasource in self._registry]
+ data = [DataSource.get_plain(datasource) for datasource in
+ self._registry.itervalues()]
 with open(DATA_FILE, "w") as data_file:
 pickle.dump(data, data_file)
 #log.debug("Data-source registry update written to disk.")

- def _get_data_source(self, unique_id):
- for datasource in self._registry:
- if datasource.unique_id == unique_id:
- return datasource
-
 def pre_insert_event(self, event, sender):
 for (unique_id, bus_names) in self._running.iteritems():
 if sender in bus_names:
- datasource = self._get_data_source(unique_id)
+ datasource = self._registry[unique_id]
 # Update LastSeen time
 datasource.last_seen = get_timestamp_for_now()
- self._write_to_disk()
 # Check whether the data-source is allowed to insert events
 if not datasource.enabled:
 return None
 return event

+ def unload(self):
+ self._write_to_disk()
+
 # PUBLIC
 def register_data_source(self, unique_id, name, description, templates):
 source = DataSource(str(unique_id), unicode(name), unicode(description),
 map(Event.new_for_struct, templates))
- for datasource in self._registry:
- if datasource == source:
- datasource.update_from_data_source(source)
- self.DataSourceRegistered(datasource)
- return datasource.enabled
- self._registry.append(source)
+ if unique_id in self._registry:
+ datasource = self._registry[unique_id]
+ datasource.update_from_data_source(source)
+ else:
+ datasource = self._registry[unique_id] = source
 self._write_to_disk()
- self.DataSourceRegistered(source)
- return True
+ self.DataSourceRegistered(datasource)
+ return datasource.enabled

 # PUBLIC
 def get_data_sources(self):
- return self._registry
+ return self._registry.values()

 # PUBLIC
 def set_data_source_enabled(self, unique_id, enabled):
- datasource = self._get_data_source(unique_id)
+ datasource = self._registry[unique_id]
 if not datasource:
 return False
 if datasource.enabled != enabled:
@@ -244,11 +242,10 @@
 return
 uid = uid[0]

- datasource = self._get_data_source(uid)
+ datasource = self._registry[uid]

 # Update LastSeen time
 datasource.last_seen = get_timestamp_for_now()
- self._write_to_disk()

 strid = "%s (%s)" % (uid, datasource.name)
 log.debug("Client disconnected: %s" % strid)

=== modified file 'test/remote-test.py'
--- test/remote-test.py 2010-12-29 13:31:10 +0000
+++ test/remote-test.py 2010-12-29 16:56:27 +0000
@@ -455,6 +455,7 @@
 # Verify that they have been inserted correctly
 datasources = list(self.client._registry.GetDataSources())
 self.assertEquals(len(datasources), 2)
+ datasources.sort(key=lambda x: x[DataSource.UniqueId])
 self._assertDataSourceEquals(datasources[0], self._ds1)
 self._assertDataSourceEquals(datasources[1], self._ds2)

@@ -464,6 +465,7 @@
 # Verify that it changed correctly
 datasources = list(self.client._registry.GetDataSources())
 self.assertEquals(len(datasources), 2)
+ datasources.sort(key=lambda x: x[DataSource.UniqueId])
 self._assertDataSourceEquals(datasources[0], self._ds1)
 self._assertDataSourceEquals(datasources[1], self._ds2b)

=== modified file 'zeitgeist-daemon.py'
--- zeitgeist-daemon.py 2010-12-07 21:36:24 +0000
+++ zeitgeist-daemon.py 2010-12-29 16:56:27 +0000
@@ -118,12 +118,13 @@
 # tell the user which datahub we are running
 logging.debug("Running datahub (%s) with PID=%i" %(which(DATAHUB), p.pid))

-def setup_handle_sighup(interface):
- def handle_sighup(signum, frame):
- """We are using the SIGHUP signal to shutdown zeitgeist in a clean way"""
- logging.info("got SIGHUP signal, shutting down zeitgeist interface")
+def setup_handle_exit(interface):
+ def handle_exit(signum=None, frame=None):
+ """ We want to end Zeitgeist in a clean way when we receive a
+ SIGTERM or SIGHUP signal, or the user pressed Ctrl+C. """
+ logging.info("Shutting down Zeitgeist interface...")
 interface.Quit()
- return handle_sighup
+ return handle_exit

 def setup_logger(log_level):
 logger = logging.getLogger()
@@ -167,7 +168,13 @@
 logging.info("Trying to start the datahub")
 start_datahub()

- signal.signal(signal.SIGHUP, setup_handle_sighup(interface))
+ handle_exit = setup_handle_exit(interface)
+
+ signal.signal(signal.SIGHUP, handle_exit)
+ signal.signal(signal.SIGTERM, handle_exit)

 logging.info("Starting Zeitgeist service...")
- mainloop.run()
+ try:
+ mainloop.run()
+ except KeyboardInterrupt:
+ handle_exit()