Merge lp:~marco-gallotta/ibid/flight into lp:~ibid-core/ibid/old-trunk-1.6
- flight
- Merge into old-trunk-1.6
Status: | Superseded | ||||
---|---|---|---|---|---|
Proposed branch: | lp:~marco-gallotta/ibid/flight | ||||
Merge into: | lp:~ibid-core/ibid/old-trunk-1.6 | ||||
Diff against target: |
466 lines (+332/-44) 3 files modified
ibid/plugins/ascii.py (+18/-41) ibid/plugins/flight.py (+300/-0) ibid/utils/__init__.py (+14/-3) |
||||
To merge this branch: | bzr merge lp:~marco-gallotta/ibid/flight | ||||
Related bugs: |
|
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
Ibid Core Team | Pending | ||
Review via email: mp+16962@code.launchpad.net |
This proposal supersedes a proposal from 2010-01-07.
This proposal has been superseded by a proposal from 2010-01-07.
Commit message
Description of the change
marcog (marco-gallotta) wrote : Posted in a previous version of this proposal | # |
Stefano Rivera (stefanor) wrote : Posted in a previous version of this proposal | # |
Whoops, conflicts in the diff.
- 859. By marcog
-
Reverting ascii to trunk's version...hopefully i got it right *this* time
- 860. By marcog
-
Fix typos; allow "airport *for* cape town"
- 861. By marcog
-
Condense responses into one
- 862. By marcog
-
Remove unused imports
- 863. By marcog
-
Make use of etree's findtext and path querying
- 864. By marcog
-
Merge flight and airport features
- 865. By marcog
-
Use dict substitutions for responses
- 866. By marcog
-
Catch ValueError thrown by parse() when the date is invalid
- 867. By marcog
-
Make strings unicode in places where they weren't; split(' ') if s -> string.split()
- 868. By marcog
-
Make parsing a little more resilient
- 869. By marcog
-
strftime needs an ascii string for pre-2.6
- 870. By marcog
-
Merge trunk and move flight into geography
- 871. By marcog
-
Travelocity changed some form attribute names, this fixes them but flight is still not working
Preview Diff
1 | === modified file 'ibid/plugins/ascii.py' | |||
2 | --- ibid/plugins/ascii.py 2010-01-05 08:00:15 +0000 | |||
3 | +++ ibid/plugins/ascii.py 2010-01-07 12:36:20 +0000 | |||
4 | @@ -1,12 +1,11 @@ | |||
5 | 1 | from BaseHTTPServer import BaseHTTPRequestHandler | ||
6 | 2 | from cStringIO import StringIO | 1 | from cStringIO import StringIO |
7 | 3 | import Image | 2 | import Image |
8 | 4 | from os import remove | 3 | from os import remove |
9 | 5 | import os.path | 4 | import os.path |
10 | 6 | import subprocess | 5 | import subprocess |
11 | 6 | from sys import stderr | ||
12 | 7 | from tempfile import mkstemp | 7 | from tempfile import mkstemp |
15 | 8 | from urllib2 import HTTPError, URLError, urlopen | 8 | from urllib2 import urlopen |
14 | 9 | from urlparse import urlparse | ||
16 | 10 | from zipfile import ZipFile | 9 | from zipfile import ZipFile |
17 | 11 | 10 | ||
18 | 12 | from aalib import AsciiScreen | 11 | from aalib import AsciiScreen |
19 | @@ -14,7 +13,7 @@ | |||
20 | 14 | 13 | ||
21 | 15 | from ibid.config import Option, IntOption | 14 | from ibid.config import Option, IntOption |
22 | 16 | from ibid.plugins import Processor, match | 15 | from ibid.plugins import Processor, match |
24 | 17 | from ibid.utils import file_in_path, url_to_bytestring | 16 | from ibid.utils import file_in_path |
25 | 18 | 17 | ||
26 | 19 | """ | 18 | """ |
27 | 20 | Dependencies: | 19 | Dependencies: |
28 | @@ -44,43 +43,24 @@ | |||
29 | 44 | 43 | ||
30 | 45 | @match(r'^draw\s+(\S+\.\S+)(\s+in\s+colou?r)?(?:\s+w(?:idth)?\s+(\d+))?(?:\s+h(?:eight)\s+(\d+))?$') | 44 | @match(r'^draw\s+(\S+\.\S+)(\s+in\s+colou?r)?(?:\s+w(?:idth)?\s+(\d+))?(?:\s+h(?:eight)\s+(\d+))?$') |
31 | 46 | def draw(self, event, url, colour, width, height): | 45 | def draw(self, event, url, colour, width, height): |
58 | 47 | if not urlparse(url).netloc: | 46 | f = urlopen(url) |
59 | 48 | url = 'http://' + url | 47 | |
60 | 49 | if urlparse(url).scheme == 'file': | 48 | filesize = int(f.info().getheaders('Content-Length')[0]) |
61 | 50 | event.addresponse(u'Are you trying to haxor me?') | 49 | if filesize > self.max_filesize * 1024: |
62 | 51 | return | 50 | event.addresponse(u'File too large (limit is %i KiB)', self.max_filesize) |
63 | 52 | if not urlparse(url).path: | 51 | return |
64 | 53 | url += '/' | 52 | |
39 | 54 | |||
40 | 55 | try: | ||
41 | 56 | f = urlopen(url_to_bytestring(url)) | ||
42 | 57 | except HTTPError, e: | ||
43 | 58 | event.addresponse(u'Sorry, error fetching URL: %s', BaseHTTPRequestHandler.responses[e.code][0]) | ||
44 | 59 | return | ||
45 | 60 | except URLError: | ||
46 | 61 | event.addresponse(u'Sorry, error fetching URL') | ||
47 | 62 | return | ||
48 | 63 | |||
49 | 64 | content_length = f.info().getheaders('Content-Length') | ||
50 | 65 | if content_length and int(content_length[0]) > self.max_filesize * 1024: | ||
51 | 66 | event.addresponse(u'File too large (limit is %i KiB)', self.max_filesize) | ||
52 | 67 | return | ||
53 | 68 | |||
54 | 69 | buffer = f.read(self.max_filesize * 1024) | ||
55 | 70 | if f.read(1) != '': | ||
56 | 71 | event.addresponse(u'File too large (limit is %i KiB)', self.max_filesize) | ||
57 | 72 | return | ||
65 | 73 | try: | 53 | try: |
66 | 74 | ext = os.path.splitext(url)[1] | 54 | ext = os.path.splitext(url)[1] |
67 | 75 | image = mkstemp(suffix=ext)[1] | 55 | image = mkstemp(suffix=ext)[1] |
68 | 76 | file = open(image, 'w') | 56 | file = open(image, 'w') |
70 | 77 | file.write(buffer) | 57 | file.write(f.read()) |
71 | 78 | file.close() | 58 | file.close() |
72 | 79 | 59 | ||
73 | 80 | try: | 60 | try: |
74 | 81 | img = Image.open(StringIO(open(image, 'r').read())).convert('L') | 61 | img = Image.open(StringIO(open(image, 'r').read())).convert('L') |
77 | 82 | except IOError: | 62 | except: |
78 | 83 | event.addresponse(u"Sorry, that doesn't look like an image") | 63 | event.addresponse(u'Cannot understand image format') |
79 | 84 | return | 64 | return |
80 | 85 | input_width, input_height = img.size[0], img.size[1] | 65 | input_width, input_height = img.size[0], img.size[1] |
81 | 86 | 66 | ||
82 | @@ -120,8 +100,8 @@ | |||
83 | 120 | def draw_aa(self, event, image, width, height): | 100 | def draw_aa(self, event, image, width, height): |
84 | 121 | try: | 101 | try: |
85 | 122 | image = Image.open(StringIO(open(image, 'r').read())).convert('L') | 102 | image = Image.open(StringIO(open(image, 'r').read())).convert('L') |
88 | 123 | except IOError: | 103 | except: |
89 | 124 | event.addresponse(u"Sorry, that doesn't look like an image") | 104 | event.addresponse(u'Cannot understand image format') |
90 | 125 | return | 105 | return |
91 | 126 | screen = AsciiScreen(width=width, height=height) | 106 | screen = AsciiScreen(width=width, height=height) |
92 | 127 | image = image.resize(screen.virtual_size) | 107 | image = image.resize(screen.virtual_size) |
93 | @@ -129,6 +109,7 @@ | |||
94 | 129 | event.addresponse(unicode(screen.render()), address=False, conflate=False) | 109 | event.addresponse(unicode(screen.render()), address=False, conflate=False) |
95 | 130 | 110 | ||
96 | 131 | def draw_caca(self, event, image, width, height): | 111 | def draw_caca(self, event, image, width, height): |
97 | 112 | from sys import stderr | ||
98 | 132 | process = subprocess.Popen( | 113 | process = subprocess.Popen( |
99 | 133 | [self.img2txt_bin, '-f', 'irc', '-W', str(width), '-H', str(height), image], | 114 | [self.img2txt_bin, '-f', 'irc', '-W', str(width), '-H', str(height), image], |
100 | 134 | shell=False, stdout=subprocess.PIPE) | 115 | shell=False, stdout=subprocess.PIPE) |
101 | @@ -137,15 +118,14 @@ | |||
102 | 137 | if code == 0: | 118 | if code == 0: |
103 | 138 | event.addresponse(unicode(response.replace('\r', '')), address=False, conflate=False) | 119 | event.addresponse(unicode(response.replace('\r', '')), address=False, conflate=False) |
104 | 139 | else: | 120 | else: |
106 | 140 | event.addresponse(u"Sorry, that doesn't look like an image") | 121 | event.addresponse(u'Sorry, cannot understand image format') |
107 | 141 | 122 | ||
108 | 142 | class WriteFiglet(Processor): | 123 | class WriteFiglet(Processor): |
109 | 143 | u"""figlet <text> [in <font>] | 124 | u"""figlet <text> [in <font>] |
110 | 144 | list figlet fonts [from <index>]""" | 125 | list figlet fonts [from <index>]""" |
111 | 145 | feature = 'figlet' | 126 | feature = 'figlet' |
112 | 146 | 127 | ||
115 | 147 | max_width = IntOption('max_width', 'Maximum width for ascii output', 60) | 128 | fonts_zip = Option('fonts_zip', 'Zip file containing figlet fonts', 'data/figlet-fonts.zip') |
114 | 148 | fonts_zip = Option('fonts_zip', 'Zip file containing figlet fonts', 'ibid/data/figlet-fonts.zip') | ||
116 | 149 | 129 | ||
117 | 150 | def __init__(self, name): | 130 | def __init__(self, name): |
118 | 151 | Processor.__init__(self, name) | 131 | Processor.__init__(self, name) |
119 | @@ -178,7 +158,4 @@ | |||
120 | 178 | del rendered[0] | 158 | del rendered[0] |
121 | 179 | while rendered and rendered[-1].strip() == '': | 159 | while rendered and rendered[-1].strip() == '': |
122 | 180 | del rendered[-1] | 160 | del rendered[-1] |
123 | 181 | if rendered and len(rendered[0]) > self.max_width: | ||
124 | 182 | event.addresponse(u"Sorry that's too long, nobody will be able to read it") | ||
125 | 183 | return | ||
126 | 184 | event.addresponse(unicode('\n'.join(rendered)), address=False, conflate=False) | 161 | event.addresponse(unicode('\n'.join(rendered)), address=False, conflate=False) |
127 | 185 | 162 | ||
128 | === added file 'ibid/plugins/flight.py' | |||
129 | --- ibid/plugins/flight.py 1970-01-01 00:00:00 +0000 | |||
130 | +++ ibid/plugins/flight.py 2010-01-07 12:36:20 +0000 | |||
131 | @@ -0,0 +1,300 @@ | |||
132 | 1 | import csv | ||
133 | 2 | import re | ||
134 | 3 | from sys import maxint | ||
135 | 4 | from urllib import urlencode | ||
136 | 5 | from xml.etree import ElementTree | ||
137 | 6 | |||
138 | 7 | from dateutil.parser import parse | ||
139 | 8 | |||
140 | 9 | from ibid.config import IntOption | ||
141 | 10 | from ibid.plugins import Processor, match | ||
142 | 11 | from ibid.utils import cacheable_download, human_join | ||
143 | 12 | from ibid.utils.html import get_html_parse_tree | ||
144 | 13 | |||
145 | 14 | help = { u'airport' : u'Search for airports', | ||
146 | 15 | u'flight' : u'Search for flights on travelocity' } | ||
147 | 16 | |||
148 | 17 | airports_url = 'http://openflights.svn.sourceforge.net/viewvc/openflights/openflights/data/airports.dat' | ||
149 | 18 | |||
150 | 19 | airports = {} | ||
151 | 20 | |||
152 | 21 | def read_data(): | ||
153 | 22 | # File is listed as ISO 8859-1 (Latin-1) encoded on | ||
154 | 23 | # http://openflights.org/data.html, but from decoding it appears to | ||
155 | 24 | # actually be UTF8 | ||
156 | 25 | filename = cacheable_download(airports_url, 'flight/airports.dat') | ||
157 | 26 | reader = csv.reader(open(filename), delimiter=',', quotechar='"') | ||
158 | 27 | for row in reader: | ||
159 | 28 | airports[int(row[0])] = [unicode(r, 'utf-8') for r in row[1:]] | ||
160 | 29 | |||
161 | 30 | def airport_search(query, search_loc = True): | ||
162 | 31 | if not airports: | ||
163 | 32 | read_data() | ||
164 | 33 | if search_loc: | ||
165 | 34 | ids = airport_search(query, False) | ||
166 | 35 | if len(ids) == 1: | ||
167 | 36 | return ids | ||
168 | 37 | query = [unicode(q) for q in query.lower().split(' ') if q] | ||
169 | 38 | else: | ||
170 | 39 | query = [unicode(query.lower())] | ||
171 | 40 | ids = [] | ||
172 | 41 | for id, airport in airports.items(): | ||
173 | 42 | if search_loc: | ||
174 | 43 | data = (u' '.join(c.lower() for c in airport[:5])).split(' ') | ||
175 | 44 | elif len(query[0]) == 3: | ||
176 | 45 | data = [airport[3].lower()] | ||
177 | 46 | else: # assume lenght 4 (won't break if not) | ||
178 | 47 | data = [airport[4].lower()] | ||
179 | 48 | if len(filter(lambda q: q in data, query)) == len(query): | ||
180 | 49 | ids.append(id) | ||
181 | 50 | return ids | ||
182 | 51 | |||
183 | 52 | def repr_airport(id): | ||
184 | 53 | airport = airports[id] | ||
185 | 54 | code = '' | ||
186 | 55 | if airport[3] or airport[4]: | ||
187 | 56 | code = ' (%s)' % u'/'.join(filter(lambda c: c, airport[3:5])) | ||
188 | 57 | return '%s%s' % (airport[0], code) | ||
189 | 58 | |||
190 | 59 | class AirportSearch(Processor): | ||
191 | 60 | """airport [in] <name|location|code>""" | ||
192 | 61 | |||
193 | 62 | feature = 'airport' | ||
194 | 63 | |||
195 | 64 | @match(r'^airports?\s+(in\s+)?(.+)$') | ||
196 | 65 | def airport_search(self, event, search_loc, query): | ||
197 | 66 | search_loc = search_loc is not None | ||
198 | 67 | if not search_loc and not 3 <= len(query) <= 4: | ||
199 | 68 | event.addresponse(u'Airport code must be 3 or 4 characters') | ||
200 | 69 | return | ||
201 | 70 | ids = airport_search(query, search_loc) | ||
202 | 71 | if len(ids) == 0: | ||
203 | 72 | event.addresponse(u"Sorry, I don't know that airport") | ||
204 | 73 | elif len(ids) == 1: | ||
205 | 74 | id = ids[0] | ||
206 | 75 | airport = airports[id] | ||
207 | 76 | code = 'unknown code' | ||
208 | 77 | if airport[3] and airport[4]: | ||
209 | 78 | code = 'codes %s and %s' % (airport[3], airport[4]) | ||
210 | 79 | elif airport[3]: | ||
211 | 80 | code = 'code %s' % airport[3] | ||
212 | 81 | elif airport[4]: | ||
213 | 82 | code = 'code %s' % airport[4] | ||
214 | 83 | event.addresponse(u'%s in %s, %s has %s' % | ||
215 | 84 | (airport[0], airport[1], airport[2], code)) | ||
216 | 85 | else: | ||
217 | 86 | event.addresponse(u'Found the following airports: %s', human_join(repr_airport(id) for id in ids)[:480]) | ||
218 | 87 | |||
219 | 88 | class Flight: | ||
220 | 89 | def __init__(self): | ||
221 | 90 | self.flight, self.depart_time, self.depart_ap, self.arrive_time, \ | ||
222 | 91 | self.arrive_ap, self.duration, self.stops, self.price = \ | ||
223 | 92 | [], None, None, None, None, None, None, None | ||
224 | 93 | |||
225 | 94 | def int_price(self): | ||
226 | 95 | try: | ||
227 | 96 | return int(self.price[1:]) | ||
228 | 97 | except ValueError: | ||
229 | 98 | return maxint | ||
230 | 99 | |||
231 | 100 | def int_duration(self): | ||
232 | 101 | hours, minutes = 0, 0 | ||
233 | 102 | match = re.search(r'(\d+)hr', self.duration) | ||
234 | 103 | if match: | ||
235 | 104 | hours = int(match.group(1)) | ||
236 | 105 | match = re.search(r'(\d+)min', self.duration) | ||
237 | 106 | if match: | ||
238 | 107 | minutes = int(match.group(1)) | ||
239 | 108 | return int(hours)*60 + int(minutes) | ||
240 | 109 | |||
241 | 110 | MONTH_SHORT = ('Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec') | ||
242 | 111 | MONTH_LONG = ('January', 'February', 'March', 'April', 'May', 'June', 'July', 'August', 'September', 'October', 'November', 'December') | ||
243 | 112 | OTHER_STUFF = ('am', 'pm', 'st', 'nd', 'rd', 'th', 'morning', 'afternoon', 'evening', 'anytime') | ||
244 | 113 | DATE = r'(?:[0-9.:/hT -]|%s)+' % '|'.join(MONTH_SHORT+MONTH_LONG+OTHER_STUFF) | ||
245 | 114 | |||
246 | 115 | class FlightException(Exception): | ||
247 | 116 | pass | ||
248 | 117 | |||
249 | 118 | class FlightSearch(Processor): | ||
250 | 119 | """[<cheapest|quickest]> flight from <departure> to <destination> from <depart_date> [anytime|morning|afternoon|evening|<time>] to <return_date> [anytime|morning|afternoon|evening|<time>]""" | ||
251 | 120 | |||
252 | 121 | feature = 'flight' | ||
253 | 122 | |||
254 | 123 | max_results = IntOption('max_results', 'Maximum number of results to list', 5) | ||
255 | 124 | |||
256 | 125 | def _flight_search(self, event, dpt, to, dep_date, ret_date): | ||
257 | 126 | airport_dpt = airport_search(dpt) | ||
258 | 127 | airport_to = airport_search(to) | ||
259 | 128 | if len(airport_dpt) == 0: | ||
260 | 129 | event.addresponse(u"Sorry, I don't know the airport you want to leave from") | ||
261 | 130 | return | ||
262 | 131 | if len(airport_to) == 0: | ||
263 | 132 | event.addresponse(u"Sorry, I don't know the airport you want to fly to") | ||
264 | 133 | return | ||
265 | 134 | if len(airport_dpt) > 1: | ||
266 | 135 | event.addresponse(u'The following airports match the departure: %s', human_join(repr_airport(id) for id in airport_dpt)[:480]) | ||
267 | 136 | return | ||
268 | 137 | if len(airport_to) > 1: | ||
269 | 138 | event.addresponse(u'The following airports match the destination: %s', human_join(repr_airport(id) for id in airport_to)[:480]) | ||
270 | 139 | return | ||
271 | 140 | |||
272 | 141 | dpt = airport_dpt[0] | ||
273 | 142 | to = airport_to[0] | ||
274 | 143 | |||
275 | 144 | def to_travelocity_date(date): | ||
276 | 145 | date = date.lower() | ||
277 | 146 | time = None | ||
278 | 147 | for period in ['anytime', 'morning', 'afternoon', 'evening']: | ||
279 | 148 | if period in date: | ||
280 | 149 | time = period.title() | ||
281 | 150 | date = date.replace(period, '') | ||
282 | 151 | break | ||
283 | 152 | date = parse(date) | ||
284 | 153 | if time is None: | ||
285 | 154 | if date.hour == 0 and date.minute == 0: | ||
286 | 155 | time = 'Anytime' | ||
287 | 156 | else: | ||
288 | 157 | time = date.strftime('%I:00') | ||
289 | 158 | if time[0] == '0': | ||
290 | 159 | time = time[1:] | ||
291 | 160 | if date.hour < 12: | ||
292 | 161 | time += 'am' | ||
293 | 162 | else: | ||
294 | 163 | time += 'pm' | ||
295 | 164 | date = date.strftime('%m/%d/%Y') | ||
296 | 165 | return (date, time) | ||
297 | 166 | |||
298 | 167 | (dep_date, dep_time) = to_travelocity_date(dep_date) | ||
299 | 168 | (ret_date, ret_time) = to_travelocity_date(ret_date) | ||
300 | 169 | |||
301 | 170 | params = {} | ||
302 | 171 | params['leavingFrom'] = airports[dpt][3] | ||
303 | 172 | params['goingTo'] = airports[to][3] | ||
304 | 173 | params['leavingDate'] = dep_date | ||
305 | 174 | params['dateLeavingTime'] = dep_time | ||
306 | 175 | params['returningDate'] = ret_date | ||
307 | 176 | params['dateReturningTime'] = ret_time | ||
308 | 177 | etree = get_html_parse_tree('http://travel.travelocity.com/flights/InitialSearch.do', data=urlencode(params), treetype='etree') | ||
309 | 178 | while True: | ||
310 | 179 | script = [script for script in etree.getiterator('script')][1] | ||
311 | 180 | matches = script.text and re.search(r'var finurl = "(.*)"', script.text) | ||
312 | 181 | if matches: | ||
313 | 182 | url = 'http://travel.travelocity.com/flights/%s' % matches.group(1) | ||
314 | 183 | etree = get_html_parse_tree(url, treetype='etree') | ||
315 | 184 | else: | ||
316 | 185 | break | ||
317 | 186 | |||
318 | 187 | # Handle error | ||
319 | 188 | div = [d for d in etree.getiterator('div') if d.get(u'class') == 'e_content'] | ||
320 | 189 | if len(div): | ||
321 | 190 | error = div[0].find('h3').text | ||
322 | 191 | raise FlightException(error) | ||
323 | 192 | |||
324 | 193 | departing_flights = self._parse_travelocity(etree) | ||
325 | 194 | return_url = None | ||
326 | 195 | table = [t for t in etree.getiterator('table')][3] | ||
327 | 196 | for tr in table.getiterator('tr'): | ||
328 | 197 | for td in tr.getiterator('td'): | ||
329 | 198 | if td.get(u'class').strip() in ['tfPrice', 'tfPriceOrButton']: | ||
330 | 199 | div = td.find('div') | ||
331 | 200 | if div is not None: | ||
332 | 201 | button = div.find('button') | ||
333 | 202 | if button is not None: | ||
334 | 203 | onclick = button.get('onclick') | ||
335 | 204 | match = re.search(r"location.href='\.\./flights/(.+)'", onclick) | ||
336 | 205 | url_page = match.group(1) | ||
337 | 206 | match = re.search(r'^(.*?)[^/]*$', url) | ||
338 | 207 | url_base = match.group(1) | ||
339 | 208 | return_url = url_base + url_page | ||
340 | 209 | |||
341 | 210 | etree = get_html_parse_tree(return_url, treetype='etree') | ||
342 | 211 | returning_flights = self._parse_travelocity(etree) | ||
343 | 212 | |||
344 | 213 | return (departing_flights, returning_flights, url) | ||
345 | 214 | |||
346 | 215 | def _parse_travelocity(self, etree): | ||
347 | 216 | flights = [] | ||
348 | 217 | table = [t for t in etree.getiterator('table') if t.get(u'id') == 'tfGrid'][0] | ||
349 | 218 | trs = [t for t in table.getiterator('tr')] | ||
350 | 219 | tr_index = 1 | ||
351 | 220 | while tr_index < len(trs): | ||
352 | 221 | tds = [] | ||
353 | 222 | while True: | ||
354 | 223 | new_tds = [t for t in trs[tr_index].getiterator('td')] | ||
355 | 224 | tds.extend(new_tds) | ||
356 | 225 | tr_index += 1 | ||
357 | 226 | if len(filter(lambda t: t.get(u'class').strip() == u'tfAirlineSeatsMR', new_tds)): | ||
358 | 227 | break | ||
359 | 228 | flight = Flight() | ||
360 | 229 | for td in tds: | ||
361 | 230 | if td.get(u'class').strip() == u'tfAirline': | ||
362 | 231 | anchor = td.find('a') | ||
363 | 232 | if anchor is not None: | ||
364 | 233 | airline = anchor.text.strip() | ||
365 | 234 | else: | ||
366 | 235 | airline = td.text.split('\n')[0].strip() | ||
367 | 236 | flight.flight.append(u'%s %s' % (airline, td.find('div').text.strip())) | ||
368 | 237 | if td.get(u'class').strip() == u'tfDepart' and td.text: | ||
369 | 238 | flight.depart_time = td.text.split('\n')[0].strip() | ||
370 | 239 | flight.depart_ap = '%s %s' % (td.find('div').text.strip(), | ||
371 | 240 | td.find('div').find('span').text.strip()) | ||
372 | 241 | if td.get(u'class').strip() == u'tfArrive' and td.text: | ||
373 | 242 | flight.arrive_time = td.text.split('\n')[0].strip() | ||
374 | 243 | span = td.find('span') | ||
375 | 244 | if span is not None and span.get(u'class').strip() == u'tfNextDayDate': | ||
376 | 245 | flight.arrive_time = u'%s %s' % (flight.arrive_time, span.text.strip()[2:]) | ||
377 | 246 | span = [s for s in td.find('div').getiterator('span')][1] | ||
378 | 247 | flight.arrive_ap = '%s %s' % (td.find('div').text.strip(), | ||
379 | 248 | span.text.strip()) | ||
380 | 249 | else: | ||
381 | 250 | flight.arrive_ap = '%s %s' % (td.find('div').text.strip(), | ||
382 | 251 | td.find('div').find('span').text.strip()) | ||
383 | 252 | if td.get(u'class').strip() == u'tfTime' and td.text: | ||
384 | 253 | flight.duration = td.text.strip() | ||
385 | 254 | flight.stops = td.find('span').find('a').text.strip() | ||
386 | 255 | if td.get(u'class').strip() in [u'tfPrice', u'tfPriceOr'] and td.text: | ||
387 | 256 | flight.price = td.text.strip() | ||
388 | 257 | flight.flight = human_join(flight.flight) | ||
389 | 258 | flights.append(flight) | ||
390 | 259 | |||
391 | 260 | return flights | ||
392 | 261 | |||
393 | 262 | @match(r'^(?:(cheapest|quickest)\s+)?flights?\s+from\s+(.+)\s+to\s+(.+)\s+from\s+(%s)\s+to\s+(%s)$' % (DATE, DATE)) | ||
394 | 263 | def flight_search(self, event, priority, dpt, to, dep_date, ret_date): | ||
395 | 264 | try: | ||
396 | 265 | flights = self._flight_search(event, dpt, to, dep_date, ret_date) | ||
397 | 266 | except FlightException, e: | ||
398 | 267 | event.addresponse(unicode(e)) | ||
399 | 268 | return | ||
400 | 269 | if flights is None: | ||
401 | 270 | return | ||
402 | 271 | if len(flights[0]) == 0: | ||
403 | 272 | event.addresponse(u'No matching departure flights found') | ||
404 | 273 | return | ||
405 | 274 | if len(flights[1]) == 0: | ||
406 | 275 | event.addresponse(u'No matching return flights found') | ||
407 | 276 | return | ||
408 | 277 | |||
409 | 278 | cmp = None | ||
410 | 279 | if priority == 'cheapest': | ||
411 | 280 | cmp = lambda a, b: a.int_price() < b.int_price() | ||
412 | 281 | elif priority == 'quickest': | ||
413 | 282 | cmp = lambda a, b: a.int_duration() < b.int_duration() | ||
414 | 283 | if cmp: | ||
415 | 284 | # select best flight based on priority | ||
416 | 285 | for i in xrange(2): | ||
417 | 286 | flights[i].sort(cmp=cmp) | ||
418 | 287 | del flights[i][1:] | ||
419 | 288 | for i, flight_type in zip(xrange(2), ['Departing', 'Returning']): | ||
420 | 289 | if len(flights[i]) > 1: | ||
421 | 290 | event.addresponse(u'%s flights:', flight_type) | ||
422 | 291 | for flight in flights[i][:self.max_results]: | ||
423 | 292 | leading = '' | ||
424 | 293 | if len(flights[i]) == 1: | ||
425 | 294 | leading = u'%s flight: ' % flight_type | ||
426 | 295 | event.addresponse('%s%s departing %s from %s, arriving %s at %s (flight time %s, %s) costs %s per person', | ||
427 | 296 | (leading, flight.flight, flight.depart_time, flight.depart_ap, flight.arrive_time, | ||
428 | 297 | flight.arrive_ap, flight.duration, flight.stops, flight.price or 'unknown')) | ||
429 | 298 | event.addresponse(u'Full results: %s', flights[2]) | ||
430 | 299 | |||
431 | 300 | # vi: set et sta sw=4 ts=4: | ||
432 | 0 | 301 | ||
433 | === modified file 'ibid/utils/__init__.py' | |||
434 | --- ibid/utils/__init__.py 2010-01-06 23:19:42 +0000 | |||
435 | +++ ibid/utils/__init__.py 2010-01-07 12:36:20 +0000 | |||
436 | @@ -80,9 +80,14 @@ | |||
437 | 80 | req.add_header('User-Agent', 'Ibid/' + (ibid_version() or 'dev')) | 80 | req.add_header('User-Agent', 'Ibid/' + (ibid_version() or 'dev')) |
438 | 81 | 81 | ||
439 | 82 | if exists: | 82 | if exists: |
443 | 83 | modified = os.path.getmtime(cachefile) | 83 | if os.path.isfile(cachefile + '.etag'): |
444 | 84 | modified = time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(modified)) | 84 | f = file(cachefile + '.etag', 'r') |
445 | 85 | req.add_header("If-Modified-Since", modified) | 85 | req.add_header("If-None-Match", f.readline().strip()) |
446 | 86 | f.close() | ||
447 | 87 | else: | ||
448 | 88 | modified = os.path.getmtime(cachefile) | ||
449 | 89 | modified = time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(modified)) | ||
450 | 90 | req.add_header("If-Modified-Since", modified) | ||
451 | 86 | 91 | ||
452 | 87 | try: | 92 | try: |
453 | 88 | connection = urllib2.urlopen(req) | 93 | connection = urllib2.urlopen(req) |
454 | @@ -106,6 +111,12 @@ | |||
455 | 106 | gzipper = GzipFile(fileobj=compressedstream) | 111 | gzipper = GzipFile(fileobj=compressedstream) |
456 | 107 | data = gzipper.read() | 112 | data = gzipper.read() |
457 | 108 | 113 | ||
458 | 114 | etag = connection.headers.get('etag') | ||
459 | 115 | if etag: | ||
460 | 116 | f = file(cachefile + '.etag', 'w') | ||
461 | 117 | f.write(etag + '\n') | ||
462 | 118 | f.close() | ||
463 | 119 | |||
464 | 109 | outfile = file(cachefile, 'wb') | 120 | outfile = file(cachefile, 'wb') |
465 | 110 | outfile.write(data) | 121 | outfile.write(data) |
466 | 111 | outfile.close() | 122 | outfile.close() |
I think the flight plugin is mature enough for some reviews. It's quite possible some gaping bugs are still out there, but it seems to be robust enough.