2 # -*- coding: UTF-8 -*-
import os
import re
from datetime import datetime, time, timedelta
from textwrap import wrap
from urllib import urlencode

from gotovienna.BeautifulSoup import BeautifulSoup, NavigableString
#from urllib2 import urlopen
from UrlOpener import urlopen

from gotovienna import defaults
# Location kinds accepted by the wienerlinien routing form; validated in search().
POSITION_TYPES = ('stop', 'address', 'poi')

# Unparsable result pages are dumped here for post-mortem debugging (see overview()).
DEBUGLOG = os.path.expanduser('~/gotoVienna.debug')
class ParserError(Exception):
    """Raised when a wienerlinien response page cannot be parsed."""

    def __init__(self, msg='Parser error'):
        # NOTE(review): the original __init__ body was missing from this
        # chunk; storing the message mirrors the default-argument signature.
        Exception.__init__(self, msg)
        self.msg = msg
26 UNKNOWN, CORRECTION, RESULT = range(3)
def extract_city(station):
    """ Extract city from string if present,
    else return default city

    >>> extract_city('Karlsplatz, Wien')
    'Wien'
    >>> extract_city('Karlsplatz')
    'Wien'
    """
    if len(station.split(',')) > 1:
        return station.split(',')[-1].strip()
    # No city suffix: fall back to the default, consistent with split_station().
    return 'Wien'
def extract_station(station):
    """ Remove city from string

    >>> extract_station('Karlsplatz, Wien')
    'Karlsplatz'
    >>> extract_station('Karlsplatz')
    'Karlsplatz'
    """
    if len(station.split(',')) > 1:
        return station[:station.rindex(',')].strip()
    # No city suffix present: the string already is the bare station name.
    return station
def split_station(station):
    """ Split a station string into a (station, city) pair.

    >>> split_station('Karlsplatz, Wien')
    ('Karlsplatz', 'Wien')
    >>> split_station('Karlsplatz')
    ('Karlsplatz', 'Wien')
    """
    if len(station.split(',')) > 1:
        # Everything before the last comma is the station, after it the city.
        return (station[:station.rindex(',')].strip(), station.split(',')[-1].strip())
    return (station, 'Wien')
def guess_location_type(location):
    """Guess type (stop, address, poi) of a location

    >>> guess_location_type('pilgramgasse')
    'stop'
    >>> guess_location_type('karlsplatz 14')
    'address'
    >>> guess_location_type('reumannplatz 12/34')
    'address'
    """
    parts = location.split()

    # Robustness: an empty string cannot be classified further.
    if not parts:
        return 'stop'

    # Assume all single-word locations are stops
    if len(parts) == 1:
        return 'stop'

    last_part = parts[-1]

    # If the last part is numeric, assume address
    if last_part.isdigit() and len(parts) > 1:
        return 'address'

    # Addresses with door number (e.g. "12/34")
    if all(x.isdigit() or x == '/' for x in last_part):
        return 'address'

    # Sane default - assume it's a stop/station name
    return 'stop'
94 def search(origin_tuple, destination_tuple, dtime=None):
95 """ build route request
96 returns html result (as urllib response)
99 dtime = datetime.now()
101 origin, origin_type = origin_tuple
102 origin, origin_city = split_station(origin)
104 destination, destination_type = destination_tuple
105 destination, destination_city = split_station(destination)
108 if origin_type is None:
109 origin_type = guess_location_type(origin)
110 print 'Guessed origin type:', origin_type
112 if destination_type is None:
113 destination_type = guess_location_type(destination)
114 print 'Guessed destination type:', destination_type
116 if (origin_type not in POSITION_TYPES or
117 destination_type not in POSITION_TYPES):
118 raise ParserError('Invalid position type')
120 post = defaults.search_post
121 post['name_origin'] = origin
122 post['type_origin'] = origin_type
123 post['name_destination'] = destination
124 post['type_destination'] = destination_type
125 post['itdDateDayMonthYear'] = dtime.strftime('%d.%m.%Y')
126 post['itdTime'] = dtime.strftime('%H:%M')
127 post['place_origin'] = origin_city
128 post['place_destination'] = destination_city
129 params = urlencode(post)
130 url = '%s?%s' % (defaults.action, params)
137 """ Parser for search response
def __init__(self, html):
    """Parse the given wienerlinien search-response HTML for inspection."""
    self.soup = BeautifulSoup(html)
def check_page(self):
    """Classify the parsed page.

    Returns PageType.RESULT for a route-result page, PageType.CORRECTION
    when the server answered with a correction form, PageType.UNKNOWN
    otherwise.
    """
    result_form = self.soup.find('form', {'id': 'form_efaresults'})
    if result_form:
        return PageType.RESULT

    error_div = self.soup.find('div', {'class': 'form_error'})
    return PageType.CORRECTION if error_div else PageType.UNKNOWN
152 state = property(check_page)
def get_correction(self):
    """Collect the correction options offered by the server.

    Returns a dict with any of the keys 'origin', 'destination',
    'place_origin', 'place_destination', each mapping to the list of
    suggested option texts.

    Raises ParserError when no correction <select> element is present.

    NOTE(review): guard lines and the return were missing from this chunk
    and were reconstructed. The original also filled place_origin /
    place_destination from the *names* selects — a copy-paste bug fixed
    here by reading the places_* selects instead.
    """
    names_origin = self.soup.find('select', {'id': 'nameList_origin'})
    names_destination = self.soup.find('select', {'id': 'nameList_destination'})
    places_origin = self.soup.find('select', {'id': 'placeList_origin'})
    places_destination = self.soup.find('select', {'id': 'placeList_destination'})

    if any([names_origin, names_destination, places_origin, places_destination]):
        corrections = {}

        if names_origin:
            corrections['origin'] = map(lambda x: x.text,
                                        names_origin.findAll('option'))
        if names_destination:
            corrections['destination'] = map(lambda x: x.text,
                                             names_destination.findAll('option'))
        if places_origin:
            corrections['place_origin'] = map(lambda x: x.text,
                                              places_origin.findAll('option'))
        if places_destination:
            corrections['place_destination'] = map(lambda x: x.text,
                                                   places_destination.findAll('option'))
        return corrections

    raise ParserError('Unable to parse html')
def get_result(self):
    """Wrap the current (result) page in an rParser to extract the routes."""
    return rParser(str(self.soup))
189 """ Parser for routing results
def __init__(self, html):
    """Parse routing-result HTML; overview and details are parsed lazily."""
    self.soup = BeautifulSoup(html)
    # Lazy caches filled by overview() / details().
    self._overview = None
    # NOTE(review): this initializer was missing from the chunk but is
    # required — details() reads self._details before assigning it.
    self._details = None
@classmethod
def get_tdtext(cls, x, cl):
    """Text of the first <td> with CSS class *cl* inside row *x*.

    NOTE(review): the @classmethod decorator line was missing from this
    chunk; it is required because callers invoke rParser.get_tdtext(x, cl)
    without an instance.
    """
    return x.find('td', {'class': cl}).text
@classmethod
def get_change(cls, x):
    """Number of line changes of an overview row, or None when blank.

    NOTE(review): the original conversion tail was missing from this
    chunk; the int() conversion is reconstructed from the 'col_change'
    column semantics — confirm against upstream history.
    """
    y = rParser.get_tdtext(x, 'col_change')
    if y:
        return int(y)
    return None
@classmethod
def get_price(cls, x):
    """Ticket price of an overview row as a float, or None when blank.

    NOTE(review): the guard lines were missing from this chunk and were
    reconstructed; only the final conversion was visible.
    """
    y = rParser.get_tdtext(x, 'col_price')
    if not y:
        return None
    # wienerlinien prints prices with a decimal comma.
    return float(y.replace(',', '.'))
@classmethod
def get_date(cls, x):
    """Date of an overview row ('col_date', DD.MM.YYYY) as a datetime.date.

    NOTE(review): decorator and guard lines were missing from this chunk;
    a ValueError fallback to None was reconstructed — confirm upstream.
    """
    y = rParser.get_tdtext(x, 'col_date')
    try:
        return datetime.strptime(y, '%d.%m.%Y').date()
    except ValueError:
        return None
def get_datetime(cls, x):
    """Extract departure/arrival of row *x* as [datetime, datetime].

    Handles two 'col_time' layouts: a "HH:MM-HH:MM" span (combined with
    the row's date column) and a whitespace-separated
    "[date] time [date] time" pair parsed via regex.

    NOTE(review): several original lines are missing from this chunk —
    a probable @classmethod decorator, an `else:` before the second
    to_dtime assignment, the 'time' entry of dtregex, and the regex
    match/groupdict handling. Gaps are flagged inline; do not assume the
    code below is runnable as-is.
    """
    y = rParser.get_tdtext(x, 'col_time')
    # Span form "HH:MM-HH:MM": pair with the row's 'col_date' column.
    if (y.find("-") > 0):
        times = map(lambda z: time(*map(int, z.split(':'))), y.split('-'))
        d = rParser.get_date(x)
        from_dtime = datetime.combine(d, times[0])
        if times[0] > times[1]:
            # Arrival earlier than departure -> trip crosses midnight.
            to_dtime = datetime.combine(d + timedelta(1), times[1])
            # NOTE(review): an `else:` line is missing from the chunk here;
            # the next assignment belongs to that else branch.
            to_dtime = datetime.combine(d, times[1])
        return [from_dtime, to_dtime]
    # Regex form: optional date before each time.
    dtregex = {'date' : '\d\d\.\d\d',
               # NOTE(review): the 'time' pattern entry and the closing brace
               # of this dict are missing from the chunk.
    regex = "\s*(?P<date1>{date})?\s*(?P<time1>{time})\s*(?P<date2>{date})?\s*(?P<time2>{time})\s*".format(**dtregex)
    ma = re.match(regex, y)
    # NOTE(review): the lines deriving `gr` (presumably ma.groupdict()) and
    # handling a failed match are missing from the chunk.
    def extract_datetime(gr, n):
        # Build the n-th datetime from groups date<n>/time<n>.
        if 'date%d' % n in gr and gr['date%d' % n]:
            if gr['time%d' % n] == '24:00':
                gr['time%d' % n] = '0:00'
            from_dtime = datetime.strptime(str(datetime.today().year) + gr['date%d' % n] + gr['time%d' % n], '%Y%d.%m.%H:%M')
            # NOTE(review): a line is missing here (likely `return` of the
            # value above followed by `else:`); the branch below handles
            # rows without an explicit date.
            d = datetime.today().date()
            # Strange times possible at wienerlinien
            if gr['time%d' % n] == '24:00':
                gr['time%d' % n] = '0:00'
                d += timedelta(days=1)
            t = datetime.strptime(gr['time%d' % n], '%H:%M').time()
            return datetime.combine(d, t)
    from_dtime = extract_datetime(gr, 1)
    to_dtime = extract_datetime(gr, 2)
    return [from_dtime, to_dtime]
281 for detail in self.details():
def _parse_details(self):
    """Parse every 'tourdetail' table into a per-trip list of step dicts.

    NOTE(review): the tail of the outer map() (its closing over `tours`)
    and the return statement are missing from this chunk.
    """
    tours = self.soup.findAll('div', {'class': 'data_table tourdetail'})

    # One dict per <tr>: timespan, station names and free-text info.
    # Only NavigableString children are kept (drops nested tags); the
    # inner `lambda x` shadows the outer one — presumably intentional.
    trips = map(lambda x: map(lambda y: {
                'timespan': rParser.get_datetime(y),
                'station': map(lambda z: z[2:].strip(),
                               filter(lambda x: type(x) == NavigableString, y.find('td', {'class': 'col_station'}).contents)), # filter non NaviStrings
                'info': map(lambda x: x.strip(),
                            filter(lambda z: type(z) == NavigableString, y.find('td', {'class': 'col_info'}).contents)),
            }, x.find('tbody').findAll('tr')),
def details(self):
    """returns list of trip details
    [ [ { 'time': [datetime.time, datetime.time] if time else [],
          'station': [u'start', u'end'] if station else [],
          'info': [u'start station' if station else u'details for walking', u'end station' if station else u'walking duration']
        }, ... # next trip step
      ], ... # next trip possibility
    ]

    Lazily parses on first access and caches the result in self._details.

    NOTE(review): the `def` line and the final return were missing from
    this chunk and have been reconstructed from the visible cache check.
    """
    if not self._details:
        self._details = self._parse_details()
    return self._details
def _parse_overview(self):
    """Build the overview list (one dict per route alternative).

    Raises ParserError when the expected overview table is absent.

    NOTE(review): the closing of the map() call and a probable
    `return overview` are missing from this chunk — the dict may have
    carried more keys than the three visible ones.
    """
    table = self.soup.find('table', {'id': 'tbl_fahrten'})

    # check if there is an overview table
    if table and table.findAll('tr'):
        rows = table.findAll('tr')[1:] # cut off headline

        overview = map(lambda x: {
                'timespan': rParser.get_datetime(x),
                'change': rParser.get_change(x),
                'price': rParser.get_price(x),
    raise ParserError('Unable to parse overview')
def overview(self):
    """Return the parsed route overview, caching the result.

    On an AttributeError during parsing the raw page is dumped to
    DEBUGLOG for offline debugging.

    NOTE(review): the `def` line, the `try:` opener and the lines after
    the debug write were missing from this chunk; whether the original
    re-raised after dumping could not be determined — confirm upstream.
    """
    if not self._overview:
        try:
            self._overview = self._parse_overview()
        except AttributeError:
            # Keep a copy of the unparsable page; `with` also fixes the
            # file-handle leak visible in the original open()/write().
            with open(DEBUGLOG, 'w') as f:
                f.write(str(self.soup))
    return self._overview