Prettify the gcalc plugin; make the YouTube search use the JSON-C interface

This commit is contained in:
Ryan Hitchman 2010-02-28 18:39:29 -07:00
parent a4ada7893c
commit ab8f316eb9
2 changed files with 41 additions and 38 deletions

View File

@ -1,27 +1,28 @@
import urllib2
import re
from lxml import html
from util import hook from util import hook
import urllib, httplib, sys
def doquery(argv):
    """Scrape the Google Calculator answer for *argv* from the search page.

    Returns the cleaned-up result string, or an apology if Google did not
    show a calculator box for this query.
    """
    marker_open = '<h2 class=r style="font-size:138%"><b>'
    marker_close = '</b>'

    conn = httplib.HTTPConnection("www.google.com")
    conn.request("GET", "/search?" + urllib.urlencode({'q': argv}))
    data = conn.getresponse().read()

    # Guard clause: no calculator box on the page.
    pos = data.find(marker_open)
    if pos == -1:
        return "Could not calculate " + argv

    # Everything between the opening marker and the next closing tag.
    tail = data[pos + len(marker_open):]
    result = tail[:tail.index(marker_close)]

    # Strip Google's HTML decoration from the number.
    for needle, repl in (("<font size=-2> </font>", ","),
                         (" &#215; 10<sup>", "E"),
                         ("</sup>", ""),
                         ("\xa0", ",")):
        result = result.replace(needle, repl)
    return result
@hook.command
def calc(inp):
    '''.calc <term> -- returns Google Calculator result'''
    if not inp:
        return calc.__doc__

    # Fetch the results page, identifying ourselves as skybot.
    request = urllib2.Request("http://www.google.com/search?q="
                              + urllib2.quote(inp, ''))
    request.add_header('User-Agent', 'skybot')
    page = urllib2.build_opener().open(request).read()

    # ugh, scraping HTML with regexes
    match = re.search(r'<h2 class=r style="font-size:138%"><b>(.*?)</b>', page)
    if match is None:
        return "could not calculate " + inp

    # Strip Google's HTML decoration from the number.
    answer = match.group(1)
    for needle, repl in (("<font size=-2> </font>", ","),
                         (" &#215; 10<sup>", "E"),
                         ("</sup>", ""),
                         ("\xa0", ",")):
        answer = answer.replace(needle, repl)
    return answer

View File

@ -11,13 +11,14 @@ from urllib import quote_plus
locale.setlocale(locale.LC_ALL, '') locale.setlocale(locale.LC_ALL, '')
youtube_re = re.compile(r'youtube.*?v=([-_a-z0-9]+)', flags=re.I) youtube_re = re.compile(r'youtube.*?v=([-_a-z0-9]+)', flags=re.I)
# Base of YouTube's GData API; both endpoints below ask for the JSON-C format.
base_url = 'http://gdata.youtube.com/feeds/api/'

# Metadata lookup for a single video id.
url = base_url + 'videos/%s?v=2&alt=jsonc'

# Search endpoint, capped at one result.
search_api_url = base_url + 'videos?v=2&alt=jsonc&max-results=1&q=%s'

# Canonical watch-page link for a video id.
video_url = "http://youtube.com/watch?v=%s"
def get_video_description(vid): def get_video_description(vid_id):
j = json.load(urllib2.urlopen(url % vid)) j = json.load(urllib2.urlopen(url % vid_id))
if j.get('error'): if j.get('error'):
return return
@ -59,19 +60,20 @@ def youtube_url(inp):
if m: if m:
return get_video_description(m.group(1)) return get_video_description(m.group(1))
@hook.command
@hook.command('y')
def youtube(inp):
    '.youtube <query> -- returns the first YouTube search result for <query>'
    # One-result search against the JSON-C endpoint.
    j = json.load(urllib2.urlopen(search_api_url % quote_plus(inp)))

    if 'error' in j:
        return 'error performing search'

    if j['data']['totalItems'] == 0:
        return 'no results found'

    vid_id = j['data']['items'][0]['id']
    return get_video_description(vid_id) + " - " + video_url % vid_id