2010-02-13 05:13:55 +00:00
|
|
|
import re
|
2010-02-14 01:11:42 +00:00
|
|
|
import time
|
2010-01-17 04:24:36 +00:00
|
|
|
|
2010-04-23 03:47:41 +00:00
|
|
|
from util import hook, http
|
2010-03-01 01:08:54 +00:00
|
|
|
|
2010-01-17 04:24:36 +00:00
|
|
|
|
2010-03-12 00:19:36 +00:00
|
|
|
# (pattern, flags) tuple consumed by @hook.regex below: matches youtube.com
# watch/embed links, youtu.be short links, and yooouuutuuube mirrors, and
# captures the video id in group 1.  Case-insensitive.
youtube_re = (r'(?:youtube.*?(?:v=|/v/)|youtu\.be/|yooouuutuuube.*?id=)'
              '([-_a-z0-9]+)', re.I)

# Root of the YouTube GData (v2) REST API.
base_url = 'http://gdata.youtube.com/feeds/api/'

# Single-video metadata endpoint (JSON-C format); %s is the video id.
url = base_url + 'videos/%s?v=2&alt=jsonc'

# Search endpoint, limited to the single best match.
search_api_url = base_url + 'videos?v=2&alt=jsonc&max-results=1'

# Canonical watch-page URL template; %s is the video id.
video_url = 'http://youtube.com/watch?v=%s'
|
2010-03-01 01:08:54 +00:00
|
|
|
|
2010-03-01 02:32:41 +00:00
|
|
|
|
2010-03-01 01:39:29 +00:00
|
|
|
def get_video_description(vid_id):
    """Return a one-line, IRC-formatted summary for a YouTube video.

    vid_id -- video id as captured from a URL or returned by search.
    Returns None when the API reports an error for the id; otherwise a
    string with bold (\\x02) / color (\\x03) IRC control codes containing
    title, length, rating, view count, uploader/date and an NSFW flag.
    """
    j = http.get_json(url % vid_id)

    # API-level failure (bad/removed id) -- signal with None.
    if j.get('error'):
        return

    j = j['data']

    out = '\x02%s\x02' % j['title']

    # Livestreams / some entries carry no duration; stop at the title.
    if not j.get('duration'):
        return out

    out += ' - length \x02'

    length = j['duration']
    # Floor division keeps this integer arithmetic identical on Python 2
    # and Python 3 (plain '/' would produce floats on Python 3, breaking
    # the truthiness tests below).
    if length // 3600:  # longer than an hour
        out += '%dh ' % (length // 3600)
    if length // 60:
        out += '%dm ' % (length // 60 % 60)
    out += "%ds\x02" % (length % 60)

    if 'rating' in j:
        out += ' - rated \x02%.2f/5.0\x02 (%d)' % (j['rating'],
                                                   j['ratingCount'])

    if 'viewCount' in j:
        out += ' - \x02%s\x02 views' % group_int_digits(j['viewCount'])

    # GData timestamps are always UTC with millisecond suffix ".000Z".
    upload_time = time.strptime(j['uploaded'], "%Y-%m-%dT%H:%M:%S.000Z")
    out += ' - \x02%s\x02 on \x02%s\x02' % (
        j['uploader'], time.strftime("%Y.%m.%d", upload_time))

    if 'contentRating' in j:
        out += ' - \x034NSFW\x02'

    return out
|
2010-03-01 02:32:41 +00:00
|
|
|
|
2012-04-15 18:54:58 +00:00
|
|
|
def group_int_digits(number, delimiter=' ', grouping=3):
    """Format *number* with *delimiter* between groups of *grouping* digits.

    e.g. group_int_digits(1234567) -> '1 234 567'.  Accepts anything whose
    str() is a digit string (ints or numeric strings).
    """
    digits = str(number).strip()
    chunks = []
    # Walk from the right edge, peeling off `grouping` characters at a time.
    pos = len(digits)
    while pos > 0:
        chunks.append(digits[max(0, pos - grouping):pos])
        pos -= grouping
    return delimiter.join(reversed(chunks))
|
2010-02-14 19:10:21 +00:00
|
|
|
|
2010-03-12 00:19:36 +00:00
|
|
|
@hook.regex(*youtube_re)
def youtube_url(match):
    # Fired whenever a YouTube link appears in channel; group 1 of
    # youtube_re is the video id.
    vid_id = match.group(1)
    return get_video_description(vid_id)
|
2010-03-01 01:08:54 +00:00
|
|
|
|
2010-03-01 01:39:29 +00:00
|
|
|
|
2014-01-09 01:29:35 +00:00
|
|
|
@hook.command('yt')
@hook.command('y')
@hook.command
def youtube(inp):
    '.youtube <query> -- returns the first YouTube search result for <query>'

    j = http.get_json(search_api_url, q=inp)

    if 'error' in j:
        return 'error while performing the search'

    if j['data']['totalItems'] == 0 or 'items' not in j['data']:
        return 'no results found'

    vid_id = j['data']['items'][0]['id']

    # get_video_description() returns None when the per-video lookup fails
    # (e.g. the result was deleted between search and fetch); without this
    # guard the concatenation below raises TypeError.
    desc = get_video_description(vid_id)
    if desc is None:
        return 'error fetching video details'

    return desc + " - " + video_url % vid_id
|