reconnect on socket.error, PEP8

Ryan Hitchman 2011-05-11 15:40:04 -05:00
parent 339d853258
commit a4d70df463
10 changed files with 32 additions and 27 deletions

View File

@@ -73,7 +73,7 @@ class crlf_tcp(object):
                         self.socket.close()
                         return
                     time.sleep(1)
-            except self.get_timeout_exception_type(), e:
+            except (self.get_timeout_exception_type(), socket.error) as e:
                 if self.handle_receive_exception(e, last_timestamp):
                     return
                 continue

View File

@@ -1,9 +1,11 @@
 from util import hook, http
 
+
 api_key = ""
 
 api_url = "http://ws.audioscrobbler.com/2.0/?format=json"
 
+
 @hook.command
 def lastfm(inp, nick='', say=None):
     if inp:

View File

@@ -1,10 +1,10 @@
 # metacritic.com scraper
-from util import hook, http
 import re
 from urllib2 import HTTPError
 
+from util import hook, http
+
 
 @hook.command('mc')
 def metacritic(inp):
@@ -16,7 +16,7 @@ def metacritic(inp):
     args = inp.strip()
 
     game_platforms = ('x360', 'ps3', 'pc', 'ds', 'wii', '3ds', 'gba')
-    all_platforms = game_platforms + ('all','movie','tv','album')
+    all_platforms = game_platforms + ('all', 'movie', 'tv', 'album')
 
     try:
         plat, title = args.split(' ', 1)
@@ -53,10 +53,10 @@ def metacritic(inp):
                 <div class="main_stats">
                     <h3 class="product_title basic_stat">...</h3>
                     <div class="std_score">
                       <div class="score_wrap">
                         <span class="label">Metascore: </span>
                         <span class="data metascore score_favorable">87</span>
                       </div>
                     </div>
                 </div>
                 <div class="more_stats extended_stats">...</div>
@@ -129,10 +129,7 @@ def metacritic(inp):
     except IndexError:
         score = None
 
-    result = '[%s] %s - %s, %s -- %s' % (plat.upper(), name,
+    return '[%s] %s - %s, %s -- %s' % (plat.upper(), name,
         score or 'no score',
         'release: %s' % release if release else 'unreleased',
         link)
-
-    return result

View File

@@ -15,9 +15,13 @@ def get_version():
     revnumber = len(stdout.splitlines())
 
-    ret = stdout.split(None, 1)[0]
+    shorthash = stdout.split(None, 1)[0]
+
+    http.ua_skybot = 'Skybot/r%d %s (http://github.com/rmmh/skybot)' \
+        % (revnumber, shorthash)
 
-    return ret, revnumber
+    return shorthash, revnumber
 
 
 #autorejoin channels
 @hook.event('KICK')
@@ -55,11 +59,10 @@ def onjoin(paraml, conn=None):
     # set user-agent
     ident, rev = get_version()
-    http.ua_skybot = 'Skybot/r%d %s http://github.com/rmmh/skybot' % (rev, ident)
 
 
 @hook.regex(r'^\x01VERSION\x01$')
 def version(inp, notice=None):
     ident, rev = get_version()
     notice('\x01VERSION skybot %s r%d - http://github.com/rmmh/'
            'skybot/\x01' % (ident, rev))
-    http.ua_skybot = 'Skybot/r%d %s http://github.com/rmmh/skybot' % (rev, ident)
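Both removals above work because the first misc.py hunk folds the user-agent assignment into get_version(), so every caller refreshes http.ua_skybot as a side effect instead of rebuilding the format string. A rough, self-contained sketch of that pattern follows; the git invocation and the module-level ua_skybot stand-in are assumptions, and only the lines visible in the hunks come from the commit.

import subprocess

ua_skybot = None  # stand-in for skybot's http.ua_skybot attribute


def get_version():
    # count commits and grab the newest short hash; the exact git
    # command here is an assumption, not taken from the diff
    out = subprocess.check_output(['git', 'log', '--oneline']).decode()
    revnumber = len(out.splitlines())   # one line per commit
    shorthash = out.split(None, 1)[0]   # abbreviated hash of the newest commit

    # publish the user-agent as a side effect so the on-join hook and
    # the CTCP VERSION reply never duplicate the format string
    global ua_skybot
    ua_skybot = 'Skybot/r%d %s (http://github.com/rmmh/skybot)' \
        % (revnumber, shorthash)

    return shorthash, revnumber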

View File

@@ -17,6 +17,7 @@ def del_quote(db, chan, nick, add_nick, msg):
                chan=? and lower(nick)=lower(?) and msg=msg''')
     db.commit()
 
+
 def get_quotes_by_nick(db, chan, nick):
     return db.execute("select time, nick, msg from quote where deleted!=1 "
                       "and chan=? and lower(nick)=lower(?) order by time",

View File

@@ -13,11 +13,10 @@ def sieve_suite(bot, input, func, kind, args):
     if input.trigger in bot.config.get('disabled_commands', []):
         return None
 
-    ignored = bot.config.get('ignored', []);
-
+    ignored = bot.config.get('ignored', [])
     if input.host in ignored or input.nick in ignored:
         return None
 
     fn = re.match(r'^plugins.(.+).py$', func._filename)
     disabled = bot.config.get('disabled_plugins', [])
     if fn and fn.group(1).lower() in disabled:

View File

@@ -24,7 +24,7 @@ def snopes(inp):
 
     if status is not None:
         status = status.group(0).strip()
-    else: # new-style statuses
+    else:  # new-style statuses
         status = "Status: %s." % re.search(r"FALSE|TRUE|MIXTURE|UNDETERMINED",
                                            snopes_text).group(0).title()

View File

@@ -15,7 +15,8 @@ def stock(inp):
         return "error getting stock info"
 
     # Stuff the results in a dict for easy string formatting
-    results = dict((el.tag, el.attrib['data']) for el in parsed.xpath('//finance/*'))
+    results = dict((el.tag, el.attrib['data'])
+                   for el in parsed.xpath('//finance/*'))
 
     # if we dont get a company name back, the symbol doesn't match a company
     if results['company'] == '':

View File

@@ -24,13 +24,14 @@ def get_zipped_xml(*args, **kwargs):
     zip_buffer = StringIO(http.get(*args, **kwargs))
     return etree.parse(ZipFile(zip_buffer, "r").open(path))
 
+
 def get_episodes_for_series(seriesname):
-    res = {"error":None, "ended":False, "episodes":None, "name":None}
+    res = {"error": None, "ended": False, "episodes": None, "name": None}
 
     # http://thetvdb.com/wiki/index.php/API:GetSeries
     try:
         query = http.get_xml(base_url + 'GetSeries.php', seriesname=seriesname)
     except URLError:
-        res["error"]="error contacting thetvdb.com"
+        res["error"] = "error contacting thetvdb.com"
         return res
 
     series_id = query.xpath('//seriesid/text()')
@@ -57,6 +58,7 @@ def get_episodes_for_series(seriesname):
     res["name"] = series_name
 
     return res
 
+
 def get_episode_info(episode):
     first_aired = episode.findtext("FirstAired")
@@ -79,10 +81,11 @@ def get_episode_info(episode):
         episode_desc += ' - %s' % episode_name
 
     return (first_aired, airdate, episode_desc)
 
+
 @hook.command
 @hook.command('tv')
 def tv_next(inp):
-    ".tv_next <series> -- get the next episode of <series> from thetvdb.com"
+    ".tv_next <series> -- get the next episode of <series>"
     episodes = get_episodes_for_series(inp)
 
     if episodes["error"]:
@@ -128,7 +131,7 @@ def tv_next(inp):
 @hook.command
 @hook.command('tv_prev')
 def tv_last(inp):
-    ".tv_last <series> -- get the most recently aired episode of <series> from thetvdb.com"
+    ".tv_last <series> -- gets the most recently aired episode of <series>"
     episodes = get_episodes_for_series(inp)
 
     if episodes["error"]:
@@ -158,5 +161,5 @@ def tv_last(inp):
     if not prev_ep:
         return "there are no previously aired episodes for %s" % series_name
     if ended:
-        return '%s has ended. The last episode aired %s' % (series_name, prev_ep)
+        return '%s ended. The last episode aired %s' % (series_name, prev_ep)
     return "the last episode of %s aired %s" % (series_name, prev_ep)

View File

@@ -1,6 +1,5 @@
-import re
-
-from util import http,hook
+from util import hook, http
+
 
 @hook.regex(r'vimeo.com/([0-9]+)')
 def vimeo_url(match):