flake8 + autopep8 (whitespace fixes)

This commit is contained in:
Ryan Hitchman 2014-01-14 13:12:37 -08:00
parent c065aaa59a
commit cb8c437772
38 changed files with 212 additions and 189 deletions

View File

@ -27,6 +27,7 @@ def censor(text):
class crlf_tcp(object):
"Handles tcp connections that consist of utf-8 lines ending with crlf"
def __init__(self, host, port, timeout=300):
@ -94,7 +95,9 @@ class crlf_tcp(object):
class crlf_ssl_tcp(crlf_tcp):
"Handles ssl tcp connetions that consist of utf-8 lines ending with crlf"
def __init__(self, host, port, ignore_cert_errors, timeout=300):
self.ignore_cert_errors = ignore_cert_errors
crlf_tcp.__init__(self, host, port, timeout)
@ -123,8 +126,10 @@ irc_param_ref = re.compile(r'(?:^|(?<= ))(:.*|[^ ]+)').findall
class IRC(object):
"handles the IRC protocol"
#see the docs/ folder for more information on the protocol
# see the docs/ folder for more information on the protocol
def __init__(self, server, nick, port=6667, channels=[], conf={}):
self.channels = channels
self.conf = conf
@ -200,6 +205,7 @@ class IRC(object):
class FakeIRC(IRC):
def __init__(self, server, nick, port=6667, channels=[], conf={}, fn=""):
self.channels = channels
self.conf = conf
@ -242,6 +248,7 @@ class FakeIRC(IRC):
class SSLIRC(IRC):
def __init__(self, server, nick, port=6667, channels=[], conf={},
ignore_certificate_errors=True):
self.ignore_cert_errors = ignore_certificate_errors

View File

@ -6,6 +6,7 @@ thread.stack_size(1024 * 512) # reduce vm size
class Input(dict):
def __init__(self, conn, raw, prefix, command, params,
nick, user, host, paraml, msg):
@ -80,7 +81,9 @@ def do_sieve(sieve, bot, input, func, type, args):
class Handler(object):
'''Runs plugins in their own threads (ensures order)'''
def __init__(self, func):
self.func = func
self.input_queue = Queue.Queue()

View File

@ -23,13 +23,13 @@ def nrolls(count, n):
if count < 5000:
return [random.randint(0, 1) for x in xrange(count)]
else: # fake it
return [int(random.normalvariate(.5*count, (.75*count)**.5))]
return [int(random.normalvariate(.5 * count, (.75 * count) ** .5))]
else:
if count < 5000:
return [random.randint(1, n) for x in xrange(count)]
else: # fake it
return [int(random.normalvariate(.5*(1+n)*count,
(((n+1)*(2*n+1)/6.-(.5*(1+n))**2)*count)**.5))]
return [int(random.normalvariate(.5 * (1 + n) * count,
(((n + 1) * (2 * n + 1) / 6. - (.5 * (1 + n)) ** 2) * count) ** .5))]
@hook.command('roll')

View File

@ -15,7 +15,7 @@ def urban(inp):
if page['result_type'] == 'no_results':
return 'not found.'
out = defs[0]['word'] + ': ' + defs[0]['definition'].replace('\r\n',' ')
out = defs[0]['word'] + ': ' + defs[0]['definition'].replace('\r\n', ' ')
if len(out) > 400:
out = out[:out.rfind(' ', 0, 400)] + '...'

View File

@ -7,6 +7,7 @@ from util import hook, http
api_url = "http://ws.audioscrobbler.com/2.0/?format=json"
@hook.api_key('lastfm')
@hook.command(autohelp=False)
def lastfm(inp, nick='', say=None, api_key=None):

View File

@ -23,7 +23,7 @@ formats = {'PRIVMSG': '<%(nick)s> %(msg)s',
'QUIT': '-!- %(nick)s has quit [%(msg)s]',
'PING': '',
'NOTICE': ''
}
}
ctcp_formats = {'ACTION': '* %(nick)s %(ctcpmsg)s'}

View File

@ -1,4 +1,3 @@
import re
import socket
import subprocess
import time
@ -23,7 +22,7 @@ def get_version():
return shorthash, revnumber
#autorejoin channels
# autorejoin channels
@hook.event('KICK')
def rejoin(paraml, conn=None):
if paraml[1] == conn.nick:
@ -31,7 +30,7 @@ def rejoin(paraml, conn=None):
conn.join(paraml[0])
#join channels when invited
# join channels when invited
@hook.event('INVITE')
def invite(paraml, conn=None):
    # Accept an IRC INVITE by joining the invited channel, which is
    # the final parameter of the INVITE message.
    channel = paraml[-1]
    conn.join(channel)

View File

@ -61,9 +61,7 @@ def quote(inp, nick='', chan='', db=None):
elif retrieve:
select, num = retrieve.groups()
by_chan = False
if select.startswith('#'):
by_chan = True
quotes = get_quotes_by_chan(db, select)
else:
quotes = get_quotes_by_nick(db, chan, select)

View File

@ -25,11 +25,13 @@ def rottentomatoes(inp, api_key=None):
if critics_score == -1:
return
reviews = http.get_json(movie_reviews_url % id, apikey=api_key, review_type='all')
reviews = http.get_json(movie_reviews_url %
id, apikey=api_key, review_type='all')
review_count = reviews['total']
fresh = critics_score * review_count / 100
rotten = review_count - fresh
return u"%s - critics: \x02%d%%\x02 (%d\u2191%d\u2193)" \
" audience: \x02%d%%\x02 - %s" % (title, critics_score, fresh, rotten, audience_score, url)
" audience: \x02%d%%\x02 - %s" % (title, critics_score,
fresh, rotten, audience_score, url)

View File

@ -44,7 +44,7 @@ def seen(inp, nick='', chan='', db=None, input=None):
reltime = timesince.timesince(last_seen[1])
if last_seen[0] != inp.lower(): # for glob matching
inp = last_seen[0]
if last_seen[2][0:1]=="\x01":
if last_seen[2][0:1] == "\x01":
return '%s was last seen %s ago: *%s %s*' % \
(inp, reltime, inp, last_seen[2][8:-1])
else:

View File

@ -1,5 +1,3 @@
import random
from util import hook, http

View File

@ -19,7 +19,8 @@ def suggest(inp, inp_unstripped=''):
else:
num = 0
page = http.get('http://google.com/complete/search', output='json', client='hp', q=inp)
page = http.get('http://google.com/complete/search',
output='json', client='hp', q=inp)
page_json = page.split('(', 1)[1][:-1]
suggestions = json.loads(page_json)[1]
if not suggestions:

View File

@ -20,7 +20,9 @@ def munge(inp, munge_count=0):
break
return inp
class PaginatingWinnower(object):
def __init__(self):
self.lock = threading.Lock()
self.last_input = []
@ -48,7 +50,8 @@ class PaginatingWinnower(object):
if inp in inputs:
inputs.remove(inp)
else:
inputs.remove(random.choice([inp for inp in inputs if inp in self.recent]))
inputs.remove(
random.choice([inp for inp in inputs if inp in self.recent]))
else:
if ordered:
inputs.pop()
@ -61,6 +64,7 @@ class PaginatingWinnower(object):
winnow = PaginatingWinnower().winnow
def add_tag(db, chan, nick, subject):
match = db.execute('select * from tag where lower(nick)=lower(?) and'
' chan=? and lower(subject)=lower(?)',
@ -95,7 +99,6 @@ def get_tag_counts_by_chan(db, chan):
tags.sort(key=lambda x: x[1], reverse=True)
if not tags:
return 'no tags in %s' % chan
ret = '%s tags: ' % chan
return winnow(['%s (%d)' % row for row in tags], ordered=True)
@ -151,6 +154,7 @@ def tag(inp, chan='', db=None):
else:
return tag.__doc__
@hook.command
def untag(inp, chan='', db=None):
'.untag <nick> <tag> -- unmarks <nick> as <tag> {related: .tag, .tags, .tagged}'
@ -163,6 +167,7 @@ def untag(inp, chan='', db=None):
else:
return untag.__doc__
@hook.command
def tags(inp, chan='', db=None):
'.tags <nick>/list -- get list of tags for <nick>, or a list of tags {related: .tag, .untag, .tagged}'
@ -182,6 +187,7 @@ def tagged(inp, chan='', db=None):
return get_nicks_by_tagset(db, chan, inp)
def distance(lat1, lon1, lat2, lon2):
deg_to_rad = math.pi / 180
lat1 *= deg_to_rad
@ -190,16 +196,17 @@ def distance(lat1, lon1, lat2, lon2):
lon2 *= deg_to_rad
R = 6371 # km
d = math.acos(math.sin(lat1)*math.sin(lat2) +
math.cos(lat1)*math.cos(lat2) *
math.cos(lon2-lon1)) * R
d = math.acos(math.sin(lat1) * math.sin(lat2) +
math.cos(lat1) * math.cos(lat2) *
math.cos(lon2 - lon1)) * R
return d
@hook.command(autohelp=False)
def near(inp, nick='', chan='', db=None):
try:
loc = db.execute("select lat, lon from location where chan=? and nick=lower(?)", (chan, nick)).fetchone()
loc = db.execute("select lat, lon from location where chan=? and nick=lower(?)",
(chan, nick)).fetchone()
except db.OperationError:
loc = None
@ -213,7 +220,6 @@ def near(inp, nick='', chan='', db=None):
" and nick != lower(?) order by dist limit 20", (lat, lon, chan, nick)).fetchall()
out = '(km) '
last_dist = 10
while nearby and len(out) < 200:
nick, dist = nearby.pop(0)
out += '%s:%.0f ' % (munge(nick, 1), dist)

View File

@ -40,7 +40,8 @@ def unescape(text):
def goog_trans(text, slang, tlang):
url = 'https://www.googleapis.com/language/translate/v2'
parsed = http.get_json(url, key=api_key, q=text, source=slang, target=tlang)
parsed = http.get_json(
url, key=api_key, q=text, source=slang, target=tlang)
if not 200 <= parsed['responseStatus'] < 300:
raise IOError('error with the translation server: %d: %s' % (
parsed['responseStatus'], parsed['responseDetails']))
@ -141,6 +142,7 @@ def babelext(inp, bot=None):
return out
def hasapikey(bot):
api_key = bot.config.get("api_keys", {}).get("googletranslate", None)
return api_key

View File

@ -5,7 +5,6 @@ modified by rmmh 2010, 2013
import datetime
from lxml import etree
from util import hook, http, timesince
@ -31,7 +30,8 @@ def get_episodes_for_series(seriesname):
series_id = series_id[0]
try:
series = http.get_xml(base_url + '%s/series/%s/all/en.xml' % (api_key, series_id))
series = http.get_xml(base_url + '%s/series/%s/all/en.xml' %
(api_key, series_id))
except http.URLError:
res["error"] = "error contacting thetvdb.com"
return res
@ -97,12 +97,13 @@ def tv_next(inp):
(episode_air_date, airdate, episode_desc) = ep_info
if airdate > today:
next_eps = ['%s (%s) (%s)' % (episode_air_date, timesince.timeuntil(datetime.datetime.strptime(episode_air_date, "%Y-%m-%d")), episode_desc)]
next_eps = ['%s (%s) (%s)' % (episode_air_date, timesince.timeuntil(
datetime.datetime.strptime(episode_air_date, "%Y-%m-%d")), episode_desc)]
elif airdate == today:
next_eps = ['Today (%s)' % episode_desc] + next_eps
else:
#we're iterating in reverse order with newest episodes last
#so, as soon as we're past today, break out of loop
# we're iterating in reverse order with newest episodes last
# so, as soon as we're past today, break out of loop
break
if not next_eps:
@ -140,8 +141,8 @@ def tv_last(inp):
(episode_air_date, airdate, episode_desc) = ep_info
if airdate < today:
#iterating in reverse order, so the first episode encountered
#before today was the most recently aired
# iterating in reverse order, so the first episode encountered
# before today was the most recently aired
prev_ep = '%s (%s)' % (episode_air_date, episode_desc)
break

View File

@ -5,6 +5,7 @@ from urllib import quote
from util import hook, http
@hook.api_key('twitter')
@hook.command
def twitter(inp, api_key=None):
@ -50,7 +51,7 @@ def twitter(inp, api_key=None):
500: 'twitter is broken',
502: 'twitter is down ("getting upgraded")',
503: 'twitter is overloaded (lol, RoR)',
410: 'twitter shut off api v1.' }
410: 'twitter shut off api v1.'}
if e.code == 404:
return 'error: invalid ' + ['username', 'tweet id'][getting_id]
if e.code in errors:
@ -75,11 +76,13 @@ def twitter(inp, api_key=None):
screen_name = tweet["user"]["screen_name"]
time = tweet["created_at"]
time = strftime('%Y-%m-%d %H:%M:%S', strptime(time, '%a %b %d %H:%M:%S +0000 %Y'))
time = strftime('%Y-%m-%d %H:%M:%S',
strptime(time, '%a %b %d %H:%M:%S +0000 %Y'))
return "%s \x02%s\x02: %s" % (time, screen_name, text)
@hook.api_key('twitter')
@hook.regex(r'https?://twitter.com/(#!/)?([_0-9a-zA-Z]+)/status/(\d+)')
def show_tweet(match, api_key=None):
return twitter(match.group(3),api_key)
return twitter(match.group(3), api_key)

View File

@ -1,5 +1,4 @@
import math
import re
import time
from util import hook, urlnorm, timesince
@ -17,7 +16,6 @@ def db_init(db):
def insert_history(db, chan, url, nick):
now = time.time()
db.execute("insert into urlhistory(chan, url, nick, time) "
"values(?,?,?,?)", (chan, url, nick, time.time()))
db.commit()

View File

@ -69,14 +69,16 @@ def open(url, query_params=None, user_agent=None, referer=None, post_data=None,
nonce = oauth_nonce()
timestamp = oauth_timestamp()
api_url, req_data = string.split(url, "?")
unsigned_request = oauth_unsigned_request(nonce, timestamp, req_data, oauth_keys['consumer'], oauth_keys['access'])
unsigned_request = oauth_unsigned_request(
nonce, timestamp, req_data, oauth_keys['consumer'], oauth_keys['access'])
signature = oauth_sign_request("GET", api_url, req_data, unsigned_request, oauth_keys['consumer_secret'], oauth_keys['access_secret'])
signature = oauth_sign_request("GET", api_url, req_data, unsigned_request, oauth_keys[
'consumer_secret'], oauth_keys['access_secret'])
header = oauth_build_header(nonce, signature, timestamp, oauth_keys['consumer'], oauth_keys['access'])
header = oauth_build_header(
nonce, signature, timestamp, oauth_keys['consumer'], oauth_keys['access'])
request.add_header('Authorization', header)
if cookies:
opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(jar))
else:
@ -108,21 +110,24 @@ def to_utf8(s):
def quote_plus(s):
    """URL-quote *s*, first coercing it to utf-8 bytes via to_utf8."""
    encoded = to_utf8(s)
    return _quote_plus(encoded)
def oauth_nonce():
    """Return an 8-character random decimal string for OAuth nonces."""
    digits = (str(random.randint(0, 9)) for _ in range(8))
    return ''.join(digits)
def oauth_timestamp():
    """Return the current Unix time, truncated to whole seconds, as a string."""
    now = time.time()
    return str(int(now))
def oauth_unsigned_request(nonce, timestamp, req, consumer, token):
d = { 'oauth_consumer_key':consumer,
'oauth_nonce':nonce,
'oauth_signature_method':'HMAC-SHA1',
'oauth_timestamp':timestamp,
'oauth_token':token,
'oauth_version':'1.0' }
k,v = string.split(req, "=")
def oauth_unsigned_request(nonce, timestamp, req, consumer, token):
d = {'oauth_consumer_key': consumer,
'oauth_nonce': nonce,
'oauth_signature_method': 'HMAC-SHA1',
'oauth_timestamp': timestamp,
'oauth_token': token,
'oauth_version': '1.0'}
k, v = string.split(req, "=")
d[k] = v
unsigned_req = ''
@ -134,22 +139,24 @@ def oauth_unsigned_request(nonce, timestamp, req, consumer, token):
return unsigned_req
def oauth_build_header(nonce, signature, timestamp, consumer, token):
d = { 'oauth_consumer_key':consumer,
'oauth_nonce':nonce,
'oauth_signature':signature,
'oauth_signature_method':'HMAC-SHA1',
'oauth_timestamp':timestamp,
'oauth_token':token,
'oauth_version':'1.0' }
header='OAuth '
def oauth_build_header(nonce, signature, timestamp, consumer, token):
    """Build the OAuth 1.0 'Authorization' header value.

    Fields are emitted in sorted key order as key="value" pairs.
    NOTE(review): the final slice strips only the trailing space, so the
    header still ends with a comma — preserved here exactly as before.
    """
    fields = {'oauth_consumer_key': consumer,
              'oauth_nonce': nonce,
              'oauth_signature': signature,
              'oauth_signature_method': 'HMAC-SHA1',
              'oauth_timestamp': timestamp,
              'oauth_token': token,
              'oauth_version': '1.0'}
    parts = [name + '="' + fields[name] + '", ' for name in sorted(fields)]
    return ('OAuth ' + ''.join(parts))[:-1]
def oauth_sign_request(method, url, params, unsigned_request, consumer_secret, token_secret):
key = consumer_secret + "&" + token_secret
@ -161,6 +168,7 @@ def oauth_sign_request(method, url, params, unsigned_request, consumer_secret, t
return signature
def unescape(s):
if not s.strip():
return s

View File

@ -1,11 +1,8 @@
"weather, thanks to wunderground"
import math
from util import hook, http
@hook.api_key('wunderground')
@hook.command(autohelp=False)
def weather(inp, chan='', nick='', reply=None, db=None, api_key=None):
@ -17,7 +14,8 @@ def weather(inp, chan='', nick='', reply=None, db=None, api_key=None):
# this database is used by other plugins interested in user's locations,
# like .near in tag.py
db.execute("create table if not exists location(chan, nick, loc, lat, lon, primary key(chan, nick))")
db.execute(
"create table if not exists location(chan, nick, loc, lat, lon, primary key(chan, nick))")
loc = inp
@ -25,9 +23,9 @@ def weather(inp, chan='', nick='', reply=None, db=None, api_key=None):
if dontsave:
loc = loc[:-9].strip().lower()
if not loc: # blank line
loc = db.execute("select loc from location where chan=? and nick=lower(?)",
loc = db.execute(
"select loc from location where chan=? and nick=lower(?)",
(chan, nick)).fetchone()
if not loc:
try:
@ -105,8 +103,8 @@ def weather(inp, chan='', nick='', reply=None, db=None, api_key=None):
info['humid'] = obs['relative_humidity']
info['wind'] = 'Wind: {mph}mph/{kph}kph' \
.format(mph=obs['wind_mph'], kph=obs['wind_kph'])
reply('{city}: {weather}, {t_f}F/{t_c}C' \
'(H:{h_f}F/{h_c}C L:{l_f}F/{l_c}C)' \
reply('{city}: {weather}, {t_f}F/{t_c}C'
'(H:{h_f}F/{h_c}C L:{l_f}F/{l_c}C)'
', Humidity: {humid}, {wind}'.format(**info))
lat = float(obs['display_location']['latitude'])
@ -116,5 +114,3 @@ def weather(inp, chan='', nick='', reply=None, db=None, api_key=None):
db.execute("insert or replace into location(chan, nick, loc, lat, lon) "
"values (?, ?, ?, ?,?)", (chan, nick.lower(), inp, lat, lon))
db.commit()

View File

@ -1,6 +1,7 @@
from util import hook, http
from random import choice
@hook.api_key('yahoo')
@hook.command
def answer(inp, api_key=None):
@ -25,4 +26,3 @@ def answer(inp, api_key=None):
response = "%s -- %s" % (link, answer)
return " ".join(response.split())