2010-02-06 02:54:28 +00:00
|
|
|
import math
|
2010-01-19 03:16:40 +00:00
|
|
|
import re
|
2010-02-06 02:54:28 +00:00
|
|
|
import time
|
2010-01-19 03:16:40 +00:00
|
|
|
|
2010-02-06 02:54:28 +00:00
|
|
|
from util import hook, urlnorm, timesince
|
2010-01-19 03:16:40 +00:00
|
|
|
|
2010-07-14 20:45:26 +00:00
|
|
|
# matches a scheme-prefixed url (http://, ftp://, ...) or a bare www. url,
# running greedily to the next space
url_re = r'([a-zA-Z]+://|www\.)[^ ]+'
|
2010-01-19 03:16:40 +00:00
|
|
|
|
2010-03-01 02:32:41 +00:00
|
|
|
# history rows older than this many seconds are purged on lookup (1 day)
expiration_period = 60 * 60 * 24 # 1 day
|
2010-01-19 03:16:40 +00:00
|
|
|
|
|
|
|
# urls that are never recorded or reported; stored pre-normalized so they
# compare equal to the normalized urls seen in urlinput()
ignored_urls = [urlnorm.normalize("http://google.com")]
|
|
|
|
|
2010-03-01 02:32:41 +00:00
|
|
|
|
2010-03-13 14:24:19 +00:00
|
|
|
def db_init(db):
    """Create the urlhistory table if it is missing and commit.

    Safe to call repeatedly; uses `if not exists` so an existing table
    (and its rows) is left untouched.
    """
    db.execute(
        "create table if not exists urlhistory"
        "(chan, url, nick, time)")
    db.commit()
|
2010-01-19 03:16:40 +00:00
|
|
|
|
2010-03-01 02:32:41 +00:00
|
|
|
|
2010-02-02 04:42:34 +00:00
|
|
|
def insert_history(db, chan, url, nick):
    """Record that `nick` posted `url` in `chan` at the current time.

    Commits immediately so the row is visible to later lookups.
    """
    now = time.time()
    # Bug fix: the original captured `now` and then called time.time() a
    # second time in the INSERT, leaving `now` unused and letting the two
    # clock reads disagree. Use the single captured timestamp.
    db.execute("insert into urlhistory(chan, url, nick, time) "
               "values(?,?,?,?)", (chan, url, nick, now))
    db.commit()
|
2010-01-19 03:16:40 +00:00
|
|
|
|
2010-03-01 02:32:41 +00:00
|
|
|
|
2010-03-05 03:04:25 +00:00
|
|
|
def get_history(db, chan, url):
    """Return (nick, time) rows for `url` in `chan`, newest first.

    Expired rows (older than expiration_period seconds) are deleted
    first; the delete is committed as a side effect of the caller's
    later insert_history() commit.
    """
    cutoff = time.time() - expiration_period
    db.execute("delete from urlhistory where time < ?", (cutoff,))

    query = ("select nick, time from urlhistory where "
             "chan=? and url=? order by time desc")
    return db.execute(query, (chan, url)).fetchall()
|
2010-03-05 03:04:25 +00:00
|
|
|
|
|
|
|
|
2010-02-06 02:54:28 +00:00
|
|
|
def nicklist(nicks):
    """Format (nick, time) pairs as an English list of unique nicks.

    nicks: iterable of (nick, timestamp) pairs, e.g. get_history() rows.
    Duplicates are collapsed via dict(); nicks are sorted
    case-insensitively. Returns "a", "a and b", or "a, b, and c".
    """
    # Fix: the original used key=unicode.lower, which raises TypeError for
    # plain byte strings on Python 2 and NameError on Python 3. A bound
    # .lower() call works for both str and unicode.
    nicks = sorted(dict(nicks), key=lambda n: n.lower())
    if len(nicks) <= 2:
        return ' and '.join(nicks)
    else:
        # oxford-comma join: "a, b, and c"
        return ', and '.join((', '.join(nicks[:-1]), nicks[-1]))
|
|
|
|
|
2010-03-01 02:32:41 +00:00
|
|
|
|
2010-02-06 02:54:28 +00:00
|
|
|
def format_reply(history):
    """Build the repost notice for a url's (nick, time) history rows.

    Returns None for empty history, a short one-liner when the url was
    seen exactly once, and otherwise a summary naming the repost count,
    the hour span covered, every poster, and the most recent post.
    history is newest-first, as produced by get_history().
    """
    if not history:
        return

    last_nick, recent_time = history[0]
    last_time = timesince.timesince(recent_time)

    if len(history) == 1:
        return "%s linked that %s ago." % (last_nick, last_time)

    # whole hours between the oldest recorded post and now
    hours = math.ceil((time.time() - history[-1][1]) / 3600)
    span = '%.0f hours' % hours if hours > 1 else 'hour'

    count = len(history)
    times = ["once", "twice", "%d times" % count][min(count, 3) - 1]

    # a lone distinct nick means repeating "by <nick>" would be redundant
    if len(dict(history)) == 1:
        tail = "last linked %s ago" % last_time
    else:
        tail = "last linked by %s %s ago" % (last_nick, last_time)

    return "that url has been posted %s in the past %s by %s (%s)." % (
        times, span, nicklist(history), tail)
|
2010-03-01 02:32:41 +00:00
|
|
|
|
|
|
|
|
2010-03-12 00:19:36 +00:00
|
|
|
@hook.regex(url_re)
def urlinput(match, nick='', chan='', db=None, bot=None):
    """Track every url posted in a channel; announce reposts.

    Fires on any message matching url_re. Records the normalized url,
    and returns a formatted notice when someone other than a previous
    poster repeats a url still in the history window.
    """
    db_init(db)
    url = urlnorm.normalize(match.group().encode('utf-8'))

    # guard clause instead of wrapping the whole body in the conditional
    if url in ignored_urls:
        return

    url = url.decode('utf-8')
    history = get_history(db, chan, url)
    insert_history(db, chan, url, nick)

    # stay quiet when the poster already appears in the url's history
    if nick in dict(history):
        return
    return format_reply(history)
|