import os
import re
import time
import sqlite3
from datetime import datetime, timedelta

from util import hook, timesince

# Anything that looks like a URL: an explicit scheme ("http://", ...) or a
# bare "www." prefix, up to the next space.
url_re = re.compile(r'([a-zA-Z]+://|www\.)[^ ]*')

dbname = "skybot.db"

# How long a URL mention is remembered before it expires.
expiration_period = timedelta(days=1)

# TODO: generate expiration_period_text from expiration_period
expiration_period_text = "24 hours"


def adapt_datetime(ts):
    """sqlite3 adapter: store datetime values as POSIX timestamps (floats)."""
    return time.mktime(ts.timetuple())

sqlite3.register_adapter(datetime, adapt_datetime)


def insert_history(conn, url, channel, nick):
    """Record that `nick` posted `url` in `channel` at the current time."""
    now = datetime.now()
    conn.execute("insert into urlhistory(url, nick, chan, time) values(?,?,?,?)",
                 (url, nick, channel, now))
    conn.commit()


def select_history_for_url_and_channel(conn, url, channel):
    """Return the nicks that posted `url` in `channel` within the
    expiration period.

    Rows older than `expiration_period` are deleted as a side effect.
    The original implementation removed entries from the result list
    while indexing into it with a pre-computed xrange, which could raise
    IndexError once rows had been removed; it also left the DELETEs
    uncommitted, and its DELETE statement had a line break inside the
    SQL string literal.  All three are fixed here.
    """
    rows = conn.execute("select nick, time from urlhistory where url=? and chan=?",
                        (url, channel)).fetchall()
    now = datetime.now()
    nicks = []
    purged = False
    for nick, posted_ts in rows:
        posted = datetime.fromtimestamp(posted_ts)
        if now - posted > expiration_period:
            # Stale mention: purge it from the history table.
            conn.execute("delete from urlhistory where url=? and chan=?"
                         " and nick=? and time=?",
                         (url, channel, nick, posted_ts))
            purged = True
        else:
            nicks.append(nick)
    if purged:
        conn.commit()
    return nicks


def get_nicklist(nicks):
    """Format a de-duplicated, sorted nick list as English prose.

    "" for no nicks, "a" for one, "a and b" for two,
    "a, b, and c" (Oxford comma) for three or more.
    """
    nicks = sorted(set(nicks))
    if not nicks:
        return ""
    if len(nicks) == 1:
        return nicks[0]
    if len(nicks) == 2:
        return nicks[0] + " and " + nicks[1]
    return ", ".join(nicks[:-1]) + ", and " + nicks[-1]


def dbconnect(db):
    """Open `db`, ensuring the urlhistory table exists, and return the
    connection.  ("create table if not exists" already makes the old
    sqlite_master count query redundant.)"""
    conn = sqlite3.connect(db)
    conn.execute("create table if not exists urlhistory"
                 "(url text not null, nick text not null, chan text not null,"
                 " time datetime not null,"
                 " primary key(url, nick, chan, time));")
    conn.commit()
    return conn


def normalize_url(url):
    # TODO: do something so that:
    # - http://www.google.com
    # - www.google.com
    # - http://google.com
    # - http://google.com/
    # etc are all considered to be the same URL
    return url


def get_once_twice(count):
    """English frequency phrase: "once", "twice", or "N times"."""
    if count == 1:
        return "once"
    elif count == 2:
        return "twice"
    else:
        return str(count) + " times"


@hook.command(hook=r'(.*)', prefix=False, ignorebots=True)
def urlinput(bot, input):
    """Watch every channel message; if it contains a URL that someone
    else already posted within the expiration period, announce who
    posted it and how often, then record this mention."""
    dbpath = os.path.join(bot.persist_dir, dbname)
    m = url_re.search(input.msg)
    if not m:
        return
    url = normalize_url(m.group(0))
    conn = dbconnect(dbpath)
    try:
        dupes = select_history_for_url_and_channel(conn, url, input.chan)
        num_dupes = len(dupes)
        # Stay quiet when the poster is repeating their own link.
        if num_dupes > 0 and input.nick not in dupes:
            reply = ("That link has been posted " + get_once_twice(num_dupes)
                     + " in the past " + expiration_period_text
                     + " by " + get_nicklist(dupes))
            input.reply(reply)
        insert_history(conn, url, input.chan, input.nick)
    finally:
        conn.close()