vyvanse/internal/dao/logs.go

package dao

import (
	"context"

	"git.xeserv.us/xena/gorqlite"
	"github.com/Xe/ln"
	"github.com/bwmarrin/discordgo"
	opentracing "github.com/opentracing/opentracing-go"
	splog "github.com/opentracing/opentracing-go/log"
	"google.golang.org/api/support/bundler"
)

// Logs persists Discord messages to rqlite, batching writes through a
// bundler so each incoming message does not cost a full round trip.
type Logs struct {
	conn gorqlite.Connection
	bdl  *bundler.Bundler
}

// NewLogs returns a Logs DAO backed by the given rqlite connection.
// Bound INSERT statements are queued on a bundler and flushed in batches by writeLines.
func NewLogs(conn gorqlite.Connection) *Logs {
	l := &Logs{conn: conn}
	l.bdl = bundler.NewBundler("string", l.writeLines) // "string" is only a type exemplar for the bundled items
	return l
}
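
// The bundler above keeps the package defaults (bundler.DefaultDelayThreshold
// and friends). A sketch of explicit tuning inside NewLogs, with illustrative
// values that are not taken from this repository:
//
//	l.bdl.DelayThreshold = 10 * time.Second // would need the "time" import
//	l.bdl.BundleCountThreshold = 50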

// writeLines is the bundler handler: it receives a batch of bound SQL
// statements as a []string and writes them to rqlite in one call.
func (l *Logs) writeLines(sqlLines interface{}) {
	sp, ctx := opentracing.StartSpanFromContext(context.Background(), "logs.write.lines")
	defer sp.Finish()

	_, err := l.conn.Write(sqlLines.([]string))
	if err != nil {
		ln.Error(ctx, err, ln.F{"action": "write lines that are batched"})
	}
}

// Migrate creates the logs table if it does not already exist.
func (l *Logs) Migrate(ctx context.Context) error {
	sp, ctx := opentracing.StartSpanFromContext(ctx, "logs.migrate")
	defer sp.Finish()

	migrationDDL := []string{
		`CREATE TABLE IF NOT EXISTS logs(id INTEGER PRIMARY KEY, discord_id TEXT UNIQUE, channel_id TEXT, content TEXT, timestamp INTEGER, mention_everyone INTEGER, author_id TEXT, author_username TEXT)`,
	}

	res, err := l.conn.Write(migrationDDL)
	if err != nil {
		sp.LogFields(splog.Error(err))
		return err
	}

	for i, re := range res {
		if re.Err != nil {
			sp.LogFields(splog.Error(re.Err))
			return re.Err
		}
		sp.LogFields(splog.Int("migration.step", i), splog.Float64("timing", re.Timing), splog.Int64("rows.affected", re.RowsAffected))
	}

	return nil
}

// Add records a Discord message, queuing the bound INSERT on the bundler
// rather than writing it immediately.
func (l *Logs) Add(ctx context.Context, m *discordgo.Message) error {
	sp, ctx := opentracing.StartSpanFromContext(ctx, "logs.add")
	defer sp.Finish()

	stmt := gorqlite.NewPreparedStatement(`INSERT INTO logs (discord_id, channel_id, content, timestamp, mention_everyone, author_id, author_username) VALUES (%s, %s, %s, %d, %d, %s, %s)`)

	ts, err := m.Timestamp.Parse()
	if err != nil {
		return err
	}

	// SQLite has no boolean type, so store mention_everyone as 0 or 1.
	var me int
	if m.MentionEveryone {
		me = 1
	}

	bd := stmt.Bind(m.ID, m.ChannelID, m.Content, ts.Unix(), me, m.Author.ID, m.Author.Username)
	return l.bdl.Add(bd, len(bd))
}
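
// Example wiring (a sketch, not code from this repository): the rqlite URL,
// the discordgo session dg, and the bare-bones error handling below are
// illustrative assumptions.
//
//	conn, err := gorqlite.Open("http://127.0.0.1:4001/")
//	if err != nil {
//		log.Fatal(err)
//	}
//	logs := dao.NewLogs(conn)
//	if err := logs.Migrate(context.Background()); err != nil {
//		log.Fatal(err)
//	}
//	dg.AddHandler(func(s *discordgo.Session, mc *discordgo.MessageCreate) {
//		if err := logs.Add(context.Background(), mc.Message); err != nil {
//			ln.Error(context.Background(), err)
//		}
//	})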