robots and sitemap

This commit is contained in:
Cadey Ratio 2019-03-21 10:30:20 -07:00
parent 27f177c94a
commit d9ed20cb2c
4 changed files with 51 additions and 2 deletions

View File

@ -19,6 +19,7 @@ import (
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/client_golang/prometheus/promhttp"
blackfriday "github.com/russross/blackfriday"
"github.com/snabb/sitemap"
"within.website/ln"
)
@ -81,7 +82,8 @@ type Site struct {
rssFeed *feeds.Feed
jsonFeed *jsonfeed.Feed
mux *http.ServeMux
sitemap []byte
templates map[string]*template.Template
tlock sync.RWMutex
@ -93,6 +95,8 @@ func (s *Site) ServeHTTP(w http.ResponseWriter, r *http.Request) {
s.mux.ServeHTTP(w, r)
}
var arbDate = time.Date(2019, time.March, 21, 18, 0, 0, 0, time.UTC)
// Build creates a new Site instance or fails.
func Build() (*Site, error) {
type postFM struct {
@ -100,6 +104,31 @@ func Build() (*Site, error) {
Date string
}
smi := sitemap.New()
smi.Add(&sitemap.URL{
Loc: "https://christine.website/resume",
LastMod: &arbDate,
ChangeFreq: sitemap.Monthly,
})
smi.Add(&sitemap.URL{
Loc: "https://christine.website/contact",
LastMod: &arbDate,
ChangeFreq: sitemap.Monthly,
})
smi.Add(&sitemap.URL{
Loc: "https://christine.website/",
LastMod: &arbDate,
ChangeFreq: sitemap.Monthly,
})
smi.Add(&sitemap.URL{
Loc: "https://christine.website/blog",
LastMod: &arbDate,
ChangeFreq: sitemap.Weekly,
})
s := &Site{
rssFeed: &feeds.Feed{
Title: "Christine Dodrill's Blog",
@ -135,7 +164,7 @@ func Build() (*Site, error) {
if info.IsDir() {
return nil
}
fin, err := os.Open(path)
if err != nil {
return err
@ -164,6 +193,12 @@ func Build() (*Site, error) {
}
s.Posts = append(s.Posts, p)
itime, _ := time.Parse("2006-01-02", p.Date)
smi.Add(&sitemap.URL{
Loc: "https://christine.website/" + p.Link,
LastMod: &itime,
ChangeFreq: sitemap.Monthly,
})
return nil
})
@ -224,6 +259,13 @@ func Build() (*Site, error) {
s.mux.HandleFunc("/sw.js", func(w http.ResponseWriter, r *http.Request) {
http.ServeFile(w, r, "./static/js/sw.js")
})
s.mux.HandleFunc("/robots.txt", func(w http.ResponseWriter, r *http.Request) {
http.ServeFile(w, r, "./static/robots.txt")
})
s.mux.Handle("/sitemap.xml", middlewareMetrics("sitemap", http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "application/xml")
smi.WriteTo(w)
})))
return s, nil
}

1
go.mod
View File

@ -12,6 +12,7 @@ require (
github.com/prometheus/procfs v0.0.0-20190319124303-40f3c57fb198 // indirect
github.com/russross/blackfriday v2.0.0+incompatible
github.com/shurcooL/sanitized_anchor_name v1.0.0 // indirect
github.com/snabb/sitemap v1.0.0
github.com/stretchr/testify v1.3.0
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127 // indirect
gopkg.in/yaml.v2 v2.2.1

4
go.sum
View File

@ -55,6 +55,10 @@ github.com/russross/blackfriday v2.0.0+incompatible/go.mod h1:JO/DiYxRf+HjHt06Oy
github.com/shurcooL/sanitized_anchor_name v1.0.0 h1:PdmoCO6wvbs+7yrJyMORt4/BmY5IYyJwS/kOiWx8mHo=
github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc=
github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo=
github.com/snabb/diagio v1.0.0 h1:kovhQ1rDXoEbmpf/T5N2sUp2iOdxEg+TcqzbYVHV2V0=
github.com/snabb/diagio v1.0.0/go.mod h1:ZyGaWFhfBVqstGUw6laYetzeTwZ2xxVPqTALx1QQa1w=
github.com/snabb/sitemap v1.0.0 h1:7vJeNPAaaj7fQSRS3WYuJHzUjdnhLdSLLpvVtnhbzC0=
github.com/snabb/sitemap v1.0.0/go.mod h1:Id8uz1+WYdiNmSjEi4BIvL5UwNPYLsTHzRbjmDwNDzA=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/testify v1.2.2 h1:bSDNvY7ZPG5RlJ8otE/7V6gMiyenm9RtJ7IUVIAoJ1w=

2
static/robots.txt Normal file
View File

@ -0,0 +1,2 @@
User-Agent: *
Sitemap: https://christine.website/sitemap.xml