Diffstat (limited to 'cmd/build')
-rw-r--r-- | cmd/build/atom.go |  43
-rw-r--r-- | cmd/build/main.go | 294
2 files changed, 337 insertions, 0 deletions
diff --git a/cmd/build/atom.go b/cmd/build/atom.go
new file mode 100644
index 0000000..9116278
--- /dev/null
+++ b/cmd/build/atom.go
@@ -0,0 +1,43 @@
+package main
+
+import (
+	"encoding/xml"
+	"time"
+
+	. "alanpearce.eu/website/internal/config"
+)
+
+func makeTagURI(config Config, specific string) string {
+	return "tag:" + config.OriginalDomain + "," + config.DomainStartDate + ":" + specific
+}
+
+type Link struct {
+	XMLName xml.Name `xml:"link"`
+	Rel string `xml:"rel,attr"`
+	Type string `xml:"type,attr"`
+	Href string `xml:"href,attr"`
+}
+
+func makeLink(url string) Link {
+	return Link{
+		Rel: "alternate",
+		Type: "text/html",
+		Href: url,
+	}
+}
+
+type FeedContent struct {
+	Content string `xml:",innerxml"`
+	Type string `xml:"type,attr"`
+}
+
+type FeedEntry struct {
+	XMLName xml.Name `xml:"entry"`
+	Title string `xml:"title"`
+	Link Link `xml:"link"`
+	Id string `xml:"id"`
+	Updated time.Time `xml:"updated"`
+	Summary string `xml:"summary,omitempty"`
+	Author string `xml:"author>name"`
+	Content FeedContent `xml:"content"`
+}
diff --git a/cmd/build/main.go b/cmd/build/main.go
new file mode 100644
index 0000000..48c9739
--- /dev/null
+++ b/cmd/build/main.go
@@ -0,0 +1,294 @@
+package main
+
+import (
+	"bytes"
+	"encoding/xml"
+	"fmt"
+	"log"
+	"os"
+	"path"
+	"path/filepath"
+	"slices"
+	"strings"
+	"time"
+
+	. "alanpearce.eu/website/internal/config"
+
+	"github.com/PuerkitoBio/goquery"
+	"github.com/adrg/frontmatter"
+	"github.com/antchfx/xmlquery"
+	mapset "github.com/deckarep/golang-set/v2"
+	"github.com/yuin/goldmark"
+	"github.com/yuin/goldmark/extension"
+)
+
+type PostMatter struct {
+	Date time.Time `toml:"date"`
+	Description string `toml:"description"`
+	Title string `toml:"title"`
+	Taxonomies struct {
+		Tags []string `toml:"tags"`
+	} `toml:"taxonomies"`
+}
+
+type Post struct {
+	Input string
+	Output string
+	Basename string
+	URL string
+	Content string
+	PostMatter
+}
+
+type Tags mapset.Set[string]
+
+func check(err error) {
+	if err != nil {
+		log.Panic(err)
+	}
+}
+
+func getPost(filename string) (PostMatter, []byte) {
+	matter := PostMatter{}
+	content, err := os.Open(filename)
+	check(err)
+	rest, err := frontmatter.Parse(content, &matter)
+	check(err)
+
+	return matter, rest
+}
+
+func readPosts(root string, inputDir string, outputDir string) ([]Post, Tags) {
+	tags := mapset.NewSet[string]()
+	posts := []Post{}
+	subdir := filepath.Join(root, inputDir)
+	files, err := os.ReadDir(subdir)
+	outputReplacer := strings.NewReplacer(root, outputDir, ".md", "/index.html")
+	urlReplacer := strings.NewReplacer(root, "", ".md", "/")
+	check(err)
+	md := goldmark.New(
+		goldmark.WithExtensions(
+			extension.GFM,
+			extension.Footnote,
+			extension.Typographer,
+		),
+	)
+	for _, f := range files {
+		pathFromRoot := filepath.Join(subdir, f.Name())
+		check(err)
+		if !f.IsDir() && path.Ext(pathFromRoot) == ".md" {
+			output := outputReplacer.Replace(pathFromRoot)
+			url := urlReplacer.Replace(pathFromRoot)
+			matter, content := getPost(pathFromRoot)
+
+			for _, tag := range matter.Taxonomies.Tags {
+				tags.Add(strings.ToLower(tag))
+			}
+
+			var buf bytes.Buffer
+			err := md.Convert(content, &buf)
+			check(err)
+			post := Post{
+				Input: pathFromRoot,
+				Output: output,
+				Basename: filepath.Base(url),
+				URL: url,
+				PostMatter: matter,
+				Content: buf.String(),
+			}
+
+			posts = append(posts, post)
+		}
+	}
+	slices.SortFunc(posts, func(a, b Post) int {
+		return b.Date.Compare(a.Date)
+	})
+	return posts, tags
+}
+
+func layout(html string, config Config, pageTitle string) *goquery.Document {
+	css, err := os.ReadFile("templates/style.css")
+	check(err)
+	doc, err := goquery.NewDocumentFromReader(strings.NewReader(html))
+	check(err)
+	doc.Find("html").SetAttr("lang", config.DefaultLanguage)
+	doc.Find("head > link[rel=alternate]").Add(".title").SetAttr("title", config.Title)
+	doc.Find("title").Add(".p-name").SetText(pageTitle)
+	doc.Find("head > style").SetText(string(css))
+	nav := doc.Find("nav")
+	navLink := doc.Find("nav a")
+	nav.Empty()
+	for _, link := range config.Menus["main"] {
+		nav.AppendSelection(navLink.Clone().SetAttr("href", link.URL).SetText(link.Name))
+	}
+	return doc
+}
+
+func renderPost(post Post, config Config) string {
+	tbytes, err := os.ReadFile("templates/post.html")
+	check(err)
+	doc := layout(string(tbytes), config, post.PostMatter.Title)
+	doc.Find(".title").AddClass("h-card p-author").SetAttr("rel", "author")
+	datetime, err := post.PostMatter.Date.MarshalText()
+	check(err)
+	doc.Find(".h-entry .dt-published").SetAttr("datetime", string(datetime)).SetText(
+		post.PostMatter.Date.Format("2006-01-02"),
+	)
+	doc.Find(".h-entry .e-content").SetHtml(post.Content)
+	categories := doc.Find(".h-entry .p-categories")
+	cat := categories.Find(".p-category").ParentsUntilSelection(categories)
+	cat.Remove()
+	for _, tag := range post.Taxonomies.Tags {
+		categories.AppendSelection(cat.Clone().Find(".p-category").SetAttr("href", fmt.Sprintf("/tags/%s/", tag)).SetText("#" + tag)).Parent()
+	}
+	html, err := doc.Html()
+	check(err)
+	return html
+}
+
+func renderTags(tags Tags, config Config) string {
+	tbytes, err := os.ReadFile("templates/tags.html")
+	check(err)
+
+	doc := layout(string(tbytes), config, config.Title)
+	tagList := doc.Find(".tags")
+	tpl := doc.Find(".h-feed")
+	tpl.Remove()
+	for _, tag := range mapset.Sorted(tags) {
+		tagList.AppendSelection(
+			tpl.Clone().SetAttr("href", fmt.Sprintf("/tags/%s/", tag)).SetText("#" + tag),
+		)
+	}
+	html, err := doc.Html()
+	check(err)
+	return html
+}
+
+func renderListPage(tag string, config Config, posts []Post) string {
+	tbytes, err := os.ReadFile("templates/list.html")
+	check(err)
+	var title string
+	if len(tag) > 0 {
+		title = tag
+	} else {
+		title = config.Title
+	}
+	doc := layout(string(tbytes), config, title)
+	feed := doc.Find(".h-feed")
+	tpl := feed.Find(".h-entry")
+	tpl.Remove()
+
+	doc.Find(".title").AddClass("h-card p-author").SetAttr("rel", "author")
+	if tag == "" {
+		doc.Find(".filter").Remove()
+	} else {
+		doc.Find(".filter").Find("h3").SetText("#" + tag)
+	}
+
+	for _, post := range posts {
+		entry := tpl.Clone()
+		datetime, err := post.PostMatter.Date.MarshalText()
+		check(err)
+
+		entry.Find(".p-name").SetText(post.Title).SetAttr("href", post.URL)
+		entry.Find(".dt-published").SetAttr("datetime", string(datetime)).SetText(post.PostMatter.Date.Format("2006-01-02"))
+		feed.AppendSelection(entry)
+	}
+
+	html, err := doc.Html()
+	check(err)
+	return html
+}
+
+func renderFeed(title string, config Config, posts []Post, specific string) string {
+	reader, err := os.Open("templates/feed.xml")
+	check(err)
+	doc, err := xmlquery.Parse(reader)
+	feed := doc.SelectElement("feed")
+	feed.SelectElement("title").FirstChild.Data = title
+	feed.SelectElement("link").SetAttr("href", config.BaseURL)
+	feed.SelectElement("id").FirstChild.Data = makeTagURI(config, specific)
+	datetime, err := posts[0].Date.MarshalText()
+	feed.SelectElement("updated").FirstChild.Data = string(datetime)
+	tpl := feed.SelectElement("entry")
+	xmlquery.RemoveFromTree(tpl)
+
+	for _, post := range posts {
+		text, err := xml.MarshalIndent(&FeedEntry{
+			Title: post.Title,
+			Link: makeLink(post.URL),
+			Id: makeTagURI(config, post.Basename),
+			Updated: post.Date,
+			Summary: post.Description,
+			Author: config.Title,
+			Content: FeedContent{
+				Content: post.Content,
+				Type: "html",
+			},
+		}, " ", " ")
+		check(err)
+		entry, err := xmlquery.ParseWithOptions(strings.NewReader(string(text)), xmlquery.ParserOptions{
+			Decoder: &xmlquery.DecoderOptions{
+				Strict: false,
+				AutoClose: xml.HTMLAutoClose,
+				Entity: xml.HTMLEntity,
+			},
+		})
+		check(err)
+		xmlquery.AddChild(feed, entry.SelectElement("entry"))
+	}
+
+	return doc.OutputXML(true)
+}
+
+func main() {
+	err, config := GetConfig()
+	check(err)
+	err = os.MkdirAll("public/post", 0755)
+	check(err)
+	log.Print("Generating site...")
+	posts, tags := readPosts("content", "post", "public")
+	for _, post := range posts {
+		err := os.MkdirAll(path.Join("public", "post", post.Basename), 0755)
+		check(err)
+		// output := renderPost(post, config)
+		// err = os.WriteFile(post.Output, []byte(output), 0755)
+		// check(err)
+	}
+	err = os.MkdirAll("public/tags", 0755)
+	check(err)
+	// fmt.Printf("%+v\n", renderTags(tags, config))
+	// err = os.WriteFile("public/tags/index.html", []byte(renderTags(tags, config)), 0755)
+	// check(err)
+	for _, tag := range tags.ToSlice() {
+		matchingPosts := []Post{}
+		for _, post := range posts {
+			if slices.Contains(post.Taxonomies.Tags, tag) {
+				matchingPosts = append(matchingPosts, post)
+			}
+		}
+		err := os.MkdirAll(path.Join("public", "tags", tag), 0755)
+		check(err)
+		// tagPage := renderListPage(tag, config, matchingPosts)
+		// fmt.Printf("%+v\n", tagPage)
+		// err = os.WriteFile(path.Join("public", "tags", tag, "index.html"), []byte(tagPage), 0755)
+		// check(err)
+
+		// TODO: tag atom
+		// fmt.Printf("%+v\n", renderFeed(fmt.Sprintf("%s - %s", config.Title, tag), config, matchingPosts, tag))
+
+		// fmt.Printf("%+v\n", renderListPage("", config, posts))
+
+		// TODO: atom
+		// fmt.Printf("%+v\n", renderFeed(config.Title, config, posts, "feed"))
+
+		// TODO: renderFeedStyles
+
+		// TODO: renderHomepage
+
+		// TODO: render404Page
+	}
+
+	fmt.Printf("%+v\n", tags)
+	fmt.Println()
+}