author    | Alan Pearce | 2024-04-17 20:28:04 +0200
committer | Alan Pearce | 2024-04-17 20:53:45 +0200
commit    | 30fed9da228ef9bab5734e000e598ff380cb55f5 (patch)
tree      | cfc89a40a12dcc572c0a457b62bdde129690e482 /src
parent    | 1e05c18ed8e44078e3e102ff5bb8af6e25717fc9 (diff)
download  | website-30fed9da228ef9bab5734e000e598ff380cb55f5.tar.lz
          | website-30fed9da228ef9bab5734e000e598ff380cb55f5.tar.zst
          | website-30fed9da228ef9bab5734e000e598ff380cb55f5.zip
bye bye bun
Diffstat (limited to 'src')
-rw-r--r-- | src/app.ts       | 272
-rw-r--r-- | src/config.ts    |   8
-rw-r--r-- | src/index.ts     |  36
-rw-r--r-- | src/posts.ts     |  61
-rw-r--r-- | src/templates.ts | 290
5 files changed, 0 insertions, 667 deletions
diff --git a/src/app.ts b/src/app.ts
deleted file mode 100644
index a51e235..0000000
--- a/src/app.ts
+++ /dev/null
@@ -1,272 +0,0 @@
-import path from "node:path";
-import fs from "node:fs/promises";
-import type { Stats } from "node:fs";
-import type { BunFile, Serve } from "bun";
-import * as Sentry from "@sentry/node";
-import prom from "bun-prometheus-client";
-import log from "loglevel";
-
-import config from "./config";
-
-log.setLevel((import.meta.env["LOG_LEVEL"] || "info") as log.LogLevelDesc);
-
-Sentry.init({
-  release: `homestead@${import.meta.env["FLY_MACHINE_VERSION"]}`,
-  tracesSampleRate: 1.0,
-});
-
-const expectedHostURL = new URL(
-  import.meta.env.NODE_ENV === "production"
-    ? config.base_url
-    : "http://localhost:3000",
-);
-const defaultHeaders = {
-  ...config.extra.headers,
-  vary: "Accept-Encoding",
-};
-
-type File = {
-  filename: string;
-  handle: BunFile;
-  relPath: string;
-  type: string;
-  size: number;
-  mtime: Date;
-  etag: string;
-};
-
-const metrics = {
-  requests: new prom.Counter({
-    name: "homestead_requests",
-    help: "Number of requests by path, status code, and method",
-    labelNames: ["status_code", "content_encoding", "cache_basis"] as const,
-  }),
-  requestDuration: new prom.Histogram({
-    name: "homestead_request_duration_seconds",
-    help: "Request duration in seconds",
-    labelNames: ["path"] as const,
-  }),
-};
-
-let files = new Map<string, File>();
-
-async function hashFile(file: BunFile): Promise<string> {
-  return new Bun.CryptoHasher("sha256")
-    .update(await file.arrayBuffer())
-    .digest("base64");
-}
-
-async function registerFile(
-  path: string,
-  pathname: string,
-  filename: string,
-  stat: Stats,
-): Promise<void> {
-  pathname = "/" + (pathname === "." || pathname === "./" ? "" : pathname);
-
-  if (files.get(pathname) !== undefined) {
-    log.warn("File already registered:", pathname);
-  }
-  const handle = Bun.file(filename);
-
-  files.set(pathname, {
-    filename,
-    relPath: "/" + path,
-    handle: handle,
-    type: pathname.startsWith("/feed-styles.xsl") ? "text/xsl" : handle.type,
-    size: stat.size,
-    mtime: stat.mtime,
-    etag: `W/"${await hashFile(handle)}"`,
-  });
-}
-
-async function walkDirectory(root: string) {
-  for (let relPath of await fs.readdir(root, { recursive: true })) {
-    const absPath = path.join(root, relPath);
-    const stat = await fs.stat(absPath);
-    if (stat.isFile()) {
-      if (relPath.includes("index.html")) {
-        const dir = relPath.replace("index.html", "");
-        await registerFile(relPath, dir, absPath, stat);
-      } else {
-        await registerFile(relPath, relPath, absPath, stat);
-      }
-    }
-  }
-}
-
-await walkDirectory("public/");
-
-async function serveFile(
-  file: File,
-  statusCode: number = 200,
-  extraHeaders: Record<string, string> = {},
-): Promise<Response> {
-  return new Response(await file.handle.arrayBuffer(), {
-    headers: {
-      "last-modified": file.mtime.toUTCString(),
-      ...extraHeaders,
-      ...defaultHeaders,
-    },
-    status: statusCode,
-  });
-}
-
-function parseIfModifiedSinceHeader(header: string | null): number {
-  return header ? new Date(header).getTime() + 999 : 0;
-}
-
-export const metricsServer = {
-  port: 9091,
-  fetch: async function (request) {
-    const pathname = new URL(request.url).pathname;
-    switch (pathname) {
-      case "/metrics":
-        return new Response(await prom.register.metrics());
-      default:
-        return new Response("", { status: 404 });
-    }
-  },
-} satisfies Serve;
-
-export const server = {
-  fetch: async function (request) {
-    const url = new URL(request.url);
-    const pathname = url.pathname.replace(/\/\/+/g, "/");
-    const hostname = request.headers.get("host")?.toLowerCase() || "unknown";
-    const endTimer = metrics.requestDuration.startTimer({ path: pathname });
-    let status;
-    let newpath;
-    try {
-      if (pathname === "/health") {
-        return new Response("OK", { status: (status = 200) });
-      } else if (
-        config.redirect_other_hostnames &&
-        hostname !== expectedHostURL.host
-      ) {
-        metrics.requests.inc({
-          content_encoding: "identity",
-          status_code: (status = 301),
-        });
-        return new Response("", {
-          status,
-          headers: {
-            location: new URL(pathname, expectedHostURL).toString(),
-          },
-        });
-      }
-      const { base, ext } = path.parse(pathname);
-      const file = files.get(pathname);
-      let contentEncoding = "identity";
-      let suffix = "";
-      if (
-        ![".br", ".zst", ".gz"].includes(ext || base) &&
-        !pathname.startsWith("/404.html") &&
-        file &&
-        (await file.handle.exists())
-      ) {
-        let etagMatch = request.headers.get("if-none-match") === file.etag;
-        let mtimeMatch =
-          parseIfModifiedSinceHeader(
-            request.headers.get("if-modified-since"),
-          ) >= file?.mtime.getTime();
-        if (etagMatch || mtimeMatch) {
-          metrics.requests.inc({
-            content_encoding: contentEncoding,
-            status_code: (status = 304),
-            cache_basis: etagMatch ? "etag" : "mtime",
-          });
-          return new Response("", { status: status, headers: defaultHeaders });
-        }
-        const encodings = (request.headers.get("accept-encoding") || "")
-          .split(",")
-          .map((x) => x.trim().toLowerCase());
-        if (encodings.includes("br") && files.has(pathname + ".br")) {
-          contentEncoding = "br";
-          suffix = ".br";
-        } else if (encodings.includes("zstd") && files.has(pathname + ".zst")) {
-          contentEncoding = "zstd";
-          suffix = ".zst";
-        } else if (encodings.includes("gzip") && files.has(pathname + ".gz")) {
-          contentEncoding = "gzip";
-          suffix = ".gz";
-        }
-
-        status = 200;
-        metrics.requests.inc({
-          status_code: status,
-          content_encoding: contentEncoding,
-        });
-        const endFile = files.get(pathname + suffix);
-        if (!endFile) {
-          throw new Error(`File ${pathname} not found`);
-        }
-        return serveFile(endFile, status, {
-          "content-encoding": contentEncoding,
-          "content-type": file.type,
-          // weak etags can be used for multiple equivalent representations
-          etag: file.etag,
-        });
-      } else {
-        if (files.has(pathname + "/")) {
-          newpath = pathname + "/";
-          metrics.requests.inc({
-            content_encoding: contentEncoding,
-            status_code: (status = 302),
-          });
-          return new Response("", {
-            status: status,
-            headers: { location: newpath },
-          });
-        } else if (
-          pathname.endsWith("index.html") &&
-          files.has(pathname.replace(/index.html$/, ""))
-        ) {
-          newpath = pathname.replace(/index.html$/, "");
-          metrics.requests.inc({
-            content_encoding: contentEncoding,
-            status_code: (status = 302),
-          });
-          return new Response("", {
-            status: status,
-            headers: { location: newpath },
-          });
-        }
-        status = 404;
-        const notfound = files.get("/404.html");
-        if (!request.headers.get("accept")?.split(",").includes("text/html")) {
-          return new Response("404 Not Found", {
-            status,
-            headers: defaultHeaders,
-          });
-        }
-        if (notfound) {
-          return serveFile(notfound, status, {
-            "content-type": "text/html; charset=utf-8",
-          });
-        } else {
-          log.warn("404.html not found");
-          return new Response("404 Not Found", {
-            status: status,
-            headers: { "content-type": "text/plain", ...defaultHeaders },
-          });
-        }
-      }
-    } catch (error) {
-      metrics.requests.inc({
-        status_code: status,
-        content_encoding: "identity",
-      });
-      Sentry.captureException(error);
-      log.error("Error", error);
-      return new Response("Something went wrong", { status: status });
-    } finally {
-      if (status === 200) {
-        const seconds = endTimer();
-        metrics.requestDuration.observe(seconds);
-      }
-    }
-  },
-} satisfies Serve;
-
-export default server;
diff --git a/src/config.ts b/src/config.ts
deleted file mode 100644
index 428d63c..0000000
--- a/src/config.ts
+++ /dev/null
@@ -1,8 +0,0 @@
-import fs from "node:fs";
-import toml from "toml";
-
-const config = toml.parse(fs.readFileSync("config.toml", "utf-8"));
-if (import.meta.env.NODE_ENV === "development") {
-  config.base_url = "http://localhost:3000";
-}
-export default config;
diff --git a/src/index.ts b/src/index.ts
deleted file mode 100644
index 30a6f34..0000000
--- a/src/index.ts
+++ /dev/null
@@ -1,36 +0,0 @@
-import { performance } from "node:perf_hooks";
-
-performance.mark("startup-begin");
-
-import log from "loglevel";
-
-import { server, metricsServer } from "./app";
-
-log.setLevel((import.meta.env["LOG_LEVEL"] || "info") as log.LogLevelDesc);
-
-const metricsServed = Bun.serve(metricsServer);
-log.info(`Metrics server started on port ${metricsServed.port}`);
-
-const served = Bun.serve(server);
-log.info(`Serving website on ${served.url}`);
-
-process.on("SIGTERM", function () {
-  log.info("SIGTERM received, shutting down...");
-  metricsServed.stop();
-  served.stop();
-  process.exit(0);
-});
-
-process.on("SIGINT", function () {
-  log.info("SIGINT received, shutting down...");
-  metricsServed.stop();
-  served.stop();
-  process.exit(0);
-});
-
-performance.mark("startup-end");
-
-performance.measure("startup", "startup-begin", "startup-end");
-for (const entry of performance.getEntriesByName("startup")) {
-  log.info(`Startup took ${entry.duration} ms`);
-}
diff --git a/src/posts.ts b/src/posts.ts
deleted file mode 100644
index 2b678b9..0000000
--- a/src/posts.ts
+++ /dev/null
@@ -1,61 +0,0 @@
-import path from "node:path";
-import fs from "node:fs/promises";
-
-import { matter } from "toml-matter";
-
-type MatterFile = ReturnType<typeof matter>;
-
-export type Post = {
-  input: string;
-  output: string;
-  basename: string;
-  url: string;
-  title: string;
-  date: Date;
-  description: string | undefined;
-  taxonomies: Record<string, string[]>;
-};
-
-export async function getPost(filename: string): Promise<MatterFile> {
-  return matter(await fs.readFile(filename, "utf8"));
-}
-
-export async function readPosts(
-  root: string,
-  inputDir: string,
-  outputDir: string,
-): Promise<{ posts: Array<Post>; tags: Set<string> }> {
-  let tags = new Set<string>();
-  let posts = new Array<Post>();
-  const subdir = path.join(root, inputDir);
-  for (let pathname of await fs.readdir(subdir)) {
-    const pathFromRoot = path.join(subdir, pathname);
-    const stat = await fs.stat(pathFromRoot);
-    if (stat.isFile() && path.extname(pathname) === ".md") {
-      if (pathname !== "_index.md") {
-        const input = pathFromRoot;
-        const output = pathFromRoot
-          .replace(root, outputDir)
-          .replace(".md", "/index.html");
-        const url = pathFromRoot.replace(root, "").replace(".md", "/");
-
-        const file = await getPost(input);
-
-        (file.data["taxonomies"] as any)?.tags?.map((t: string) =>
-          tags.add(t.toLowerCase()),
-        );
-        posts.push({
-          input,
-          output,
-          basename: path.basename(pathname, ".md"),
-          url,
-          ...file.data,
-        } as Post);
-      }
-    }
-  }
-  return {
-    posts: posts.sort((a, b) => b.date.getTime() - a.date.getTime()),
-    tags,
-  };
-}
diff --git a/src/templates.ts b/src/templates.ts
deleted file mode 100644
index 8099065..0000000
--- a/src/templates.ts
+++ /dev/null
@@ -1,290 +0,0 @@
-import * as fs from "node:fs/promises";
-import * as cheerio from "cheerio";
-import { matter } from "toml-matter";
-import { Marked } from "marked";
-import { PurgeCSS } from "purgecss";
-import log from "loglevel";
-
-import config from "./config";
-import { getPost, readPosts, type Post } from "./posts";
-
-const css = await Bun.file("templates/style.css").text();
-
-const marked = new Marked();
-marked.use({
-  gfm: true,
-});
-
-const purgeCSS = new PurgeCSS();
-
-function addMenu(
-  parent: cheerio.Cheerio<cheerio.AnyNode>,
-  child: cheerio.Cheerio<cheerio.AnyNode>,
-) {
-  parent.empty();
-  for (const link of config.menus.main) {
-    parent.append(child.clone().attr("href", link.url).text(link.name));
-  }
-}
-
-export async function layout(
-  html: string,
-  pageTitle: string,
-): Promise<cheerio.CheerioAPI> {
-  const ccss = (
-    await purgeCSS.purge({
-      content: [
-        {
-          raw: html,
-          extension: ".html",
-        },
-      ],
-      css: [{ raw: css }],
-    })
-  )[0].css;
-  const $ = cheerio.load(html);
-  $("html").attr("lang", config.default_language);
-  $("head > link[rel=alternate]").attr("title", config.title);
-  addMenu($("nav"), $("nav a"));
-  $(".title").text(config.title);
-  $("title").text(pageTitle);
-  $(".p-name").text(pageTitle);
-  $("head").children("style").text(ccss);
-  return $;
-}
-
-async function render404Page(): Promise<string> {
-  const $ = await layout(
-    await fs.readFile("templates/404.html", "utf-8"),
-    "404 Not Found",
-  );
-  return $.html();
-}
-
-async function renderHomepage(posts: Array<Post>): Promise<string> {
-  const file = matter(await fs.readFile("content/_index.md", "utf-8"));
-  const $ = await layout(
-    await fs.readFile("templates/homepage.html", "utf-8"),
-    config.title,
-  );
-
-  $("body").addClass("h-card");
-  $(".title").addClass("p-name").addClass("u-url");
-  $("#content").html(await marked.parse(file.content));
-  const $feed = $(".h-feed");
-  const $entry = $(".h-entry").remove();
-
-  for (const post of posts) {
-    const $post = $entry.clone();
-    $post.find(".p-name").text(post.title);
-    $post.find(".u-url").attr("href", post.url);
-    $post
-      .find(".dt-published")
-      .attr("datetime", post.date.toISOString().replace(/\.\d{3}/, ""))
-      .text(post.date.toISOString().slice(0, 10));
-    $post.appendTo($feed);
-  }
-
-  $(".u-email").attr("href", `mailto:${config.email}`).text(config.email);
-  const $elsewhere = $(".elsewhere");
-  const $linkRelMe = $elsewhere.find(".u-url[rel=me]").parentsUntil("ul");
-  $linkRelMe.remove();
-  for (const link of config.menus.me) {
-    const $link = $linkRelMe.clone();
-    $link.find("a").attr("href", link.url).text(link.name);
-    $link.appendTo($elsewhere);
-  }
-  return $.html();
-}
-
-async function renderPost(file: Post, content: string) {
-  const $ = await layout(
-    await fs.readFile("templates/post.html", "utf-8"),
-    file.title,
-  );
-
-  $(".title").addClass("h-card p-author").attr("rel", "author");
-  $(".h-entry .dt-published")
-    .attr("datetime", file.date.toISOString().replace(/\.\d{3}/, ""))
-    .text(file.date.toISOString().slice(0, 10));
-  $(".h-entry .e-content").html(content);
-  const categories = $(".h-entry .p-categories");
-  const cat = categories.find(".p-category").parentsUntil(categories);
-  cat.remove();
-  for (const tag of file.taxonomies["tags"].sort()) {
-    categories.append(
-      cat
-        .clone()
-        .find(".p-category")
-        .attr("href", `/tags/${tag}/`)
-        .text(`#${tag}`)
-        .parent(),
-    );
-  }
-
-  return $.html();
-}
-
-async function renderListPage(tag: string, posts: Post[]) {
-  const $ = await layout(
-    await fs.readFile("templates/list.html", "utf-8"),
-    tag || config.title,
-  );
-  const $feed = $(".h-feed");
-  const $tpl = $(".h-entry").remove();
-  $(".title").addClass("p-author h-card").attr("rel", "author");
-  if (tag === "") {
-    $(".filter").remove();
-  } else {
-    $(".filter").find("h3").text(`#${tag}`);
-  }
-
-  for (const post of posts) {
-    const $post = $tpl.clone();
-    $post.find(".p-name").text(post.title);
-    $post.find(".u-url").attr("href", post.url);
-    $post
-      .find(".dt-published")
-      .attr("datetime", post.date.toISOString().replace(/\.\d{3}/, ""))
-      .text(post.date.toISOString().slice(0, 10));
-    $post.appendTo($feed);
-  }
-  return $.html();
-}
-
-async function renderTags(tags: string[]) {
-  const $ = await layout(
-    await fs.readFile("templates/tags.html", "utf-8"),
-    config.title,
-  );
-  const $tags = $(".tags");
-  const $tpl = $(".h-feed");
-  $tpl.remove();
-  for (const tag of tags) {
-    const $tag = $tpl.clone();
-    $tag.find("a").attr("href", `/tags/${tag}/`).text(`#${tag}`);
-    $tag.appendTo($tags);
-  }
-  return $.html();
-}
-
-const makeTagURI = (specific: string) =>
-  `tag:${config.original_domain},${config.domain_start_date}:${specific}`;
-
-async function renderFeed(title: string, posts: Post[], tag?: string) {
-  const $ = cheerio.load(await fs.readFile("templates/feed.xml", "utf-8"), {
-    xml: true,
-  });
-  const $feed = $("feed");
-  $feed.children("title").text(title);
-  $feed.children("link").attr("href", config.base_url);
-  $feed.children("id").text(makeTagURI(tag || "feed"));
-  $feed
-    .children("updated")
-    .text(posts[0].date.toISOString().replace(/\.\d{3}/, ""));
-
-  const $tpl = $("feed > entry").remove();
-  for (const post of posts) {
-    const $post = $tpl.clone();
-    $post.children("title").text(post.title);
-    $post
-      .children("link")
-      .attr("href", new URL(post.url, config.base_url).href);
-    $post.children("id").text(makeTagURI(post.basename));
-    $post
-      .children("updated")
-      .text(post.date.toISOString().replace(/\.\d{3}/, ""));
-    $post.find("author > name").text(config.title);
-    $post.children("summary").text(post.description || "");
-    const content = marked.parse((await getPost(post.input)).content);
-    $post.children("content").html(await content);
-    $post.appendTo($feed);
-  }
-
-  return $.xml();
-}
-
-async function renderFeedStyles() {
-  const $ = cheerio.load(
-    await fs.readFile("templates/feed-styles.xsl", "utf-8"),
-    {
-      xml: true,
-    },
-  );
-  $("style").text(css);
-  return $.xml();
-}
-
-export default async function generateSite() {
-  const tasks = [];
-  const { posts, tags } = await readPosts("content", "post", "public");
-  await fs.mkdir("public/post", { recursive: true });
-  for (const post of posts) {
-    const content = await marked.parse((await getPost(post.input)).content);
-    await fs.mkdir(`public/post/${post.basename}`, { recursive: true });
-    tasks.push(async () => {
-      log.debug(`Rendering post ${post.basename} to ${post.output}`);
-      return fs.writeFile(post.output, await renderPost(post, content));
-    });
-  }
-  await fs.mkdir("public/tags", { recursive: true });
-  tasks.push(async () => {
-    log.debug("Rendering tags page to public/tags/index.html");
-    return fs.writeFile(
-      "public/tags/index.html",
-      await renderTags([...tags].sort()),
-    );
-  });
-  for (const tag of tags) {
-    log.debug(`Processing tag ${tag}`);
-    const matchingPosts = posts.filter((p) =>
-      p.taxonomies["tags"].includes(tag),
-    );
-    await fs.mkdir(`public/tags/${tag}`, { recursive: true });
-    tasks.push(async () => {
-      log.debug(`Rendering tag ${tag} to public/tags/${tag}/index.html`);
-      return fs.writeFile(
-        `public/tags/${tag}/index.html`,
-        await renderListPage(tag, matchingPosts),
-      );
-    });
-
-    tasks.push(async () => {
-      log.debug(`Rendering tag ${tag} feed to public/tags/${tag}/atom.xml`);
-      return fs.writeFile(
-        `public/tags/${tag}/atom.xml`,
-        await renderFeed(`${config.title} - ${tag}`, matchingPosts, tag),
-      );
-    });
-  }
-  tasks.push(async () => {
-    log.debug("Rendering posts page to public/post/index.html");
-    return fs.writeFile(
-      "public/post/index.html",
-      await renderListPage("", posts),
-    );
-  });
-  tasks.push(async () => {
-    log.debug("Rendering site feed to public/atom.xml");
-    return fs.writeFile(
-      "public/atom.xml",
-      await renderFeed(config.title, posts),
-    );
-  });
-  tasks.push(async () => {
-    log.debug("Rendering feed styles to public/feed-styles.xsl");
-    return fs.writeFile("public/feed-styles.xsl", await renderFeedStyles());
-  });
-  tasks.push(async () => {
-    log.debug("Rendering homepage to public/index.html");
-    return fs.writeFile(
-      "public/index.html",
-      await renderHomepage(posts.slice(0, 3)),
-    );
-  });
-  tasks.push(async () => {
-    log.debug("Rendering 404 page to public/404.html");
-    return fs.writeFile("public/404.html", await render404Page());
-  });
-  return Promise.all(tasks.map((f) => f()));
-}