Add permalinks
If a page has a permalink value set in frontmatter, its slug is set to that permalink. If the slug ends with a trailing slash, the page is created at /slug/index.html; if not, it is created at /slug. This allows us to remove a workaround for the 404 page and enables saving a feed to feed.xml.

The "permalink" name, through inaccurate in this case in my opinion, was picked because it's already used by popular generators such as 11ty and Cobalt.

This commit also processes .xml files as pages, to make sure they're run through mustache. Previously, XML feeds had to be saved with a .html extension.
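
For illustration, here is a minimal sketch of the routing rule described above. The helper name and the "dist" output directory are hypothetical; this is not code from the commit itself.

// Sketch only (hypothetical names): the slug is the frontmatter permalink when set,
// otherwise the slug derived from the file path; a trailing slash then decides
// between `${slug}index.html` and a verbatim `${slug}` file under the output dir.
function outputFileFor(frontmatter, derivedSlug, outDir = "dist") {
  const slug = frontmatter.permalink || derivedSlug;
  return slug.endsWith("/") ? `${outDir}${slug}index.html` : `${outDir}${slug}`;
}

console.log(outputFileFor({}, "/posts/my-post/")); // "dist/posts/my-post/index.html"
console.log(outputFileFor({ permalink: "/404.html" }, "/404/")); // "dist/404.html"
console.log(outputFileFor({ permalink: "/feed.xml" }, "/feed/")); // "dist/feed.xml"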
robinmetral committed Aug 14, 2024
1 parent b44d4e4 commit 99236fb
Showing 4 changed files with 23 additions and 13 deletions.
27 changes: 17 additions & 10 deletions brut/src/buildPages.js
@@ -1,7 +1,7 @@
/** @typedef {import('.').Config} Config */

import fs from "fs-extra";
-import { resolve, basename, extname } from "path";
+import { resolve, basename, extname, dirname } from "path";
import { cwd } from "process";
import { load } from "js-yaml";
import { unified } from "unified";
@@ -216,7 +216,7 @@ async function loadPartials(partialsDir) {
}

/**
- * Returns a slug from a given file path
+ * Returns a slug from a given file path. Should always have a trailing slash.
* @param {string} path The full filesystem file path
* @param {string} pagesDir The config pages directory, to strip from slugs
* @returns {string}
@@ -239,7 +239,7 @@ function getSlug(path, pagesDir) {
} else {
// index: (n/a)
// post: /posts/my-post/
slug = slug.replace(".md", "/").replace(".html", "/");
slug = slug.replace(extname(slug), "/");
}
return slug;
}
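
As a rough illustration of the new extension-agnostic replacement, the sketch below strips a hypothetical pages directory and swaps the extension for a trailing slash. It is a simplification, not the repo's full getSlug implementation.

import { extname } from "path";

// Simplified sketch: strip the pages directory, then swap the file extension
// for a trailing slash, so .md, .html and .xml paths are handled uniformly.
function sketchGetSlug(path, pagesDir) {
  const slug = path.replace(pagesDir, "");
  return slug.replace(extname(slug), "/");
}

console.log(sketchGetSlug("/www/pages/posts/my-post.md", "/www/pages")); // "/posts/my-post/"
console.log(sketchGetSlug("/www/pages/feed.xml", "/www/pages")); // "/feed/" — hence the permalink override for feeds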
@@ -256,13 +256,18 @@ async function loadPages(pagesDir) {
const pages = /** @type {Page[]} */ ([]);
await Promise.all(
paths.map(async (path) => {
-// only process markdown or html pages
-if (path.endsWith(".md") || path.endsWith(".html")) {
+// only process markdown or html pages... or xml
+const extension = extname(path);
+if (
+extension === ".md" ||
+extension === ".html" ||
+extension === ".xml" // for feeds
+) {
const file = await readFile(path, "utf-8");
const { frontmatter, content } = extractFrontmatter(file);
pages.push({
path,
-slug: getSlug(path, pagesDir),
+slug: frontmatter.permalink || getSlug(path, pagesDir),
frontmatter,
content,
});
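
Under these changes, the objects pushed into pages might look roughly like the following; the values are hypothetical and only meant to show the permalink override and the .xml inclusion.

// Hypothetical examples of page entries after this change:
const examplePages = [
  {
    path: "/www/pages/posts/my-post.md",
    slug: "/posts/my-post/", // no permalink in frontmatter, so getSlug() decides
    frontmatter: { template: "default" },
    content: "# My post ...",
  },
  {
    path: "/www/pages/feed.xml",
    slug: "/feed.xml", // frontmatter.permalink wins over getSlug()
    frontmatter: { permalink: "/feed.xml" },
    content: '<?xml version="1.0" encoding="utf-8"?> ...',
  },
];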
@@ -359,13 +364,15 @@ export default async function buildPages({
partials,
});
// 2. write to fs
-// TEMP: fix 404 pages for Cloudflare Pages, see https://github.com/robinmetral/brut/issues/20
-if (page.slug === `/404/`) {
-await writeFile(`${outDir}/404.html`, result);
+if (!page.slug.endsWith("/")) {
+// if there's no trailing slash to the slug, the page shouldn't be saved under `${slug}index.html`
+const parentDir = `${outDir}${dirname(page.slug)}`;
+await mkdir(parentDir, { recursive: true });
+await writeFile(`${outDir}${page.slug}`, result);
} else {
const parentDir = `${outDir}${page.slug}`;
await mkdir(parentDir, { recursive: true });
-await writeFile(`${parentDir}/index.html`, result);
+await writeFile(`${parentDir}index.html`, result);
}
})
);
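
One usage note on the dirname() call above: for non-trailing-slash slugs it resolves the parent directory that must exist before writing, and recursive: true covers nested permalinks. A small illustration (the nested permalink is hypothetical):

import { dirname } from "path";

// For a flat permalink the parent is the output root; for a nested one,
// intermediate directories get created first thanks to { recursive: true }.
console.log(dirname("/404.html")); // "/" -> mkdir `${outDir}/`
console.log(dirname("/notes/feed.xml")); // "/notes" -> mkdir `${outDir}/notes`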
3 changes: 3 additions & 0 deletions www/pages/404.md
@@ -1,5 +1,8 @@
---
template: default
+permalink: /404.html
---

# Not found

[Go back home](/)
1 change: 1 addition & 0 deletions www/pages/feed.html → www/pages/feed.xml
@@ -4,6 +4,7 @@
base_url: https://brut.pages.dev
author: Robin
lang: en
+permalink: /feed.xml
-->
<?xml version="1.0" encoding="utf-8"?>
<feed xmlns="http://www.w3.org/2005/Atom">
5 changes: 2 additions & 3 deletions www/pages/index.html
@@ -17,7 +17,6 @@ <h1>Welcome to Brut</h1>
</li>
</ul>
<p>
-It is designed to help build static websites out of a collection of raw js (or
-ts), html, and markdown files (plus static assets). Nothing more, nothing
-less.
+It is designed to help build static websites out of a collection of raw html,
+js, and markdown files (plus static assets). Nothing more, nothing less.
</p>
