diff --git a/docs/display-race.md b/docs/display-race.md
deleted file mode 100644
index 58e1f69b4..000000000
--- a/docs/display-race.md
+++ /dev/null
@@ -1,19 +0,0 @@
-# Display race
-
-```js echo
-async function sleep(ms) {
-  return new Promise((resolve) => setTimeout(resolve, ms));
-}
-```
-
-```js echo
-const value = (function* () {
-  yield 2000;
-  yield 1000;
-})();
-```
-
-```js echo
-await sleep(value);
-display(value);
-```
diff --git a/docs/number.md b/docs/number.md
deleted file mode 100644
index 387fe622a..000000000
--- a/docs/number.md
+++ /dev/null
@@ -1,34 +0,0 @@
-# Big number test
-
-This is a test of the `big` and `small` classes.
-
-3,541
-+18 ↗︎
-
-Test +18 ↗︎ test
-
-GitHub Stars
-3,541
-+18 ↗︎
-
-Daily pageviews
-3,699
-+2.5% ↗︎
-
-Daily npm downloads
-2,737
-−33.5% ↘︎
-
-Total npm downloads
-1,169,127
-all-time
-
-This has been a test of the `big` and `small` classes.
diff --git a/src/config.ts b/src/config.ts
index 66c8ce20d..240a957a3 100644
--- a/src/config.ts
+++ b/src/config.ts
@@ -1,3 +1,4 @@
+import {createHash} from "node:crypto";
 import {existsSync, readFileSync} from "node:fs";
 import {stat} from "node:fs/promises";
 import op from "node:path";
@@ -8,7 +9,7 @@ import type MarkdownIt from "markdown-it";
 import {LoaderResolver} from "./dataloader.js";
 import {visitMarkdownFiles} from "./files.js";
 import {formatIsoDate, formatLocaleDate} from "./format.js";
-import {createMarkdownIt, parseMarkdown} from "./markdown.js";
+import {createMarkdownIt, parseMarkdownMetadata} from "./markdown.js";
 import {isAssetPath, parseRelativeUrl, resolvePath} from "./path.js";
 import {resolveTheme} from "./theme.js";
 
@@ -88,18 +89,29 @@ export async function readDefaultConfig(root?: string): Promise<Config> {
   return normalizeConfig(await importConfig(tsPath), root);
 }
 
+let cachedPages: {key: string; pages: Page[]} | null = null;
+
 function readPages(root: string, md: MarkdownIt): Page[] {
-  const pages: Page[] = [];
+  const files: {file: string; source: string}[] = [];
+  const hash = createHash("sha256");
   for (const file of visitMarkdownFiles(root)) {
     if (file === "index.md" || file === "404.md") continue;
     const source = readFileSync(join(root, file), "utf8");
-    const parsed = parseMarkdown(source, {path: file, md});
+    files.push({file, source});
+    hash.update(file).update(source);
+  }
+  const key = hash.digest("hex");
+  if (cachedPages?.key === key) return cachedPages.pages;
+  const pages: Page[] = [];
+  for (const {file, source} of files) {
+    const parsed = parseMarkdownMetadata(source, {path: file, md});
     if (parsed?.data?.draft) continue;
     const name = basename(file, ".md");
     const page = {path: join("/", dirname(file), name), name: parsed.title ?? "Untitled"};
     if (name === "index") pages.unshift(page);
     else pages.push(page);
   }
+  cachedPages = {key, pages};
   return pages;
 }
 
@@ -109,7 +121,15 @@ export function setCurrentDate(date = new Date()): void {
   currentDate = date;
 }
 
+// The config is used as a cache key for other operations; for example the pages
+// are used as a cache key for search indexing and the previous & next links in
+// the footer. When given the same spec (because import returned the same
+// module), we want to return the same Config instance.
+const configCache = new WeakMap();
+
 export function normalizeConfig(spec: any = {}, defaultRoot = "docs"): Config {
+  const cachedConfig = configCache.get(spec);
+  if (cachedConfig) return cachedConfig;
   let {
     root = defaultRoot,
     output = "dist",
@@ -168,6 +188,7 @@ export function normalizeConfig(spec: any = {}, defaultRoot = "docs"): Config {
   };
   if (pages === undefined) Object.defineProperty(config, "pages", {get: () => readPages(root, md)});
   if (sidebar === undefined) Object.defineProperty(config, "sidebar", {get: () => config.pages.length > 0});
+  configCache.set(spec, config);
   return config;
 }
 
diff --git a/src/files.ts b/src/files.ts
index 3298444a9..5a24fea91 100644
--- a/src/files.ts
+++ b/src/files.ts
@@ -52,6 +52,11 @@ export function* visitMarkdownFiles(root: string): Generator<string> {
 export function* visitFiles(root: string): Generator<string> {
   const visited = new Set();
   const queue: string[] = [(root = normalize(root))];
+  try {
+    visited.add(statSync(join(root, ".observablehq")).ino);
+  } catch {
+    // ignore the .observablehq directory, if it exists
+  }
   for (const path of queue) {
     const status = statSync(path);
     if (status.isDirectory()) {
diff --git a/src/markdown.ts b/src/markdown.ts
index b714be4f1..b5a1e1c46 100644
--- a/src/markdown.ts
+++ b/src/markdown.ts
@@ -343,6 +343,16 @@ export function parseMarkdown(input: string, options: ParseOptions): MarkdownPag
   };
 }
 
+/** Like parseMarkdown, but optimized to return only metadata. */
+export function parseMarkdownMetadata(input: string, options: ParseOptions): Pick<MarkdownPage, "data" | "title"> {
+  const {md, path} = options;
+  const {content, data} = matter(input, {});
+  return {
+    data: isEmpty(data) ? null : data,
+    title: data.title ?? findTitle(md.parse(content, {code: [], startLine: 0, currentLine: 0, path})) ?? null
+  };
+}
+
 function getHtml(
   key: "head" | "header" | "footer",
   data: Record<string, any>,
diff --git a/src/search.ts b/src/search.ts
index 593008830..120daa029 100644
--- a/src/search.ts
+++ b/src/search.ts
@@ -9,8 +9,8 @@ import {parseMarkdown} from "./markdown.js";
 import {faint, strikethrough} from "./tty.js";
 
 // Avoid reindexing too often in preview.
-const indexCache = new WeakMap<Config, {json: string; freshUntil: number}>();
-const reindexingDelay = 10 * 60 * 1000; // 10 minutes
+const indexCache = new WeakMap<Config["pages"], {json: string; freshUntil: number}>();
+const reindexDelay = 10 * 60 * 1000; // 10 minutes
 
 export interface SearchIndexEffects {
   logger: Logger;
@@ -19,17 +19,18 @@
 const defaultEffects: SearchIndexEffects = {logger: console};
 
 const indexOptions = {
-  fields: ["title", "text", "keywords"],
+  fields: ["title", "text", "keywords"], // fields to return with search results
   storeFields: ["title"],
-  processTerm(term) {
-    return term.match(/\p{N}/gu)?.length > 6 ? null : term.slice(0, 15).toLowerCase(); // fields to return with search results
+  processTerm(term: string) {
+    return (term.match(/\p{N}/gu)?.length ?? 0) > 6 ? null : term.slice(0, 15).toLowerCase();
   }
 };
 
 export async function searchIndex(config: Config, effects = defaultEffects): Promise<string> {
   const {root, pages, search, md} = config;
   if (!search) return "{}";
-  if (indexCache.has(config) && indexCache.get(config).freshUntil > +new Date()) return indexCache.get(config).json;
+  const cached = indexCache.get(pages);
+  if (cached && cached.freshUntil > Date.now()) return cached.json;
 
   // Get all the listed pages (which are indexed by default)
   const pagePaths = new Set(["/index"]);
@@ -84,7 +85,7 @@ export async function searchIndex(config: Config, effects = defaultEffects): Pro
     )
   );
 
-  indexCache.set(config, {json, freshUntil: +new Date() + reindexingDelay});
+  indexCache.set(pages, {json, freshUntil: Date.now() + reindexDelay});
   return json;
 }
 
diff --git a/test/build-test.ts b/test/build-test.ts
index 3ad3bf700..796f80ae3 100644
--- a/test/build-test.ts
+++ b/test/build-test.ts
@@ -42,7 +42,7 @@ describe("build", () => {
     await rm(actualDir, {recursive: true, force: true});
     if (generate) console.warn(`! generating ${expectedDir}`);
 
-    const config = Object.assign(await readConfig(undefined, path), {output: outputDir});
+    const config = {...(await readConfig(undefined, path)), output: outputDir};
     await build({config, addPublic}, new TestEffects(outputDir));
 
     // In the addPublic case, we don’t want to test the contents of the public
diff --git a/test/config-test.ts b/test/config-test.ts
index 85b0c8313..efc768300 100644
--- a/test/config-test.ts
+++ b/test/config-test.ts
@@ -4,7 +4,7 @@ import {normalizeConfig as config, mergeToc, readConfig, setCurrentDate} from ".
 import {LoaderResolver} from "../src/dataloader.js";
 
 describe("readConfig(undefined, root)", () => {
-  before(() => setCurrentDate(new Date("2024-01-11T01:02:03")));
+  before(() => setCurrentDate(new Date("2024-01-10T16:00:00")));
   it("imports the config file at the specified root", async () => {
     const {md, loaders, ...config} = await readConfig(undefined, "test/input/build/config");
     assert(md instanceof MarkdownIt);
@@ -28,7 +28,7 @@ describe("readConfig(undefined, root)", () => {
       head: "",
       header: "",
       footer:
-        'Built with Observable on Jan 11, 2024.',
+        'Built with Observable on Jan 10, 2024.',
       deploy: {
         workspace: "acme",
         project: "bi"
@@ -54,7 +54,7 @@ describe("readConfig(undefined, root)", () => {
       head: "",
       header: "",
       footer:
-        'Built with Observable on Jan 11, 2024.',
+        'Built with Observable on Jan 10, 2024.',
       deploy: null,
       search: false
     });