diff --git a/.gitignore b/.gitignore
index c82ae3624..303f644da 100644
--- a/.gitignore
+++ b/.gitignore
@@ -6,6 +6,6 @@ docs/themes.md
docs/theme/*.md
dist/
node_modules/
-test/output/*-changed.*
-test/output/build/*-changed/
+test/output/**/*-changed.*
+test/output/build/**/*-changed/
yarn-error.log
diff --git a/bin/observable.ts b/bin/observable.ts
index fe12e75bc..202f6ae00 100644
--- a/bin/observable.ts
+++ b/bin/observable.ts
@@ -2,7 +2,6 @@ import {type ParseArgsConfig, parseArgs} from "node:util";
import * as clack from "@clack/prompts";
import {readConfig} from "../src/config.js";
import {CliError} from "../src/error.js";
-import {enableNpmVersionResolution, enableRemoteModulePreload} from "../src/javascript/imports.js";
import {faint, link, red} from "../src/tty.js";
const args = process.argv.slice(2);
@@ -150,8 +149,6 @@ try {
else if (name === "open") values.open = true;
}
const {config, root, host, port, open} = values;
- enableNpmVersionResolution(false);
- enableRemoteModulePreload(false);
await import("../src/preview.js").then(async (preview) =>
preview.preview({
config: await readConfig(config, root),
diff --git a/docs/css/card.md b/docs/css/card.md
index d5abe5885..20b86870e 100644
--- a/docs/css/card.md
+++ b/docs/css/card.md
@@ -10,7 +10,7 @@ The `card` class is used to group and delineate content. The `card` classes appl
```
-
Observable Plot ’s
title and
subtitle options generate
h2
and
h3
elements, respectively, and so will inherit these card styles.
+Observable Plot ’s
title and
subtitle options generate
h2
and
h3
elements, respectively, and so will inherit these card styles.
Cards can be used on their own, but they most often exist in a [grid](./grid). Cards can contain whatever you like, including text, images, charts, tables, inputs, and more.
diff --git a/docs/getting-started.md b/docs/getting-started.md
index d990d47a0..90e63060c 100644
--- a/docs/getting-started.md
+++ b/docs/getting-started.md
@@ -303,7 +303,7 @@ async function json(url) {
const station = await json(`https://api.weather.gov/points/${latitude},${longitude}`);
const forecast = await json(station.properties.forecastHourly);
-process.stdout.write(JSON.stringify(forecast));
+process.stdout.write(JSON.stringify(forecast));
```js
const location = view(Locator([-122.47, 37.8]));
diff --git a/docs/inputs/button.md b/docs/inputs/button.md
index 1a0301db4..6cc07b018 100644
--- a/docs/inputs/button.md
+++ b/docs/inputs/button.md
@@ -2,7 +2,7 @@
API ·
Source · The button input emits an *input* event when you click it. Buttons may be used to trigger the evaluation of cells, say to restart an animation. For example, below is an animation that progressively hides a bar. Clicking the button will restart the animation.
-
+
```js echo
const replay = view(Inputs.button("Replay"));
diff --git a/docs/inputs/search.md b/docs/inputs/search.md
index 186cd7cb0..3f5422ca5 100644
--- a/docs/inputs/search.md
+++ b/docs/inputs/search.md
@@ -4,7 +4,12 @@
By default, the query is split into terms separated by spaces; each term is then prefix-matched against the property values (the fields) of each row in the data. Try searching for “gen” below to find Gentoo penguins.
-```js echo
+```js
+const searchInput = Inputs.search(penguins, {placeholder: "Search penguins…"});
+const search = view(searchInput);
+```
+
+```js run=false
const search = view(Inputs.search(penguins, {placeholder: "Search penguins…"}));
```
@@ -19,11 +24,11 @@ Inputs.table(search)
```
```js
-const input = document.querySelector("input[placeholder='Search penguins…']");
for (const abbr of document.querySelectorAll("abbr")) {
abbr.title = `Search for “${abbr.textContent}”`;
abbr.style.cursor = "default";
abbr.onclick = () => {
+ const input = searchInput.querySelector("input");
input.value = abbr.textContent;
input.dispatchEvent(new Event("input", {bubbles: true}));
};
diff --git a/docs/javascript/bar.js b/docs/javascript/bar.js
new file mode 100644
index 000000000..c399d5689
--- /dev/null
+++ b/docs/javascript/bar.js
@@ -0,0 +1 @@
+export const bar = "bar";
diff --git a/docs/javascript/files.md b/docs/javascript/files.md
index 263eac835..1d14a19c4 100644
--- a/docs/javascript/files.md
+++ b/docs/javascript/files.md
@@ -145,15 +145,15 @@ A common gotcha with JSON is that it has no built-in date type; dates are theref
To display an image, you can use a static image in [Markdown](../markdown) such as ` ` or ``. Likewise, you can use a `video` or `audio` element. Per [file-based routing](../routing#files), static references to these files are automatically detected and therefore these files will be included in the built output.
-
+
-```md
-
+```html run=false
+
```
If you want to manipulate an image in JavaScript, use `file.image`. For example, below we load an image and invert the RGB channel values.
-
+
```js echo
const canvas = document.querySelector("#horse-canvas");
diff --git a/docs/javascript/foo.js b/docs/javascript/foo.js
index 9d7381d29..a93c1b573 100644
--- a/docs/javascript/foo.js
+++ b/docs/javascript/foo.js
@@ -1 +1,2 @@
+export {bar} from "./bar.js"
export const foo = 42;
diff --git a/docs/javascript/reactivity.md b/docs/javascript/reactivity.md
index 4ed39d1ae..e16f33f88 100644
--- a/docs/javascript/reactivity.md
+++ b/docs/javascript/reactivity.md
@@ -38,7 +38,7 @@ const colors = ["#4269d0", "#efb118", "#ff725c", "#6cc5b0"];
const duration = 2000;
```
-
+
```js echo
const canvas = document.querySelector("#canvas");
@@ -60,7 +60,7 @@ invalidation.then(() => cancelAnimationFrame(frame));
The `visibility` function returns a promise that resolves when the code block’s display root is visible. This allows you to defer animation or computation until the content scrolls into view. If you missed the animation, try reloading the page and then scrolling down.
-
+
```js echo
await visibility(); // wait until this node is visible
diff --git a/docs/lib/shapefile.md b/docs/lib/shapefile.md
index 56ade8058..3afd3d776 100644
--- a/docs/lib/shapefile.md
+++ b/docs/lib/shapefile.md
@@ -43,7 +43,9 @@ Plot.plot({
Or, streaming to a canvas:
-
+
+
+
```js echo
const canvas = document.querySelector("#map-canvas");
@@ -52,8 +54,12 @@ context.fillStyle = getComputedStyle(canvas).color;
context.clearRect(0, 0, canvas.width, canvas.height);
const path = d3.geoPath(d3.geoEquirectangular(), context);
-const stream = await FileAttachment("ne_110m_land/ne_110m_land.shp").url();
-const source = await shapefile.open(stream);
+const source = await shapefile.open(
+ ...(await Promise.all([
+ FileAttachment("ne_110m_land/ne_110m_land.shp").stream(),
+ FileAttachment("ne_110m_land/ne_110m_land.dbf").stream()
+ ]))
+);
while (true) {
const {done, value} = await source.read();
diff --git a/docs/lib/vega-lite.md b/docs/lib/vega-lite.md
index 4379ba871..1344dbab5 100644
--- a/docs/lib/vega-lite.md
+++ b/docs/lib/vega-lite.md
@@ -25,33 +25,17 @@ Or you can use a [Vega-Lite JSON view specification](https://vega.github.io/vega
```js echo
vl.render({
spec: {
- "width": 640,
- "height": 400,
- "data": {
- "url": await FileAttachment("gistemp.csv").url()
- },
- "mark": "point",
- "encoding": {
- "x": {
- "type": "temporal",
- "field": "Date"
- },
- "y": {
- "type": "quantitative",
- "field": "Anomaly"
- },
- "color": {
- "type": "quantitative",
- "field": "Anomaly",
- "scale": {
- "range": "diverging",
- "reverse": true
- }
- }
+ width: 640,
+ height: 400,
+ data: {url: await FileAttachment("gistemp.csv").url(), format: {type: "csv"}},
+ mark: "point",
+ encoding: {
+ x: {type: "temporal", field: "Date"},
+ y: {type: "quantitative", field: "Anomaly"},
+ color: {type: "quantitative", field: "Anomaly", scale: {range: "diverging", reverse: true}}
}
}
})
```
When loading data from a file as above, use
FileAttachment
so that referenced files are included on
build .
-
diff --git a/docs/lib/xlsx.md b/docs/lib/xlsx.md
index 3c6ebba43..fdee6a0ac 100644
--- a/docs/lib/xlsx.md
+++ b/docs/lib/xlsx.md
@@ -60,6 +60,6 @@ Some additional details on values: dates are interpreted as UTC; formula results
If you’d prefer to use [ExcelJS](https://github.com/exceljs/exceljs) directly, you can import it like so:
-```js echo
+```js run=false
import Excel from "npm:exceljs";
```
diff --git a/docs/markdown.md b/docs/markdown.md
index f591859e9..e8962ab38 100644
--- a/docs/markdown.md
+++ b/docs/markdown.md
@@ -2,8 +2,6 @@
Markdown in Observable Framework follows the [CommonMark spec](https://spec.commonmark.org/) and is powered by [markdown-it](https://github.com/markdown-it/markdown-it). We also feature [live JavaScript](./javascript) as either [fenced code blocks](./javascript#fenced-code-blocks) (```js
) or [inline expressions](./javascript#inline-expressions) ($\{…}
), and [HTML in Markdown](#html), and [front matter](#front-matter) for page-level configuration. If you don’t already know Markdown, please see [GitHub’s guide to Markdown](https://docs.github.com/en/get-started/writing-on-github/getting-started-with-writing-and-formatting-on-github/basic-writing-and-formatting-syntax) for an introduction.
-Observable Framework currently deviates from CommonMark in how blank lines are handled in HTML; see below. This is a limitation of our parser needed for incremental update during preview.
-
Here are a few examples of Markdown content to get you started.
## Front matter
@@ -104,30 +102,4 @@ This produces:
This text is not visible by default.
-In Markdown, blank lines denote separate HTML blocks; be sure to avoid blank lines if you want to treat a chunk of HTML as a single block. For example, write this:
-
-```md
-
-
-```
-
-Don’t write this:
-
-```md
-
-
-
- one
- two
- three
-
-
-```
-
-In the latter case, the li elements become top-level and wrapped in a span, rather than children of the ul.
-
Also see [Hypertext Literal](./lib/htl) for generating dynamic HTML in JavaScript.
diff --git a/package.json b/package.json
index d2c0b9fbb..9617a864a 100644
--- a/package.json
+++ b/package.json
@@ -55,14 +55,13 @@
"ci-info": "^4.0.0",
"esbuild": "^0.19.8",
"fast-array-diff": "^1.1.0",
- "fast-deep-equal": "^3.1.3",
"gray-matter": "^4.0.3",
"he": "^1.2.0",
"highlight.js": "^11.8.0",
"is-docker": "^3.0.0",
"is-wsl": "^3.1.0",
+ "jsdom": "^24.0.0",
"jszip": "^3.10.1",
- "linkedom": "^0.15.6",
"markdown-it": "^13.0.2",
"markdown-it-anchor": "^8.6.7",
"mime": "^3.0.0",
@@ -70,6 +69,7 @@
"open": "^9.1.0",
"rollup": "^4.6.0",
"rollup-plugin-esbuild": "^6.1.0",
+ "semver": "^7.5.4",
"send": "^0.18.0",
"tar-stream": "^3.1.6",
"tsx": "~4.2.1",
@@ -78,7 +78,9 @@
"ws": "^8.14.2"
},
"devDependencies": {
+ "@types/d3-array": "^3.2.1",
"@types/he": "^1.2.3",
+ "@types/jsdom": "^21.1.6",
"@types/markdown-it": "^13.0.2",
"@types/mime": "^3.0.2",
"@types/mocha": "^10.0.2",
@@ -98,6 +100,7 @@
"eslint-config-prettier": "^9.1.0",
"eslint-import-resolver-typescript": "^3.6.1",
"eslint-plugin-import": "^2.29.0",
+ "fast-deep-equal": "^3.1.3",
"mocha": "^10.2.0",
"prettier": "^3.0.3 <3.1",
"typescript": "^5.2.2",
diff --git a/src/build.ts b/src/build.ts
index 539776d3f..4e69d7141 100644
--- a/src/build.ts
+++ b/src/build.ts
@@ -1,31 +1,29 @@
+import {createHash} from "node:crypto";
import {existsSync} from "node:fs";
import {access, constants, copyFile, readFile, writeFile} from "node:fs/promises";
-import {basename, dirname, join} from "node:path";
-import {fileURLToPath} from "node:url";
-import type {Config, Style} from "./config.js";
-import {mergeStyle} from "./config.js";
+import {basename, dirname, extname, join} from "node:path";
+import type {Config} from "./config.js";
import {Loader} from "./dataloader.js";
import {CliError, isEnoent} from "./error.js";
import {getClientPath, prepareOutput, visitMarkdownFiles} from "./files.js";
-import {createImportResolver, rewriteModule} from "./javascript/imports.js";
+import {getModuleHash} from "./javascript/module.js";
+import {transpileModule} from "./javascript/transpile.js";
import type {Logger, Writer} from "./logger.js";
-import {renderServerless} from "./render.js";
+import type {MarkdownPage} from "./markdown.js";
+import {parseMarkdown} from "./markdown.js";
+import {populateNpmCache, resolveNpmImport, resolveNpmSpecifier} from "./npm.js";
+import {isPathImport, relativePath, resolvePath} from "./path.js";
+import {renderPage} from "./render.js";
+import type {Resolvers} from "./resolvers.js";
+import {getModuleResolver, getResolvers} from "./resolvers.js";
+import {resolveFilePath, resolveImportPath, resolveStylesheetPath} from "./resolvers.js";
import {bundleStyles, rollupClient} from "./rollup.js";
import {searchIndex} from "./search.js";
import {Telemetry} from "./telemetry.js";
-import {faint} from "./tty.js";
-import {resolvePath} from "./url.js";
-
-const EXTRA_FILES = new Map([
- [
- join(fileURLToPath(import.meta.resolve("@observablehq/runtime")), "../../dist/runtime.js"),
- "_observablehq/runtime.js"
- ]
-]);
+import {faint, yellow} from "./tty.js";
export interface BuildOptions {
config: Config;
- clientEntry?: string;
addPublic?: boolean;
}
@@ -47,7 +45,7 @@ export interface BuildEffects {
}
export async function build(
- {config, addPublic = true, clientEntry = "./src/client/index.js"}: BuildOptions,
+ {config, addPublic = true}: BuildOptions,
effects: BuildEffects = new FileBuildEffects(config.output)
): Promise {
const {root} = config;
@@ -62,84 +60,91 @@ export async function build(
if (!pageCount) throw new CliError(`Nothing to build: no page files found in your ${root} directory.`);
effects.logger.log(`${faint("found")} ${pageCount} ${faint(`page${pageCount === 1 ? "" : "s"} in`)} ${root}`);
- // Render .md files, building a list of file attachments as we go.
- const files: string[] = [];
- const imports: string[] = [];
- const styles: Style[] = [];
+ // Parse .md files, building a list of additional assets as we go.
+ const pages = new Map();
+ const files = new Set();
+ const localImports = new Set();
+ const globalImports = new Set();
+ const stylesheets = new Set();
for await (const sourceFile of visitMarkdownFiles(root)) {
const sourcePath = join(root, sourceFile);
- const outputPath = join(dirname(sourceFile), basename(sourceFile, ".md") + ".html");
- effects.output.write(`${faint("render")} ${sourcePath} ${faint("→")} `);
const path = join("/", dirname(sourceFile), basename(sourceFile, ".md"));
- const render = await renderServerless(sourcePath, {path, ...config});
- const resolveFile = ({name}) => resolvePath(sourceFile, name);
- files.push(...render.files.map(resolveFile));
- imports.push(...render.imports.filter((i) => i.type === "local").map(resolveFile));
- await effects.writeFile(outputPath, render.html);
- const style = mergeStyle(path, render.data?.style, render.data?.theme, config.style);
- if (style && !styles.some((s) => styleEquals(s, style))) styles.push(style);
+ const options = {path, ...config};
+ effects.output.write(`${faint("parse")} ${sourcePath} `);
+ const start = performance.now();
+ const page = await parseMarkdown(sourcePath, options);
+ const resolvers = await getResolvers(page, {root, path: sourceFile});
+ const elapsed = Math.floor(performance.now() - start);
+ for (const f of resolvers.assets) files.add(resolvePath(sourceFile, f));
+ for (const f of resolvers.files) files.add(resolvePath(sourceFile, f));
+ for (const i of resolvers.localImports) localImports.add(resolvePath(sourceFile, i));
+ for (const i of resolvers.globalImports) globalImports.add(resolvePath(sourceFile, resolvers.resolveImport(i)));
+ for (const s of resolvers.stylesheets) stylesheets.add(/^\w+:/.test(s) ? s : resolvePath(sourceFile, s));
+ effects.output.write(`${faint("in")} ${(elapsed >= 100 ? yellow : faint)(`${elapsed}ms`)}\n`);
+ pages.set(sourceFile, {page, resolvers});
}
- // Add imported local scripts.
- for (const script of config.scripts) {
- if (!/^\w+:/.test(script.src)) {
- imports.push(script.src);
- }
+ // For cache-breaking we rename most assets to include content hashes.
+ const aliases = new Map();
+
+ // Add the search bundle and data, if needed.
+ if (config.search) {
+ globalImports.add("/_observablehq/search.js");
+ const contents = await searchIndex(config, effects);
+ effects.output.write(`${faint("index →")} `);
+ const hash = createHash("sha256").update(contents).digest("hex").slice(0, 8);
+ const alias = `/_observablehq/minisearch.${hash}.json`;
+ aliases.set("/_observablehq/minisearch.json", alias);
+ await effects.writeFile(join("_observablehq", `minisearch.${hash}.json`), contents);
}
- // Generate the client bundles.
+ // Generate the client bundles (JavaScript and styles). TODO Use a content
+ // hash, or perhaps the Framework version number for built-in modules.
if (addPublic) {
- for (const [entry, name] of [
- [clientEntry, "client.js"],
- ["./src/client/stdlib.js", "stdlib.js"],
- // TODO Prune this list based on which libraries are actually used.
- // TODO Remove library helpers (e.g., duckdb) when they are published to npm.
- ["./src/client/stdlib/dot.js", "stdlib/dot.js"],
- ["./src/client/stdlib/duckdb.js", "stdlib/duckdb.js"],
- ["./src/client/stdlib/inputs.css", "stdlib/inputs.css"],
- ["./src/client/stdlib/inputs.js", "stdlib/inputs.js"],
- ["./src/client/stdlib/mermaid.js", "stdlib/mermaid.js"],
- ["./src/client/stdlib/sqlite.js", "stdlib/sqlite.js"],
- ["./src/client/stdlib/tex.js", "stdlib/tex.js"],
- ["./src/client/stdlib/vega-lite.js", "stdlib/vega-lite.js"],
- ["./src/client/stdlib/xlsx.js", "stdlib/xlsx.js"],
- ["./src/client/stdlib/zip.js", "stdlib/zip.js"],
- ...(config.search ? [["./src/client/search.js", "search.js"]] : [])
- ]) {
- const clientPath = getClientPath(entry);
- const outputPath = join("_observablehq", name);
- effects.output.write(`${faint("bundle")} ${clientPath} ${faint("→")} `);
- const code = await (entry.endsWith(".css")
- ? bundleStyles({path: clientPath})
- : rollupClient(clientPath, {minify: true}));
- await effects.writeFile(outputPath, code);
- }
- if (config.search) {
- const outputPath = join("_observablehq", "minisearch.json");
- const code = await searchIndex(config, effects);
- effects.output.write(`${faint("search")} ${faint("→")} `);
- await effects.writeFile(outputPath, code);
+ for (const path of globalImports) {
+ if (path.startsWith("/_observablehq/")) {
+ const clientPath = getClientPath(`./src/client/${path === "/_observablehq/client.js" ? "index.js" : path.slice("/_observablehq/".length)}`); // prettier-ignore
+ effects.output.write(`${faint("build")} ${clientPath} ${faint("→")} `);
+ const define: {[key: string]: string} = {};
+ if (config.search) define["global.__minisearch"] = JSON.stringify(relativePath(path, aliases.get("/_observablehq/minisearch.json")!)); // prettier-ignore
+ const contents = await rollupClient(clientPath, root, path, {minify: true, define});
+ await effects.writeFile(path, contents);
+ }
}
- for (const style of styles) {
- if ("path" in style) {
- const outputPath = join("_import", style.path);
- const sourcePath = join(root, style.path);
- effects.output.write(`${faint("style")} ${sourcePath} ${faint("→")} `);
- const code = await bundleStyles({path: sourcePath});
- await effects.writeFile(outputPath, code);
- } else {
- const outputPath = join("_observablehq", `theme-${style.theme}.css`);
- effects.output.write(`${faint("bundle")} theme-${style.theme}.css ${faint("→")} `);
- const code = await bundleStyles({theme: style.theme});
- await effects.writeFile(outputPath, code);
+ for (const specifier of stylesheets) {
+ if (specifier.startsWith("observablehq:")) {
+ const path = `/_observablehq/${specifier.slice("observablehq:".length)}`;
+ effects.output.write(`${faint("build")} ${specifier} ${faint("→")} `);
+ if (specifier.startsWith("observablehq:theme-")) {
+ const match = /^observablehq:theme-(?[\w-]+(,[\w-]+)*)?\.css$/.exec(specifier);
+ const contents = await bundleStyles({theme: match!.groups!.theme?.split(",") ?? []});
+ await effects.writeFile(path, contents);
+ } else {
+ const clientPath = getClientPath(`./src/client/${path.slice("/_observablehq/".length)}`);
+ const contents = await bundleStyles({path: clientPath});
+ await effects.writeFile(`/_observablehq/${specifier.slice("observablehq:".length)}`, contents);
+ }
+ } else if (specifier.startsWith("npm:")) {
+ effects.output.write(`${faint("copy")} ${specifier} ${faint("→")} `);
+ const path = await resolveNpmImport(root, specifier.slice("npm:".length));
+ const sourcePath = await populateNpmCache(root, path); // TODO effects
+ await effects.copyFile(sourcePath, path);
+ } else if (!/^\w+:/.test(specifier)) {
+ const sourcePath = join(root, specifier);
+ effects.output.write(`${faint("build")} ${sourcePath} ${faint("→")} `);
+ const contents = await bundleStyles({path: sourcePath});
+ const hash = createHash("sha256").update(contents).digest("hex").slice(0, 8);
+ const ext = extname(specifier);
+ const alias = `/${join("_import", dirname(specifier), `${basename(specifier, ext)}.${hash}${ext}`)}`;
+ aliases.set(resolveStylesheetPath(root, specifier), alias);
+ await effects.writeFile(alias, contents);
}
}
}
- // Copy over the referenced files.
+ // Copy over the referenced files, accumulating hashed aliases.
for (const file of files) {
let sourcePath = join(root, file);
- const outputPath = join("_file", file);
if (!existsSync(sourcePath)) {
const loader = Loader.find(root, join("/", file), {useStale: true});
if (!loader) {
@@ -154,30 +159,87 @@ export async function build(
}
}
effects.output.write(`${faint("copy")} ${sourcePath} ${faint("→")} `);
- await effects.copyFile(sourcePath, outputPath);
+ const contents = await readFile(sourcePath);
+ const hash = createHash("sha256").update(contents).digest("hex").slice(0, 8);
+ const ext = extname(file);
+ const alias = `/${join("_file", dirname(file), `${basename(file, ext)}.${hash}${ext}`)}`;
+ aliases.set(resolveFilePath(root, file), alias);
+ await effects.writeFile(alias, contents);
}
- // Copy over the imported modules.
- const importResolver = createImportResolver(root);
- for (const file of imports) {
- const sourcePath = join(root, file);
- const outputPath = join("_import", file);
+ // Download npm imports. TODO It might be nice to use content hashes for
+ // these, too, but it would involve rewriting the files since populateNpmCache
+ // doesn’t let you pass in a resolver.
+ for (const path of globalImports) {
+ if (!path.startsWith("/_npm/")) continue; // skip _observablehq
+ effects.output.write(`${faint("copy")} npm:${resolveNpmSpecifier(path)} ${faint("→")} `);
+ const sourcePath = await populateNpmCache(root, path); // TODO effects
+ await effects.copyFile(sourcePath, path);
+ }
+
+ // Copy over imported local modules, overriding import resolution so that
+ // module hash is incorporated into the file name rather than in the query
+ // string. Note that this hash is not of the content of the module itself, but
+ // of the transitive closure of the module and its imports and files.
+ const resolveImportAlias = (path: string): string => {
+ const hash = getModuleHash(root, path).slice(0, 8);
+ const ext = extname(path);
+ return `/${join("_import", dirname(path), basename(path, ext))}.${hash}${ext}`;
+ };
+ for (const path of localImports) {
+ const sourcePath = join(root, path);
if (!existsSync(sourcePath)) {
effects.logger.error("missing referenced file", sourcePath);
continue;
}
effects.output.write(`${faint("copy")} ${sourcePath} ${faint("→")} `);
- const contents = await rewriteModule(await readFile(sourcePath, "utf-8"), file, importResolver);
- await effects.writeFile(outputPath, contents);
+ const resolveImport = getModuleResolver(root, path);
+ const input = await readFile(sourcePath, "utf-8");
+ const contents = await transpileModule(input, {
+ root,
+ path,
+ async resolveImport(specifier) {
+ return isPathImport(specifier)
+ ? relativePath(join("_import", path), resolveImportAlias(resolvePath(path, specifier)))
+ : resolveImport(specifier);
+ }
+ });
+ const alias = resolveImportAlias(path);
+ aliases.set(resolveImportPath(root, path), alias);
+ await effects.writeFile(alias, contents);
}
- // Copy over required distribution files.
- if (addPublic) {
- for (const [sourcePath, outputPath] of EXTRA_FILES) {
- effects.output.write(`${faint("copy")} ${sourcePath} ${faint("→")} `);
- await effects.copyFile(sourcePath, outputPath);
- }
+ // Render pages, resolving against content-hashed file names!
+ for (const [sourceFile, {page, resolvers}] of pages) {
+ const sourcePath = join(root, sourceFile);
+ const outputPath = join(dirname(sourceFile), basename(sourceFile, ".md") + ".html");
+ const path = join("/", dirname(sourceFile), basename(sourceFile, ".md"));
+ const options = {path, ...config};
+ effects.output.write(`${faint("render")} ${sourcePath} ${faint("→")} `);
+ const html = await renderPage(page, {
+ ...options,
+ resolvers: {
+ ...resolvers,
+ resolveFile(specifier) {
+ const r = resolvers.resolveFile(specifier);
+ const a = aliases.get(resolvePath(path, r));
+ return a ? relativePath(path, a) : specifier; // fallback to specifier if enoent
+ },
+ resolveStylesheet(specifier) {
+ const r = resolvers.resolveStylesheet(specifier);
+ const a = aliases.get(resolvePath(path, r));
+ return a ? relativePath(path, a) : isPathImport(specifier) ? specifier : r; // fallback to specifier if enoent
+ },
+ resolveImport(specifier) {
+ const r = resolvers.resolveImport(specifier);
+ const a = aliases.get(resolvePath(path, r));
+ return a ? relativePath(path, a) : isPathImport(specifier) ? specifier : r; // fallback to specifier if enoent
+ }
+ }
+ });
+ await effects.writeFile(outputPath, html);
}
+
Telemetry.record({event: "build", step: "finish", pageCount});
}
@@ -207,11 +269,3 @@ export class FileBuildEffects implements BuildEffects {
await writeFile(destination, contents);
}
}
-
-function styleEquals(a: Style, b: Style): boolean {
- return "path" in a && "path" in b
- ? a.path === b.path
- : "theme" in a && "theme" in b
- ? a.theme.join() === b.theme.join()
- : false;
-}
diff --git a/src/client/main.js b/src/client/main.js
index 2d295b551..260d3f3e2 100644
--- a/src/client/main.js
+++ b/src/client/main.js
@@ -1,5 +1,4 @@
import {Runtime} from "observablehq:runtime";
-import {registerFile} from "observablehq:stdlib";
import {FileAttachment, Generators, Mutable, resize} from "observablehq:stdlib";
import {inspect, inspectError} from "./inspect.js";
import * as recommendedLibraries from "./stdlib/recommendedLibraries.js";
@@ -19,10 +18,10 @@ const library = {
const runtime = new Runtime(library);
export const main = runtime.module();
-export const cellsById = new Map(); // TODO hide
+const cellsById = new Map();
export function define(cell) {
- const {id, inline, inputs = [], outputs = [], files = [], body} = cell;
+ const {id, inline, inputs = [], outputs = [], body} = cell;
const variables = [];
cellsById.get(id)?.variables.forEach((v) => v.delete());
cellsById.set(id, {cell, variables});
@@ -58,7 +57,11 @@ export function define(cell) {
v.define(outputs.length ? `cell ${id}` : null, inputs, body);
variables.push(v);
for (const o of outputs) variables.push(main.variable(true).define(o, [`cell ${id}`], (exports) => exports[o]));
- for (const f of files) registerFile(f.name, f);
+}
+
+export function undefine(id) {
+ cellsById.get(id)?.variables.forEach((v) => v.delete());
+ cellsById.delete(id);
}
// Note: Element.prototype is instanceof Node, but cannot be inserted!
diff --git a/src/client/preview.js b/src/client/preview.js
index f92ddf934..6bee13c6c 100644
--- a/src/client/preview.js
+++ b/src/client/preview.js
@@ -1,4 +1,5 @@
-import {cellsById, define} from "./main.js";
+import {registerFile} from "npm:@observablehq/stdlib";
+import {undefine} from "./main.js";
import {enableCopyButtons} from "./pre.js";
export * from "./index.js";
@@ -23,13 +24,6 @@ export function open({hash, eval: compile} = {}) {
location.reload();
break;
}
- case "refresh": {
- message.cellIds.forEach((id) => {
- const cell = cellsById.get(id);
- if (cell) define(cell.cell);
- });
- break;
- }
case "update": {
const root = document.querySelector("main");
if (message.previousHash !== hash) {
@@ -39,53 +33,34 @@ export function open({hash, eval: compile} = {}) {
}
hash = message.updatedHash;
let offset = 0;
- for (const {type, oldPos, items} of message.diff) {
+ const addedCells = new Map();
+ const removedCells = new Map();
+ for (const {type, oldPos, items} of message.diffHtml) {
switch (type) {
case "add": {
for (const item of items) {
- switch (item.type) {
- case "html":
- if (oldPos + offset < root.children.length) {
- root.children[oldPos + offset].insertAdjacentHTML("beforebegin", item.html);
- } else {
- root.insertAdjacentHTML("beforeend", item.html);
- }
- ++offset;
- item.cellIds.forEach((id) => {
- const cell = cellsById.get(id);
- if (cell) define(cell.cell);
- });
- break;
- case "cell":
- define({
- id: item.id,
- inline: item.inline,
- inputs: item.inputs,
- outputs: item.outputs,
- files: item.files,
- body: compile(item.body)
- });
- break;
+ const pos = oldPos + offset;
+ if (pos < root.children.length) {
+ root.children[pos].insertAdjacentHTML("beforebegin", item);
+ } else {
+ root.insertAdjacentHTML("beforeend", item);
}
+ indexCells(addedCells, root.children[pos]);
+ ++offset;
}
break;
}
case "remove": {
let removes = 0;
- for (const item of items) {
- switch (item.type) {
- case "html":
- if (oldPos + offset < root.children.length) {
- root.children[oldPos + offset].remove();
- ++removes;
- } else {
- console.error(`remove out of range: ${oldPos + offset} ≮ ${root.children.length}`);
- }
- break;
- case "cell":
- cellsById.get(item.id)?.variables.forEach((v) => v.delete());
- cellsById.delete(item.id);
- break;
+ for (let i = 0; i < items.length; ++i) {
+ const pos = oldPos + offset;
+ if (pos < root.children.length) {
+ const child = root.children[pos];
+ indexCells(removedCells, child);
+ child.remove();
+ ++removes;
+ } else {
+ console.error(`remove out of range: ${pos} ≮ ${root.children.length}`);
}
}
offset -= removes;
@@ -93,22 +68,43 @@ export function open({hash, eval: compile} = {}) {
}
}
}
+ for (const [id, removed] of removedCells) {
+ addedCells.get(id)?.replaceWith(removed);
+ }
+ for (const id of message.diffCode.removed) {
+ undefine(id);
+ }
+ for (const body of message.diffCode.added) {
+ compile(body);
+ }
+ for (const name of message.diffFiles.removed) {
+ registerFile(name, null);
+ }
+ for (const file of message.diffFiles.added) {
+ registerFile(file.name, file);
+ }
+ const {addedStylesheets, removedStylesheets} = message;
+ if (addedStylesheets.length === 1 && removedStylesheets.length === 1) {
+ const [newHref] = addedStylesheets;
+ const [oldHref] = removedStylesheets;
+ const link = document.head.querySelector(`link[rel="stylesheet"][href="${oldHref}"]`);
+ link.href = newHref;
+ } else {
+ for (const href of addedStylesheets) {
+ const link = document.createElement("link");
+ link.rel = "stylesheet";
+ link.type = "text/css";
+ link.crossOrigin = "";
+ link.href = href;
+ document.head.appendChild(link);
+ }
+ for (const href of removedStylesheets) {
+ document.head.querySelector(`link[rel="stylesheet"][href="${href}"]`)?.remove();
+ }
+ }
enableCopyButtons();
break;
}
- case "add-stylesheet": {
- const link = document.createElement("link");
- link.rel = "stylesheet";
- link.type = "text/css";
- link.crossOrigin = "";
- link.href = message.href;
- document.head.appendChild(link);
- break;
- }
- case "remove-stylesheet": {
- document.head.querySelector(`link[rel="stylesheet"][href="${message.href}"]`)?.remove();
- break;
- }
}
};
@@ -120,6 +116,15 @@ export function open({hash, eval: compile} = {}) {
console.info("socket close");
};
+ function indexCells(map, node) {
+ if (node.id.startsWith("cell-")) {
+ map.set(node.id, node);
+ }
+ for (const cell of node.querySelectorAll("[id^=cell-]")) {
+ map.set(cell.id, cell);
+ }
+ }
+
function send(message) {
console.info("↑", message);
socket.send(JSON.stringify(message));
diff --git a/src/client/runtime.js b/src/client/runtime.js
new file mode 100644
index 000000000..d6c74f4a7
--- /dev/null
+++ b/src/client/runtime.js
@@ -0,0 +1 @@
+export {Inspector, Runtime, RuntimeError} from "@observablehq/runtime";
diff --git a/src/client/search-init.ts b/src/client/search-init.ts
index 1c1bab88d..3b58edd06 100644
--- a/src/client/search-init.ts
+++ b/src/client/search-init.ts
@@ -5,8 +5,7 @@ container.setAttribute("data-shortcut", `${/Mac|iPhone/.test(navigator.platform)
// Load search.js on demand
const input = container.querySelector("input")!;
-const base = container.getAttribute("data-root");
-const load = () => import(`${base}_observablehq/search.js`);
+const load = () => import("observablehq:search");
input.addEventListener("focus", load, {once: true});
input.addEventListener("keydown", load, {once: true});
diff --git a/src/client/search.js b/src/client/search.js
index 148dbc5bb..d86347fb1 100644
--- a/src/client/search.js
+++ b/src/client/search.js
@@ -8,7 +8,7 @@ const resultsContainer = document.querySelector("#observablehq-search-results");
const activeClass = "observablehq-link-active";
let currentValue;
-const index = await fetch(import.meta.resolve("./minisearch.json"))
+const index = await fetch(import.meta.resolve(global.__minisearch))
.then((response) => {
if (!response.ok) throw new Error(`unable to load minisearch.json: ${response.status}`);
return response.json();
diff --git a/src/client/stdlib.js b/src/client/stdlib.js
index 451182c42..6a891820a 100644
--- a/src/client/stdlib.js
+++ b/src/client/stdlib.js
@@ -2,3 +2,5 @@ export {AbstractFile, FileAttachment, registerFile} from "./stdlib/fileAttachmen
export * as Generators from "./stdlib/generators/index.js";
export {Mutable} from "./stdlib/mutable.js";
export {resize} from "./stdlib/resize.js";
+export class Library {} // TODO remove @observablehq/runtime dependency
+export const FileAttachments = undefined; // TODO remove @observablehq/runtime dependency
diff --git a/src/client/toc.ts b/src/client/toc.ts
index 4ad6eaffd..8b71a96df 100644
--- a/src/client/toc.ts
+++ b/src/client/toc.ts
@@ -2,9 +2,9 @@ const toc = document.querySelector("#observablehq-toc");
if (toc) {
const highlight = toc.appendChild(document.createElement("div"));
highlight.classList.add("observablehq-secondary-link-highlight");
- const headings = Array.from(document.querySelectorAll(toc.dataset.selector!))
- .reverse()
- .filter((e) => e.querySelector("a.observablehq-header-anchor"));
+ const headings = Array.from(document.querySelector("#observablehq-main")!.querySelectorAll(toc.dataset.selector!))
+ .filter((e) => e.querySelector("a.observablehq-header-anchor"))
+ .reverse();
const links = toc.querySelectorAll(".observablehq-secondary-link");
const relink = (): HTMLElement | undefined => {
for (const link of links) {
diff --git a/src/config.ts b/src/config.ts
index a1cf671a9..834cb34ba 100644
--- a/src/config.ts
+++ b/src/config.ts
@@ -2,8 +2,8 @@ import {basename, dirname, join} from "node:path";
import {visitMarkdownFiles} from "./files.js";
import {formatIsoDate, formatLocaleDate} from "./format.js";
import {parseMarkdown} from "./markdown.js";
+import {resolvePath} from "./path.js";
import {resolveTheme} from "./theme.js";
-import {resolvePath} from "./url.js";
export interface Page {
name: string;
@@ -184,6 +184,6 @@ export function mergeStyle(path: string, style: any, theme: any, defaultStyle: n
: style === null
? null // disable
: style !== undefined
- ? {path: resolvePath(path, style)}
+ ? {path: resolvePath(path, String(style))}
: {theme: normalizeTheme(theme)};
}
diff --git a/src/deploy.ts b/src/deploy.ts
index b050fccb8..2c30b07b3 100644
--- a/src/deploy.ts
+++ b/src/deploy.ts
@@ -352,9 +352,10 @@ class DeployBuildEffects implements BuildEffects {
this.output = effects.output;
}
async copyFile(sourcePath: string, outputPath: string) {
- this.logger.log(outputPath);
+ const relativePath = outputPath.replace(/^\//, "");
+ this.logger.log(relativePath);
try {
- await this.apiClient.postDeployFile(this.deployId, sourcePath, outputPath);
+ await this.apiClient.postDeployFile(this.deployId, sourcePath, relativePath);
} catch (error) {
if (isApiError(error) && error.details.errors.some((e) => e.code === "FILE_QUOTA_EXCEEDED")) {
throw new CliError("You have reached the total file size limit.", {cause: error});
@@ -368,9 +369,10 @@ class DeployBuildEffects implements BuildEffects {
}
}
async writeFile(outputPath: string, content: Buffer | string) {
- this.logger.log(outputPath);
+ const relativePath = outputPath.replace(/^\//, "");
+ this.logger.log(relativePath);
try {
- await this.apiClient.postDeployFileContents(this.deployId, content, outputPath);
+ await this.apiClient.postDeployFileContents(this.deployId, content, relativePath);
} catch (error) {
if (isApiError(error) && error.details.errors.some((e) => e.code === "FILE_QUOTA_EXCEEDED")) {
throw new CliError("You have reached the total file size limit.", {cause: error});
diff --git a/src/fileWatchers.ts b/src/fileWatchers.ts
index 96fd1a564..1ddff7568 100644
--- a/src/fileWatchers.ts
+++ b/src/fileWatchers.ts
@@ -1,18 +1,20 @@
import {type FSWatcher, existsSync, watch} from "node:fs";
+import {join} from "node:path";
import {Loader} from "./dataloader.js";
import {isEnoent} from "./error.js";
import {maybeStat} from "./files.js";
-import {resolvePath} from "./url.js";
+import {resolvePath} from "./path.js";
export class FileWatchers {
private readonly watchers: FSWatcher[] = [];
- static async of(root: string, path: string, names: string[], callback: (name: string) => void) {
+ static async of(root: string, path: string, names: Iterable, callback: (name: string) => void) {
const that = new FileWatchers();
const {watchers} = that;
- for (const name of new Set(names)) {
- const exactPath = resolvePath(root, path, name);
- const watchPath = existsSync(exactPath) ? exactPath : Loader.find(root, resolvePath(path, name))?.path;
+ for (const name of names) {
+ const filePath = resolvePath(path, name);
+ const exactPath = join(root, filePath);
+ const watchPath = existsSync(exactPath) ? exactPath : Loader.find(root, filePath)?.path;
if (!watchPath) continue;
let currentStat = await maybeStat(watchPath);
let watcher: FSWatcher;
@@ -26,7 +28,7 @@ export class FileWatchers {
watcher = watchers[index] = watch(watchPath, watched);
} catch (error) {
if (!isEnoent(error)) throw error;
- console.error(`file no longer exists: ${path}`);
+ console.error(`file no longer exists: ${watchPath}`);
return;
}
setTimeout(() => watched("change"), 100); // delay to avoid a possibly-empty file
diff --git a/src/files.ts b/src/files.ts
index 2da58e669..9c5ddf8e1 100644
--- a/src/files.ts
+++ b/src/files.ts
@@ -3,20 +3,13 @@ import {mkdir, readdir, stat} from "node:fs/promises";
import {dirname, extname, join, normalize, relative} from "node:path";
import {cwd} from "node:process";
import {fileURLToPath} from "node:url";
-import mime from "mime";
import {isEnoent} from "./error.js";
-import type {FileReference} from "./javascript.js";
-import {relativeUrl, resolvePath} from "./url.js";
-
-// A path is local if it doesn’t go outside the the root.
-export function getLocalPath(sourcePath: string, name: string): string | null {
- if (/^\w+:/.test(name)) return null; // URL
- if (name.startsWith("#")) return null; // anchor tag
- const path = resolvePath(sourcePath, name);
- if (path.startsWith("../")) return null; // goes above root
- return path;
-}
+/**
+ * Returns the relative path from the current working directory to the given
+ * Framework source file, such as "./src/client/search.js". This is typically
+ * used to rollup JavaScript and style bundles for built-in modules.
+ */
export function getClientPath(entry: string): string {
const path = relative(cwd(), join(dirname(fileURLToPath(import.meta.url)), "..", entry));
if (path.endsWith(".js") && !existsSync(path)) {
@@ -26,14 +19,7 @@ export function getClientPath(entry: string): string {
return path;
}
-export function fileReference(name: string, sourcePath: string): FileReference {
- return {
- name: relativeUrl(sourcePath, name),
- mimeType: mime.getType(name),
- path: relativeUrl(sourcePath, join("_file", name))
- };
-}
-
+/** Yields every Markdown (.md) file within the given root, recursively. */
export async function* visitMarkdownFiles(root: string): AsyncGenerator {
for await (const file of visitFiles(root)) {
if (extname(file) !== ".md") continue;
@@ -41,6 +27,7 @@ export async function* visitMarkdownFiles(root: string): AsyncGenerator
}
}
+/** Yields every file within the given root, recursively. */
export async function* visitFiles(root: string): AsyncGenerator {
const visited = new Set();
const queue: string[] = [(root = normalize(root))];
@@ -58,7 +45,7 @@ export async function* visitFiles(root: string): AsyncGenerator {
}
}
-// Like fs.stat, but returns undefined instead of throwing ENOENT if not found.
+/** Like fs.stat, but returns undefined instead of throwing ENOENT if not found. */
export async function maybeStat(path: string): Promise {
try {
return await stat(path);
@@ -67,6 +54,7 @@ export async function maybeStat(path: string): Promise {
}
}
+/** Like recursive mkdir, but for the parent of the specified output. */
export async function prepareOutput(outputPath: string): Promise {
const outputDir = dirname(outputPath);
if (outputDir === ".") return;
diff --git a/src/hash.ts b/src/hash.ts
deleted file mode 100644
index 6118f504b..000000000
--- a/src/hash.ts
+++ /dev/null
@@ -1,5 +0,0 @@
-import {createHash} from "node:crypto";
-
-export function computeHash(source: string): string {
- return createHash("sha256").update(source).digest("hex");
-}
diff --git a/src/html.ts b/src/html.ts
index 2f8ce5801..3220efdbe 100644
--- a/src/html.ts
+++ b/src/html.ts
@@ -1,5 +1,123 @@
/* eslint-disable import/no-named-as-default-member */
import he from "he";
+import hljs from "highlight.js";
+import type {DOMWindow} from "jsdom";
+import {JSDOM, VirtualConsole} from "jsdom";
+import {relativePath, resolveLocalPath} from "./path.js";
+
+const ASSET_PROPERTIES: readonly [selector: string, src: string][] = [
+ ["a[href][download]", "href"],
+ ["audio source[src]", "src"],
+ ["audio[src]", "src"],
+ ["img[src]", "src"],
+ ["img[srcset]", "srcset"],
+ ["link[href]", "href"],
+ ["picture source[srcset]", "srcset"],
+ ["video source[src]", "src"],
+ ["video[src]", "src"]
+];
+
+export function isAssetPath(specifier: string): boolean {
+ return !/^(\w+:|#)/.test(specifier);
+}
+
+export function parseHtml(html: string): DOMWindow {
+ return new JSDOM(`${html}`, {virtualConsole: new VirtualConsole()}).window;
+}
+
+export function findAssets(html: string, path: string): Set {
+ const {document} = parseHtml(html);
+ const assets = new Set();
+
+ const maybeAsset = (specifier: string): void => {
+ if (!isAssetPath(specifier)) return;
+ const localPath = resolveLocalPath(path, specifier);
+ if (!localPath) return console.warn(`non-local asset path: ${specifier}`);
+ assets.add(relativePath(path, localPath));
+ };
+
+ for (const [selector, src] of ASSET_PROPERTIES) {
+ for (const element of document.querySelectorAll(selector)) {
+ const source = decodeURIComponent(element.getAttribute(src)!);
+ if (src === "srcset") {
+ for (const s of parseSrcset(source)) {
+ maybeAsset(s);
+ }
+ } else {
+ maybeAsset(source);
+ }
+ }
+ }
+
+ return assets;
+}
+
+export function rewriteHtml(html: string, resolve: (specifier: string) => string = String): string {
+ const {document} = parseHtml(html);
+
+ const maybeResolve = (specifier: string): string => {
+ return isAssetPath(specifier) ? resolve(specifier) : specifier;
+ };
+
+ for (const [selector, src] of ASSET_PROPERTIES) {
+ for (const element of document.querySelectorAll(selector)) {
+ const source = decodeURIComponent(element.getAttribute(src)!);
+ element.setAttribute(src, src === "srcset" ? resolveSrcset(source, maybeResolve) : maybeResolve(source));
+ }
+ }
+
+ // Syntax highlighting for elements. The code could contain an inline
+ // expression within, or other HTML, but we only highlight text nodes that are
+ // direct children of code elements.
+ for (const code of document.querySelectorAll("code[class*='language-']")) {
+ const language = [...code.classList].find((c) => c.startsWith("language-"))?.slice("language-".length);
+ if (!language) continue;
+ if (code.parentElement?.tagName === "PRE") code.parentElement.setAttribute("data-language", language);
+ if (!hljs.getLanguage(language)) continue;
+ let html = "";
+ code.normalize(); // coalesce adjacent text nodes
+ for (const child of code.childNodes) {
+ html += isText(child)
+ ? hljs.highlight(child.textContent!, {language}).value
+ : isElement(child)
+ ? child.outerHTML
+ : "";
+ }
+ code.innerHTML = html;
+ }
+
+ return document.body.innerHTML;
+}
+
+function parseSrcset(srcset: string): string[] {
+ return srcset
+ .trim()
+ .split(/\s*,\s*/)
+ .filter((src) => src)
+ .map((src) => src.split(/\s+/)[0]);
+}
+
+function resolveSrcset(srcset: string, resolve: (specifier: string) => string): string {
+ return srcset
+ .trim()
+ .split(/\s*,\s*/)
+ .filter((src) => src)
+ .map((src) => {
+ const parts = src.split(/\s+/);
+ const path = resolve(parts[0]);
+ if (path) parts[0] = path;
+ return parts.join(" ");
+ })
+ .join(", ");
+}
+
+function isText(node: Node): node is Text {
+ return node.nodeType === 3;
+}
+
+function isElement(node: Node): node is Element {
+ return node.nodeType === 1;
+}
/**
* Denotes a string that contains HTML source; when interpolated into an html
diff --git a/src/javascript.ts b/src/javascript.ts
deleted file mode 100644
index 3e4192f70..000000000
--- a/src/javascript.ts
+++ /dev/null
@@ -1,169 +0,0 @@
-import {Parser, tokTypes} from "acorn";
-import type {Expression, Identifier, Node, Options, Program} from "acorn";
-import {fileReference} from "./files.js";
-import {findAssignments} from "./javascript/assignments.js";
-import {findAwaits} from "./javascript/awaits.js";
-import {findDeclarations} from "./javascript/declarations.js";
-import {findFeatures} from "./javascript/features.js";
-import {findExports, findImportDeclarations, findImports} from "./javascript/imports.js";
-import {createImportResolver, rewriteImports} from "./javascript/imports.js";
-import {findReferences} from "./javascript/references.js";
-import {syntaxError} from "./javascript/syntaxError.js";
-import {Sourcemap} from "./sourcemap.js";
-import {red} from "./tty.js";
-
-export interface FileReference {
- /** The relative path from the page to the original file (e.g., "./test.txt"). */
- name: string;
- /** The MIME type, if known; derived from the file extension. */
- mimeType: string | null;
- /** The relative path from the page to the file in _file (e.g., "../_file/sub/test.txt"). */
- path: string;
-}
-
-export interface ImportReference {
- name: string;
- type: "global" | "local";
-}
-
-export interface Feature {
- type: "FileAttachment";
- name: string;
-}
-
-export interface BaseTranspile {
- id: string;
- expression: boolean;
- inputs?: string[];
- outputs?: string[];
- inline?: boolean;
- files?: FileReference[];
- imports?: ImportReference[];
-}
-
-export interface PendingTranspile extends BaseTranspile {
- body: () => Promise;
-}
-
-export interface Transpile extends BaseTranspile {
- body: string;
-}
-
-export interface ParseOptions {
- id: string;
- root: string;
- sourcePath: string;
- inline?: boolean;
- sourceLine?: number;
- globals?: Set;
- verbose?: boolean;
-}
-
-export function transpileJavaScript(input: string, options: ParseOptions): PendingTranspile {
- const {id, root, sourcePath, verbose = true} = options;
- try {
- const node = parseJavaScript(input, options);
- const files = node.features
- .filter((f) => f.type === "FileAttachment")
- .map(({name}) => fileReference(name, sourcePath));
- const inputs = Array.from(new Set(node.references.map((r) => r.name)));
- const implicitDisplay = node.expression && !inputs.includes("display") && !inputs.includes("view");
- if (implicitDisplay) inputs.push("display"), (node.async = true);
- if (findImportDeclarations(node).length > 0) node.async = true;
- return {
- id,
- expression: node.expression,
- ...(inputs.length ? {inputs} : null),
- ...(options.inline ? {inline: true} : null),
- ...(node.declarations?.length ? {outputs: node.declarations.map(({name}) => name)} : null),
- ...(files.length ? {files} : null),
- body: async () => {
- const output = new Sourcemap(input);
- output.trim();
- if (implicitDisplay) {
- output.insertLeft(0, "display(await(\n");
- output.insertRight(input.length, "\n))");
- }
- await rewriteImports(output, node, sourcePath, createImportResolver(root, "_import"));
- const result = `${node.async ? "async " : ""}(${inputs}) => {
-${String(output)}${node.declarations?.length ? `\nreturn {${node.declarations.map(({name}) => name)}};` : ""}
-}`;
- return result;
- },
- ...(node.imports.length ? {imports: node.imports} : null)
- };
- } catch (error) {
- if (!(error instanceof SyntaxError)) throw error;
- const message = error.message;
- // TODO: Consider showing a code snippet along with the error. Also, consider
- // whether we want to show the file name here.
- if (verbose) {
- let warning = error.message;
- const match = /^(.+)\s\((\d+):(\d+)\)$/.exec(message);
- if (match) {
- const line = +match[2] + (options?.sourceLine ?? 0);
- const column = +match[3] + 1;
- warning = `${match[1]} at line ${line}, column ${column}`;
- } else if (options?.sourceLine) {
- warning = `${message} at line ${options.sourceLine + 1}`;
- }
- console.error(red(`${error.name}: ${warning}`));
- }
- return {
- id,
- expression: true,
- body: async () => `() => { throw new SyntaxError(${JSON.stringify(message)}); }`
- };
- }
-}
-
-export const parseOptions: Options = {ecmaVersion: 13, sourceType: "module"};
-
-export interface JavaScriptNode {
- body: Node;
- declarations: Identifier[] | null; // null for expressions that can’t declare top-level variables, a.k.a outputs
- references: Identifier[]; // the unbound references, a.k.a. inputs
- features: Feature[];
- imports: ImportReference[];
- expression: boolean; // is this an expression or a program cell?
- async: boolean; // does this use top-level await?
-}
-
-function parseJavaScript(input: string, options: ParseOptions): JavaScriptNode {
- const {inline = false, root, sourcePath} = options;
- // First attempt to parse as an expression; if this fails, parse as a program.
- let expression = maybeParseExpression(input, parseOptions);
- if (expression?.type === "ClassExpression" && expression.id) expression = null; // treat named class as program
- if (expression?.type === "FunctionExpression" && expression.id) expression = null; // treat named function as program
- if (!expression && inline) throw new SyntaxError("invalid expression");
- const body = expression ?? Parser.parse(input, parseOptions);
- const exports = findExports(body);
- if (exports.length) throw syntaxError("Unexpected token 'export'", exports[0], input); // disallow exports
- const references = findReferences(body);
- findAssignments(body, references, input);
- const declarations = expression ? null : findDeclarations(body as Program, input);
- const {imports, features: importedFeatures} = findImports(body, root, sourcePath);
- const features = findFeatures(body, sourcePath, references, input);
- return {
- body,
- declarations,
- references,
- features: [...features, ...importedFeatures],
- imports,
- expression: !!expression,
- async: findAwaits(body).length > 0
- };
-}
-
-// Parses a single expression; like parseExpressionAt, but returns null if
-// additional input follows the expression.
-function maybeParseExpression(input: string, options: Options): Expression | null {
- const parser = new (Parser as any)(options, input, 0); // private constructor
- parser.nextToken();
- try {
- const node = parser.parseExpression();
- return parser.type === tokTypes.eof ? node : null;
- } catch {
- return null;
- }
-}
diff --git a/src/javascript/assignments.ts b/src/javascript/assignments.ts
index c867c3b43..6cee3895d 100644
--- a/src/javascript/assignments.ts
+++ b/src/javascript/assignments.ts
@@ -3,8 +3,11 @@ import {simple} from "acorn-walk";
import {defaultGlobals} from "./globals.js";
import {syntaxError} from "./syntaxError.js";
-export function findAssignments(node: Node, references: Identifier[], input: string): void {
- function checkConst(node: Expression | Pattern | VariableDeclaration) {
+type Assignable = Expression | Pattern | VariableDeclaration;
+
+/** Throws a SyntaxError for any illegal assignments. */
+export function checkAssignments(node: Node, references: Identifier[], input: string): void {
+ function checkConst(node: Assignable) {
switch (node.type) {
case "Identifier":
if (references.includes(node)) throw syntaxError(`Assignment to external variable '${node.name}'`, node, input);
@@ -21,22 +24,17 @@ export function findAssignments(node: Node, references: Identifier[], input: str
break;
}
}
-
+ function checkConstLeft({left}: {left: Assignable}) {
+ checkConst(left);
+ }
+ function checkConstArgument({argument}: {argument: Assignable}) {
+ checkConst(argument);
+ }
simple(node, {
- AssignmentExpression(node) {
- checkConst(node.left);
- },
- AssignmentPattern(node) {
- checkConst(node.left);
- },
- UpdateExpression(node) {
- checkConst(node.argument);
- },
- ForOfStatement(node) {
- checkConst(node.left);
- },
- ForInStatement(node) {
- checkConst(node.left);
- }
+ AssignmentExpression: checkConstLeft,
+ AssignmentPattern: checkConstLeft,
+ UpdateExpression: checkConstArgument,
+ ForOfStatement: checkConstLeft,
+ ForInStatement: checkConstLeft
});
}
diff --git a/src/javascript/features.ts b/src/javascript/features.ts
deleted file mode 100644
index 5e969567c..000000000
--- a/src/javascript/features.ts
+++ /dev/null
@@ -1,118 +0,0 @@
-import type {CallExpression, Identifier, Literal, Node, TemplateLiteral} from "acorn";
-import {simple} from "acorn-walk";
-import {getLocalPath} from "../files.js";
-import type {Feature} from "../javascript.js";
-import {defaultGlobals} from "./globals.js";
-import {findReferences} from "./references.js";
-import {syntaxError} from "./syntaxError.js";
-
-export function findFeatures(node: Node, path: string, references: Identifier[], input: string): Feature[] {
- const featureMap = getFeatureReferenceMap(node);
- const features: Feature[] = [];
-
- simple(node, {
- CallExpression(node) {
- const {callee} = node;
- if (callee.type !== "Identifier") return;
- let type = featureMap.get(callee);
- // If this feature wasn’t explicitly imported into this cell, then ignore
- // function calls that are not references to the feature. For example, if
- // there’s a local variable called FileAttachment, that will mask the
- // built-in FileAttachment and won’t be considered a feature.
- if (!type) {
- if (!references.includes(callee)) return;
- const name = callee.name;
- if (name !== "FileAttachment") return;
- type = name;
- }
- features.push(getFeature(type, node, path, input));
- }
- });
-
- return features;
-}
-
-/**
- * Returns a map from Identifier to the feature type, such as FileAttachment.
- * Note that this may be different than the identifier.name because of aliasing.
- */
-export function getFeatureReferenceMap(node: Node): Map {
- const declarations = new Set<{name: string}>();
- const alias = new Map();
- let globals: Set | undefined;
-
- // Find the declared local names of the imported symbol. Only named imports
- // are supported. TODO Support namespace imports?
- simple(node, {
- ImportDeclaration(node) {
- if (node.source.value === "npm:@observablehq/stdlib") {
- for (const specifier of node.specifiers) {
- if (
- specifier.type === "ImportSpecifier" &&
- specifier.imported.type === "Identifier" &&
- specifier.imported.name === "FileAttachment"
- ) {
- declarations.add(specifier.local);
- alias.set(specifier.local.name, specifier.imported.name);
- }
- }
- }
- }
- });
-
- // If the import is masking a global, don’t treat it as a global (since we’ll
- // ignore the import declaration below).
- for (const name of alias.keys()) {
- if (defaultGlobals.has(name)) {
- if (globals === undefined) globals = new Set(defaultGlobals);
- globals.delete(name);
- }
- }
-
- function filterDeclaration(node: {name: string}): boolean {
- return !declarations.has(node); // treat the imported declaration as unbound
- }
-
- const references = findReferences(node, {globals, filterDeclaration});
- const map = new Map();
- for (const r of references) {
- const type = alias.get(r.name);
- if (type) map.set(r, type);
- }
- return map;
-}
-
-export function getFeature(type: Feature["type"], node: CallExpression, path: string, input: string): Feature {
- const {
- arguments: args,
- arguments: [arg]
- } = node;
-
- // Forbid dynamic calls.
- if (args.length !== 1 || !isStringLiteral(arg)) {
- throw syntaxError(`${type} requires a single literal string argument`, node, input);
- }
-
- // Forbid file attachments that are not local paths; normalize the path.
- let name: string | null = getStringLiteralValue(arg);
- if (type === "FileAttachment") {
- const localPath = getLocalPath(path, name);
- if (!localPath) throw syntaxError(`non-local file path: ${name}`, node, input);
- name = localPath;
- }
-
- return {type, name};
-}
-
-export function isStringLiteral(node: any): node is Literal | TemplateLiteral {
- return (
- node &&
- ((node.type === "Literal" && /^['"]/.test(node.raw)) ||
- (node.type === "TemplateLiteral" && node.expressions.length === 0))
- );
-}
-
-// Note: only valid if isStringLiteral returned true;
-export function getStringLiteralValue(node: any): string {
- return node.type === "Literal" ? (node.value as string) : node.quasis[0].value.cooked!;
-}
diff --git a/src/javascript/files.ts b/src/javascript/files.ts
new file mode 100644
index 000000000..a13f01be3
--- /dev/null
+++ b/src/javascript/files.ts
@@ -0,0 +1,121 @@
+import {extname} from "node:path";
+import type {CallExpression, MemberExpression, Node} from "acorn";
+import {ancestor, simple} from "acorn-walk";
+import {relativePath, resolveLocalPath} from "../path.js";
+import {defaultGlobals} from "./globals.js";
+import {findReferences} from "./references.js";
+import {getStringLiteralValue, isStringLiteral} from "./source.js";
+import {syntaxError} from "./syntaxError.js";
+
+export type FileExpression = {
+ /** The FileAttachment(name) call expression. */
+ node: CallExpression;
+ /** The relative path to the source file from the referencing source. */
+ name: string;
+ /** The method, if known; e.g., "arrow" for FileAttachment("foo").arrow. */
+ method?: string;
+};
+
+const KNOWN_FILE_EXTENSIONS = {
+ ".arrow": "arrow",
+ ".csv": "csv",
+ ".db": "sqlite",
+ ".html": "html",
+ ".json": "json",
+ ".parquet": "parquet",
+ ".sqlite": "sqlite",
+ ".tsv": "tsv",
+ ".txt": "text",
+ ".xlsx": "xlsx",
+ ".xml": "xml",
+ ".zip": "zip"
+};
+
+/**
+ * Returns all calls to FileAttachment in the specified body. Throws a
+ * SyntaxError if any of the calls are invalid (e.g., when FileAttachment is
+ * passed a dynamic argument, or references a file that is outside the root).
+ */
+export function findFiles(
+ body: Node,
+ path: string,
+ input: string,
+ aliases?: Iterable // ["FileAttachment"] for implicit import
+): FileExpression[] {
+ const declarations = new Set<{name: string}>();
+ const alias = new Set(aliases);
+ let globals: Set | undefined;
+
+ // Find the declared local names of FileAttachment. Currently only named
+ // imports are supported, and stdlib must be imported without a version. TODO
+ // Support namespace imports? Error if stdlib is expressed with a version?
+ simple(body, {
+ ImportDeclaration(node) {
+ if (node.source.value === "npm:@observablehq/stdlib") {
+ for (const specifier of node.specifiers) {
+ if (
+ specifier.type === "ImportSpecifier" &&
+ specifier.imported.type === "Identifier" &&
+ specifier.imported.name === "FileAttachment"
+ ) {
+ declarations.add(specifier.local);
+ alias.add(specifier.local.name);
+ }
+ }
+ }
+ }
+ });
+
+ // If the import is masking a global, don’t treat it as a global (since we’ll
+ // ignore the import declaration below).
+ for (const name of alias.keys()) {
+ if (defaultGlobals.has(name)) {
+ (globals ??= new Set(defaultGlobals)).delete(name);
+ }
+ }
+
+ // Collect all references to FileAttachment.
+ const references = new Set(
+ findReferences(body, {
+ globals,
+ filterDeclaration: (identifier) => !declarations.has(identifier) // treat the imported declaration as unbound
+ })
+ );
+
+ const files: FileExpression[] = [];
+
+ // Find all calls to FileAttachment. If the call is part of a member
+ // expression such as FileAttachment("foo.txt").csv, use this to determine the
+ // file method ("csv"); otherwise fall back to the file extension to
+ // determine the method. Also enforce that FileAttachment is passed a single
+ // static string literal.
+ //
+ // Note that while dynamic imports require that paths start with ./, ../, or
+ // /, the same requirement is not true for file attachments. Unlike imports,
+ // you can’t reference a global file as a file attachment.
+ ancestor(body, {
+ CallExpression(node, state, stack) {
+ const {callee} = node;
+ if (callee.type !== "Identifier" || !alias.has(callee.name) || !references.has(callee)) return;
+ const args = node.arguments;
+ if (args.length !== 1) throw syntaxError("FileAttachment requires a single literal string argument", node, input);
+ const [arg] = args;
+ if (!isStringLiteral(arg)) throw syntaxError("FileAttachment requires a single literal string argument", node, input); // prettier-ignore
+ const fileName = getStringLiteralValue(arg);
+ const filePath = resolveLocalPath(path, fileName);
+ if (!filePath) throw syntaxError(`non-local file path: ${fileName}`, node, input);
+ const parent = stack[stack.length - 2];
+ const fileMethod =
+ parent && isMemberExpression(parent) && parent.property.type === "Identifier"
+ ? parent.property.name // FileAttachment("foo.csv").csv
+ : KNOWN_FILE_EXTENSIONS[extname(fileName)]; // bare FileAttachment("foo.csv")
+ files.push({node, name: relativePath(path, filePath), method: fileMethod});
+ }
+ });
+
+ return files;
+}
+
+function isMemberExpression(node: Node): node is MemberExpression {
+ return node.type === "MemberExpression";
+}
diff --git a/src/javascript/imports.ts b/src/javascript/imports.ts
index 53988311f..4e35b0651 100644
--- a/src/javascript/imports.ts
+++ b/src/javascript/imports.ts
@@ -1,39 +1,26 @@
-import {createHash} from "node:crypto";
-import {readFileSync} from "node:fs";
-import {join} from "node:path";
-import {Parser} from "acorn";
-import type {Identifier, Node, Program} from "acorn";
+import type {Node} from "acorn";
import type {ExportAllDeclaration, ExportNamedDeclaration, ImportDeclaration, ImportExpression} from "acorn";
import {simple} from "acorn-walk";
-import {isEnoent} from "../error.js";
-import {type Feature, type ImportReference, type JavaScriptNode} from "../javascript.js";
-import {parseOptions} from "../javascript.js";
-import {Sourcemap} from "../sourcemap.js";
-import {relativeUrl, resolvePath} from "../url.js";
-import {getFeature, getFeatureReferenceMap, getStringLiteralValue, isStringLiteral} from "./features.js";
+import {isPathImport, relativePath, resolveLocalPath} from "../path.js";
+import {getStringLiteralValue, isStringLiteral} from "./source.js";
+import {syntaxError} from "./syntaxError.js";
-type ImportNode = ImportDeclaration | ImportExpression;
-type ExportNode = ExportAllDeclaration | ExportNamedDeclaration;
-
-let npmVersionResolutionEnabled = true;
-let remoteModulePreloadEnabled = true;
-
-export function enableNpmVersionResolution(enabled = true) {
- npmVersionResolutionEnabled = enabled;
-}
-
-export function enableRemoteModulePreload(enabled = true) {
- remoteModulePreloadEnabled = enabled;
+export interface ImportReference {
+ /** The relative path to the import from the referencing source. */
+ name: string;
+ /** Is this a reference to a local module, or a non-local (e.g., npm) one? */
+ type: "local" | "global";
+ /** Is this a static import declaration, or a dynamic import expression? */
+ method: "static" | "dynamic";
}
-export interface ImportsAndFeatures {
- imports: ImportReference[];
- features: Feature[];
-}
+export type ImportNode = ImportDeclaration | ImportExpression;
+export type ExportNode = ExportAllDeclaration | ExportNamedDeclaration;
/**
* Finds all export declarations in the specified node. (This is used to
- * disallow exports within JavaScript code blocks.)
+ * disallow exports within JavaScript code blocks.) Note that this includes both
+ * "export const foo" declarations and "export {foo} from bar" declarations.
*/
export function findExports(body: Node): ExportNode[] {
const exports: ExportNode[] = [];
@@ -50,466 +37,46 @@ export function findExports(body: Node): ExportNode[] {
return exports;
}
-/**
- * Finds all imports (both static and dynamic) in the specified node.
- * Recursively processes any imported local ES modules. The returned transitive
- * import paths are relative to the given source path.
- */
-export function findImports(body: Node, root: string, path: string): ImportsAndFeatures {
- const imports: ImportReference[] = [];
- const features: Feature[] = [];
- const paths: string[] = [];
+/** Returns true if the body includes an import declaration. */
+export function hasImportDeclaration(body: Node): boolean {
+ let has = false;
simple(body, {
- ImportDeclaration: findImport,
- ImportExpression: findImport
- });
-
- function findImport(node: ImportNode) {
- if (isStringLiteral(node.source)) {
- const value = getStringLiteralValue(node.source);
- if (isLocalImport(value, path)) {
- paths.push(resolvePath(path, decodeURIComponent(value)));
- } else {
- imports.push({name: value, type: "global"});
- }
- }
- }
-
- // Recursively process any imported local ES modules.
- const transitive = parseLocalImports(root, paths);
- imports.push(...transitive.imports);
- features.push(...transitive.features);
-
- // Make all local paths relative to the source path.
- for (const i of imports) {
- if (i.type === "local") {
- i.name = relativeUrl(path, i.name);
+ ImportDeclaration() {
+ has = true;
}
- }
+ });
- return {imports, features};
+ return has;
}
-
/**
- * Parses the module at the specified path to find transitive imports,
- * processing imported modules recursively. Accumulates visited paths, and
- * appends to imports. The paths here are always relative to the root (unlike
- * findImports above!).
+ * Finds all imports (both static and dynamic, local and global) with
+ * statically-analyzable sources in the specified node. Note: this includes only
+ * direct imports, not transitive imports. Note: this also includes exports, but
+ * exports are only allowed in JavaScript modules (not in Markdown).
*/
-export function parseLocalImports(root: string, paths: string[]): ImportsAndFeatures {
+export function findImports(body: Node, path: string, input: string): ImportReference[] {
const imports: ImportReference[] = [];
- const features: Feature[] = [];
- const set = new Set(paths);
-
- for (const path of set) {
- imports.push({type: "local", name: path});
- try {
- const input = readFileSync(join(root, path), "utf-8");
- const body = Parser.parse(input, parseOptions);
-
- simple(
- body,
- {
- ImportDeclaration: findImport,
- ImportExpression: findImport,
- ExportAllDeclaration: findImport,
- ExportNamedDeclaration: findImport
- },
- undefined,
- path
- );
-
- features.push(...findImportFeatures(body, path, input));
- } catch (error) {
- if (!isEnoent(error) && !(error instanceof SyntaxError)) throw error;
- }
- }
-
- function findImport(node: ImportNode | ExportNode, path: string) {
- if (isStringLiteral(node.source)) {
- const value = getStringLiteralValue(node.source);
- if (isLocalImport(value, path)) {
- set.add(resolvePath(path, value));
- } else {
- imports.push({name: value, type: "global"});
- // non-local imports don't need to be traversed
- }
- }
- }
-
- return {imports, features};
-}
-
-export function findImportFeatures(node: Node, path: string, input: string): Feature[] {
- const featureMap = getFeatureReferenceMap(node);
- const features: Feature[] = [];
-
- simple(node, {
- CallExpression(node) {
- const type = featureMap.get(node.callee as Identifier);
- if (type) features.push(getFeature(type, node, path, input));
- }
- });
-
- return features;
-}
-
-/** Rewrites import specifiers and FileAttachment calls in the specified ES module source. */
-export async function rewriteModule(input: string, path: string, resolver: ImportResolver): Promise {
- const body = Parser.parse(input, parseOptions);
- const featureMap = getFeatureReferenceMap(body);
- const output = new Sourcemap(input);
- const imports: (ImportNode | ExportNode)[] = [];
simple(body, {
- ImportDeclaration: rewriteImport,
- ImportExpression: rewriteImport,
- ExportAllDeclaration: rewriteImport,
- ExportNamedDeclaration: rewriteImport,
- CallExpression(node) {
- const type = featureMap.get(node.callee as Identifier);
- if (type) {
- const feature = getFeature(type, node, path, input); // validate syntax
- if (feature.type === "FileAttachment") {
- const arg = node.arguments[0];
- const result = JSON.stringify(relativeUrl(join("_import", path), feature.name));
- output.replaceLeft(arg.start, arg.end, `${result}, import.meta.url`);
- }
- }
- }
- });
-
- function rewriteImport(node: ImportNode | ExportNode) {
- imports.push(node);
- }
-
- for (const node of imports) {
- if (isStringLiteral(node.source)) {
- output.replaceLeft(
- node.source.start,
- node.source.end,
- JSON.stringify(await resolver(path, getStringLiteralValue(node.source)))
- );
- }
- }
-
- return String(output);
-}
-
-export function findImportDeclarations(cell: JavaScriptNode): ImportDeclaration[] {
- const declarations: ImportDeclaration[] = [];
-
- simple(cell.body, {
- ImportDeclaration(node) {
- if (isStringLiteral(node.source)) {
- declarations.push(node);
- }
- }
+ ImportDeclaration: findImport,
+ ImportExpression: findImport,
+ ExportAllDeclaration: findImport,
+ ExportNamedDeclaration: findImport
});
- return declarations;
-}
-
-/**
- * Rewrites import specifiers in the specified JavaScript fenced code block or
- * inline expression. TODO parallelize multiple static imports.
- */
-export async function rewriteImports(
- output: Sourcemap,
- cell: JavaScriptNode,
- sourcePath: string,
- resolver: ImportResolver
-): Promise {
- const expressions: ImportExpression[] = [];
- const declarations: ImportDeclaration[] = [];
-
- simple(cell.body, {
- ImportExpression(node) {
- if (isStringLiteral(node.source)) {
- expressions.push(node);
- }
- },
- ImportDeclaration(node) {
- if (isStringLiteral(node.source)) {
- declarations.push(node);
- }
+ function findImport(node: ImportNode | ExportNode) {
+ if (!node.source || !isStringLiteral(node.source)) return;
+ const name = decodeURIComponent(getStringLiteralValue(node.source));
+ const method = node.type === "ImportExpression" ? "dynamic" : "static";
+ if (isPathImport(name)) {
+ const localPath = resolveLocalPath(path, name);
+ if (!localPath) throw syntaxError(`non-local import: ${name}`, node, input); // prettier-ignore
+ imports.push({name: relativePath(path, localPath), type: "local", method});
+ } else {
+ imports.push({name, type: "global", method});
}
- });
-
- for (const node of expressions) {
- output.replaceLeft(
- node.source.start,
- node.source.end,
- JSON.stringify(await resolver(sourcePath, getStringLiteralValue(node.source)))
- );
- }
-
- const specifiers: string[] = [];
- const imports: string[] = [];
- for (const node of declarations) {
- output.delete(node.start, node.end + +(output.input[node.end] === "\n"));
- specifiers.push(
- node.specifiers.some(isNotNamespaceSpecifier)
- ? `{${node.specifiers.filter(isNotNamespaceSpecifier).map(rewriteImportSpecifier).join(", ")}}`
- : node.specifiers.find(isNamespaceSpecifier)?.local.name ?? "{}"
- );
- imports.push(`import(${JSON.stringify(await resolver(sourcePath, getStringLiteralValue(node.source)))})`);
}
- if (declarations.length > 1) {
- output.insertLeft(0, `const [${specifiers.join(", ")}] = await Promise.all([${imports.join(", ")}]);\n`);
- } else if (declarations.length === 1) {
- output.insertLeft(0, `const ${specifiers[0]} = await ${imports[0]};\n`);
- }
-}
-
-export type ImportResolver = (path: string, specifier: string) => Promise;
-
-export function createImportResolver(root: string, base: "." | "_import" = "."): ImportResolver {
- return async (path, specifier) => {
- return isLocalImport(specifier, path)
- ? relativeUrl(path, resolvePath(base, path, resolveImportHash(root, path, specifier)))
- : specifier === "npm:@observablehq/runtime"
- ? resolveBuiltin(base, path, "runtime.js")
- : specifier === "npm:@observablehq/stdlib"
- ? resolveBuiltin(base, path, "stdlib.js")
- : specifier === "npm:@observablehq/dot"
- ? resolveBuiltin(base, path, "stdlib/dot.js") // TODO publish to npm
- : specifier === "npm:@observablehq/duckdb"
- ? resolveBuiltin(base, path, "stdlib/duckdb.js") // TODO publish to npm
- : specifier === "npm:@observablehq/inputs"
- ? resolveBuiltin(base, path, "stdlib/inputs.js") // TODO publish to npm
- : specifier === "npm:@observablehq/mermaid"
- ? resolveBuiltin(base, path, "stdlib/mermaid.js") // TODO publish to npm
- : specifier === "npm:@observablehq/tex"
- ? resolveBuiltin(base, path, "stdlib/tex.js") // TODO publish to npm
- : specifier === "npm:@observablehq/sqlite"
- ? resolveBuiltin(base, path, "stdlib/sqlite.js") // TODO publish to npm
- : specifier === "npm:@observablehq/xlsx"
- ? resolveBuiltin(base, path, "stdlib/xlsx.js") // TODO publish to npm
- : specifier === "npm:@observablehq/zip"
- ? resolveBuiltin(base, path, "stdlib/zip.js") // TODO publish to npm
- : specifier.startsWith("npm:")
- ? await resolveNpmImport(specifier.slice("npm:".length))
- : specifier;
- };
-}
-
-function parseNpmSpecifier(specifier: string): {name: string; range?: string; path?: string} {
- const parts = specifier.split("/");
- const namerange = specifier.startsWith("@") ? [parts.shift()!, parts.shift()!].join("/") : parts.shift()!;
- const ranged = namerange.indexOf("@", 1);
- return {
- name: ranged > 0 ? namerange.slice(0, ranged) : namerange,
- range: ranged > 0 ? namerange.slice(ranged + 1) : undefined,
- path: parts.length > 0 ? parts.join("/") : undefined
- };
-}
-
-function formatNpmSpecifier({name, range, path}: {name: string; range?: string; path?: string}): string {
- return `${name}${range ? `@${range}` : ""}${path ? `/${path}` : ""}`;
-}
-
-// Like import, don’t fetch the same package more than once to ensure
-// consistency; restart the server if you want to clear the cache.
-const fetchCache = new Map>();
-
-async function cachedFetch(href: string): Promise<{headers: Headers; body: any}> {
- if (!remoteModulePreloadEnabled) throw new Error("remote module preload is not enabled");
- let promise = fetchCache.get(href);
- if (promise) return promise;
- promise = (async () => {
- const response = await fetch(href);
- if (!response.ok) throw new Error(`unable to fetch: ${href}`);
- const json = /^application\/json(;|$)/.test(response.headers.get("content-type")!);
- const body = await (json ? response.json() : response.text());
- return {headers: response.headers, body};
- })();
- promise.catch(() => fetchCache.delete(href)); // try again on error
- fetchCache.set(href, promise);
- return promise;
-}
-
-async function resolveNpmVersion({name, range}: {name: string; range?: string}): Promise {
- if (!npmVersionResolutionEnabled) throw new Error("npm version resolution is not enabled");
- if (range && /^\d+\.\d+\.\d+([-+].*)?$/.test(range)) return range; // exact version specified
- const specifier = formatNpmSpecifier({name, range});
- const search = range ? `?specifier=${range}` : "";
- const {version} = (await cachedFetch(`https://data.jsdelivr.com/v1/packages/npm/${name}/resolved${search}`)).body;
- if (!version) throw new Error(`unable to resolve version: ${specifier}`);
- return version;
-}
-
-export async function resolveNpmImport(specifier: string): Promise {
- let {name, range, path = "+esm"} = parseNpmSpecifier(specifier); // eslint-disable-line prefer-const
- if (name === "@duckdb/duckdb-wasm" && !range) range = "1.28.0"; // https://github.com/duckdb/duckdb-wasm/issues/1561
- if (name === "apache-arrow" && !range) range = "13.0.0"; // https://github.com/observablehq/framework/issues/750
- if (name === "parquet-wasm" && !range) range = "0.5.0"; // https://github.com/observablehq/framework/issues/733
- if (name === "echarts" && !range) range = "5.4.3"; // https://github.com/observablehq/framework/pull/811
- try {
- return `https://cdn.jsdelivr.net/npm/${name}@${await resolveNpmVersion({name, range})}/${path}`;
- } catch {
- return `https://cdn.jsdelivr.net/npm/${name}${range ? `@${range}` : ""}/${path}`;
- }
-}
-
-const preloadCache = new Map | undefined>>();
-
-/**
- * Fetches the module at the specified URL and returns a promise to any
- * transitive modules it imports (on the same host; only path-based imports are
- * considered), as well as its subresource integrity hash. Only static imports
- * are considered, and the fetched module must be have immutable public caching;
- * dynamic imports may not be used and hence are not preloaded.
- */
-async function fetchModulePreloads(href: string): Promise | undefined> {
- let promise = preloadCache.get(href);
- if (promise) return promise;
- promise = (async () => {
- let response: {headers: any; body: any};
- try {
- response = await cachedFetch(href);
- } catch {
- return;
- }
- const {headers, body} = response;
- const cache = headers.get("cache-control")?.split(/\s*,\s*/);
- if (!cache?.some((c) => c === "immutable") || !cache?.some((c) => c === "public")) return;
- const imports = new Set();
- let program: Program;
- try {
- program = Parser.parse(body, parseOptions);
- } catch (error) {
- if (!isEnoent(error) && !(error instanceof SyntaxError)) throw error;
- return;
- }
- simple(program, {
- ImportDeclaration: findImport,
- ExportAllDeclaration: findImport,
- ExportNamedDeclaration: findImport
- });
- function findImport(node: ImportNode | ExportNode) {
- if (isStringLiteral(node.source)) {
- const value = getStringLiteralValue(node.source);
- if (isPathImport(value)) imports.add(String(new URL(value, href)));
- }
- }
- // TODO integrityCache.set(href, `sha384-${createHash("sha384").update(body).digest("base64")}`);
- return imports;
- })();
- promise.catch(() => preloadCache.delete(href)); // try again on error
- preloadCache.set(href, promise);
- return promise;
-}
-
-const integrityCache = new Map();
-
-/**
- * Given a set of resolved module specifiers (URLs) to preload, fetches any
- * externally-hosted modules to compute the transitively-imported modules; also
- * precomputes the subresource integrity hash for each fetched module.
- */
-export async function resolveModulePreloads(hrefs: Set): Promise {
- if (!remoteModulePreloadEnabled) return;
- let resolve: () => void;
- const visited = new Set();
- const queue = new Set>();
-
- for (const href of hrefs) {
- if (href.startsWith("https:")) {
- enqueue(href);
- }
- }
-
- function enqueue(href: string) {
- if (visited.has(href)) return;
- visited.add(href);
- const promise = (async () => {
- const imports = await fetchModulePreloads(href);
- if (!imports) return;
- for (const i of imports) {
- hrefs.add(i);
- enqueue(i);
- }
- })();
- promise.finally(() => {
- queue.delete(promise);
- queue.size || resolve();
- });
- queue.add(promise);
- }
-
- if (queue.size) return new Promise((y) => (resolve = y));
-}
-
-/**
- * Given a specifier (URL) that was previously resolved by
- * resolveModulePreloads, returns the computed subresource integrity hash.
- */
-export function resolveModuleIntegrity(href: string): string | undefined {
- return integrityCache.get(href);
-}
-
-function resolveBuiltin(base: "." | "_import", path: string, specifier: string): string {
- return relativeUrl(join(base === "." ? "_import" : ".", path), join("_observablehq", specifier));
-}
-
-/**
- * Given the specified local import, applies the ?sha query string based on the
- * content hash of the imported module and its transitively imported modules.
- */
-function resolveImportHash(root: string, path: string, specifier: string): string {
- return `${specifier}?sha=${getModuleHash(root, resolvePath(path, specifier))}`;
-}
-
-/**
- * Resolves the content hash for the module at the specified path within the
- * given source root. This involves parsing the specified module to process
- * transitive imports.
- */
-function getModuleHash(root: string, path: string): string {
- const hash = createHash("sha256");
- try {
- hash.update(readFileSync(join(root, path), "utf-8"));
- } catch (error) {
- if (!isEnoent(error)) throw error;
- }
- // TODO can’t simply concatenate here; we need a delimiter
- const {imports, features} = parseLocalImports(root, [path]);
- for (const i of [...imports, ...features]) {
- if (i.type === "local" || i.type === "FileAttachment") {
- try {
- hash.update(readFileSync(join(root, i.name), "utf-8"));
- } catch (error) {
- if (!isEnoent(error)) throw error;
- continue;
- }
- }
- }
- return hash.digest("hex");
-}
-
-function rewriteImportSpecifier(node) {
- return node.type === "ImportDefaultSpecifier"
- ? `default: ${node.local.name}`
- : node.imported.name === node.local.name
- ? node.local.name
- : `${node.imported.name}: ${node.local.name}`;
-}
-
-export function isPathImport(specifier: string): boolean {
- return ["./", "../", "/"].some((prefix) => specifier.startsWith(prefix));
-}
-
-export function isLocalImport(specifier: string, path: string): boolean {
- return isPathImport(specifier) && !resolvePath(path, specifier).startsWith("../");
-}
-
-function isNamespaceSpecifier(node) {
- return node.type === "ImportNamespaceSpecifier";
-}
-
-function isNotNamespaceSpecifier(node) {
- return node.type !== "ImportNamespaceSpecifier";
+ return imports;
}
diff --git a/src/javascript/module.ts b/src/javascript/module.ts
new file mode 100644
index 000000000..4aa463d91
--- /dev/null
+++ b/src/javascript/module.ts
@@ -0,0 +1,165 @@
+import {createHash} from "node:crypto";
+import {existsSync, readFileSync, statSync} from "node:fs";
+import {join, relative} from "node:path";
+import type {Program} from "acorn";
+import {Parser} from "acorn";
+import {Loader} from "../dataloader.js";
+import {resolvePath} from "../path.js";
+import {findFiles} from "./files.js";
+import {findImports} from "./imports.js";
+import {parseOptions} from "./parse.js";
+
+export type FileInfo = {
+ /** The last-modified time of the file; used to invalidate the cache. */
+ mtimeMs: number;
+ /** The SHA-256 content hash of the file contents. */
+ hash: string;
+};
+
+export type ModuleInfo = {
+ /** The last-modified time of the module; used to invalidate the cache. */
+ mtimeMs: number;
+ /** The SHA-256 content hash of the module contents. */
+ hash: string;
+  /** The module’s local static imports (paths relative to the module). */
+  localStaticImports: Set<string>;
+  /** The module’s local dynamic imports (paths relative to the module). */
+  localDynamicImports: Set<string>;
+  /** The module’s global static imports (typically npm: protocol imports). */
+  globalStaticImports: Set<string>;
+  /** The module’s global dynamic imports (typically npm: protocol imports). */
+  globalDynamicImports: Set<string>;
+  /** The module’s attached file paths (relative to the module). */
+  files: Set<string>;
+  /** The module’s attached file methods. */
+  fileMethods: Set<string>;
+};
+
+const fileInfoCache = new Map<string, FileInfo>();
+const moduleInfoCache = new Map<string, ModuleInfo>();
+
+/**
+ * Resolves the (transitive) content hash for the module at the specified path
+ * within the given source root. This involves parsing modules to process
+ * transitive imports (both static and dynamic). If the module does not exist or
+ * has invalid syntax, returns the hash of empty content; likewise ignores any
+ * transitive imports or files that are invalid or do not exist.
+ */
+export function getModuleHash(root: string, path: string): string {
+ const hash = createHash("sha256");
+ const paths = new Set([path]);
+ for (const path of paths) {
+ const info = getModuleInfo(root, path);
+ if (!info) continue; // ignore missing file
+ hash.update(info.hash);
+ for (const i of info.localStaticImports) {
+ paths.add(resolvePath(path, i));
+ }
+ for (const i of info.localDynamicImports) {
+ paths.add(resolvePath(path, i));
+ }
+ for (const i of info.files) {
+ const f = getFileInfo(root, resolvePath(path, i));
+ if (!f) continue; // ignore missing file
+ hash.update(f.hash);
+ }
+ }
+ return hash.digest("hex");
+}
+
+/**
+ * Returns the information for the module at the specified path within the
+ * source root, or undefined if the module does not exist or has invalid syntax.
+ */
+export function getModuleInfo(root: string, path: string): ModuleInfo | undefined {
+ const key = join(root, path);
+ let mtimeMs: number;
+ try {
+ ({mtimeMs} = statSync(key));
+ } catch {
+ moduleInfoCache.delete(key); // delete stale entry
+ return; // ignore missing file
+ }
+ let info = moduleInfoCache.get(key);
+ if (!info || info.mtimeMs < mtimeMs) {
+ let source: string;
+ let body: Program;
+ try {
+ source = readFileSync(key, "utf-8");
+ body = Parser.parse(source, parseOptions);
+ } catch {
+ moduleInfoCache.delete(key); // delete stale entry
+ return; // ignore parse error
+ }
+ const hash = createHash("sha256").update(source).digest("hex");
+ const imports = findImports(body, path, source);
+ const files = findFiles(body, path, source);
+    const localStaticImports = new Set<string>();
+    const localDynamicImports = new Set<string>();
+    const globalStaticImports = new Set<string>();
+    const globalDynamicImports = new Set<string>();
+ for (const i of imports) {
+ (i.type === "local"
+ ? i.method === "static"
+ ? localStaticImports
+ : localDynamicImports
+ : i.method === "static"
+ ? globalStaticImports
+ : globalDynamicImports
+ ).add(i.name);
+ }
+ moduleInfoCache.set(
+ key,
+ (info = {
+ mtimeMs,
+ hash,
+ files: new Set(files.map((f) => f.name)),
+ fileMethods: new Set(files.map((f) => f.method).filter((m): m is string => m !== undefined)),
+ localStaticImports,
+ localDynamicImports,
+ globalStaticImports,
+ globalDynamicImports
+ })
+ );
+ }
+ return info;
+}
+
+/**
+ * Returns the content hash for the specified file within the source root. If
+ * the file does not exist, we check for the corresponding data loader and
+ * return its hash instead; if neither the file nor a loader exists, returns
+ * the hash of empty content.
+ *
+ * TODO During build, this needs to compute the hash of the generated file, not
+ * the data loader.
+ */
+export function getFileHash(root: string, path: string): string {
+ if (!existsSync(join(root, path))) {
+ const loader = Loader.find(root, path);
+ if (loader) path = relative(root, loader.path);
+ }
+ return getFileInfo(root, path)?.hash ?? createHash("sha256").digest("hex");
+}
+
+/**
+ * Returns the information for the file at the specified path within the source
+ * root, or undefined if the file does not exist.
+ */
+export function getFileInfo(root: string, path: string): FileInfo | undefined {
+ const key = join(root, path);
+ let mtimeMs: number;
+ try {
+ ({mtimeMs} = statSync(key));
+ } catch {
+ fileInfoCache.delete(key); // delete stale entry
+ return; // ignore missing file
+ }
+ let entry = fileInfoCache.get(key);
+ if (!entry || entry.mtimeMs < mtimeMs) {
+ const contents = readFileSync(key);
+ const hash = createHash("sha256").update(contents).digest("hex");
+ fileInfoCache.set(key, (entry = {mtimeMs, hash}));
+ }
+ return entry;
+}
diff --git a/src/javascript/parse.ts b/src/javascript/parse.ts
new file mode 100644
index 000000000..388bb670e
--- /dev/null
+++ b/src/javascript/parse.ts
@@ -0,0 +1,78 @@
+import {Parser, tokTypes} from "acorn";
+import type {Expression, Identifier, Options, Program} from "acorn";
+import {checkAssignments} from "./assignments.js";
+import {findAwaits} from "./awaits.js";
+import {findDeclarations} from "./declarations.js";
+import type {FileExpression} from "./files.js";
+import {findFiles} from "./files.js";
+import type {ImportReference} from "./imports.js";
+import {findExports, findImports} from "./imports.js";
+import {findReferences} from "./references.js";
+import {syntaxError} from "./syntaxError.js";
+
+export interface ParseOptions {
+ /** The path to the source within the source root. */
+ path: string;
+ /** If true, treat the input as an inline expression instead of a fenced code block. */
+ inline?: boolean;
+}
+
+export const parseOptions: Options = {
+ ecmaVersion: 13,
+ sourceType: "module"
+};
+
+export interface JavaScriptNode {
+ body: Program | Expression;
+  declarations: Identifier[] | null; // null for expressions that can’t declare top-level variables, a.k.a. outputs
+ references: Identifier[]; // the unbound references, a.k.a. inputs
+ files: FileExpression[];
+ imports: ImportReference[];
+ expression: boolean; // is this an expression or a program cell?
+ async: boolean; // does this use top-level await?
+ inline: boolean;
+ input: string;
+}
+
+/**
+ * Parses the specified JavaScript code block, or if the inline option is true,
+ * the specified inline JavaScript expression.
+ */
+export function parseJavaScript(input: string, options: ParseOptions): JavaScriptNode {
+ const {inline = false, path} = options;
+ let expression = maybeParseExpression(input, parseOptions); // first attempt to parse as expression
+ if (expression?.type === "ClassExpression" && expression.id) expression = null; // treat named class as program
+ if (expression?.type === "FunctionExpression" && expression.id) expression = null; // treat named function as program
+ if (!expression && inline) throw new SyntaxError("invalid expression"); // inline code must be an expression
+ const body = expression ?? Parser.parse(input, parseOptions); // otherwise parse as a program
+ const exports = findExports(body);
+ if (exports.length) throw syntaxError("Unexpected token 'export'", exports[0], input); // disallow exports
+ const references = findReferences(body);
+ checkAssignments(body, references, input);
+ return {
+ body,
+ declarations: expression ? null : findDeclarations(body as Program, input),
+ references,
+ files: findFiles(body, path, input, ["FileAttachment"]),
+ imports: findImports(body, path, input),
+ expression: !!expression,
+ async: findAwaits(body).length > 0,
+ inline,
+ input
+ };
+}
+
+/**
+ * Parses a single expression; like parseExpressionAt, but returns null if
+ * additional input follows the expression.
+ */
+function maybeParseExpression(input: string, options: Options): Expression | null {
+ const parser = new (Parser as any)(options, input, 0); // private constructor
+ parser.nextToken();
+ try {
+ const node = parser.parseExpression();
+ return parser.type === tokTypes.eof ? node : null;
+ } catch {
+ return null;
+ }
+}
diff --git a/src/javascript/source.ts b/src/javascript/source.ts
new file mode 100644
index 000000000..489b8b320
--- /dev/null
+++ b/src/javascript/source.ts
@@ -0,0 +1,21 @@
+import type {Literal, Node, TemplateLiteral} from "acorn";
+
+export type StringLiteral =
+ | {type: "Literal"; value: string} // FileAttachment("foo.csv")
+ | {type: "TemplateLiteral"; quasis: {value: {cooked: string}}[]}; // FileAttachment(`foo.csv`)
+
+export function isLiteral(node: Node): node is Literal {
+ return node.type === "Literal";
+}
+
+export function isTemplateLiteral(node: Node): node is TemplateLiteral {
+ return node.type === "TemplateLiteral";
+}
+
+export function isStringLiteral(node: Node): node is StringLiteral & Node {
+ return isLiteral(node) ? /^['"]/.test(node.raw!) : isTemplateLiteral(node) ? node.expressions.length === 0 : false;
+}
+
+export function getStringLiteralValue(node: StringLiteral): string {
+ return node.type === "Literal" ? node.value : node.quasis[0].value.cooked;
+}
diff --git a/src/javascript/transpile.ts b/src/javascript/transpile.ts
new file mode 100644
index 000000000..f2c832f40
--- /dev/null
+++ b/src/javascript/transpile.ts
@@ -0,0 +1,169 @@
+import {join} from "node:path/posix";
+import type {ImportDeclaration, ImportDefaultSpecifier, ImportNamespaceSpecifier, ImportSpecifier, Node} from "acorn";
+import {Parser} from "acorn";
+import {simple} from "acorn-walk";
+import {relativePath, resolvePath} from "../path.js";
+import {getModuleResolver} from "../resolvers.js";
+import {Sourcemap} from "../sourcemap.js";
+import {findFiles} from "./files.js";
+import type {ExportNode, ImportNode} from "./imports.js";
+import {hasImportDeclaration} from "./imports.js";
+import type {JavaScriptNode} from "./parse.js";
+import {parseOptions} from "./parse.js";
+import type {StringLiteral} from "./source.js";
+import {getStringLiteralValue, isStringLiteral} from "./source.js";
+
+export interface TranspileOptions {
+ id: string;
+ resolveImport?: (specifier: string) => string;
+}
+
+export function transpileJavaScript(node: JavaScriptNode, {id, resolveImport}: TranspileOptions): string {
+ let async = node.async;
+ const inputs = Array.from(new Set(node.references.map((r) => r.name)));
+ const outputs = Array.from(new Set(node.declarations?.map((r) => r.name)));
+ const display = node.expression && !inputs.includes("display") && !inputs.includes("view");
+ if (display) inputs.push("display"), (async = true);
+ if (hasImportDeclaration(node.body)) async = true;
+ const output = new Sourcemap(node.input).trim();
+ rewriteImportDeclarations(output, node.body, resolveImport);
+ rewriteImportExpressions(output, node.body, resolveImport);
+ if (display) output.insertLeft(0, "display(await(\n").insertRight(node.input.length, "\n))");
+ output.insertLeft(0, `, body: ${async ? "async " : ""}(${inputs}) => {\n`);
+ if (outputs.length) output.insertLeft(0, `, outputs: ${JSON.stringify(outputs)}`);
+ if (inputs.length) output.insertLeft(0, `, inputs: ${JSON.stringify(inputs)}`);
+ if (node.inline) output.insertLeft(0, ", inline: true");
+ output.insertLeft(0, `define({id: ${JSON.stringify(id)}`);
+ if (outputs.length) output.insertRight(node.input.length, `\nreturn {${outputs}};`);
+ output.insertRight(node.input.length, "\n}});\n");
+ return String(output);
+}
+
+export interface TranspileModuleOptions {
+ root: string;
+ path: string;
+  resolveImport?: (specifier: string) => Promise<string>;
+}
+
+/** Rewrites import specifiers and FileAttachment calls in the specified ES module. */
+export async function transpileModule(
+ input: string,
+ {root, path, resolveImport = getModuleResolver(root, path)}: TranspileModuleOptions
+): Promise<string> {
+ const servePath = `/${join("_import", path)}`;
+ const body = Parser.parse(input, parseOptions); // TODO ignore syntax error?
+ const output = new Sourcemap(input);
+ const imports: (ImportNode | ExportNode)[] = [];
+
+ simple(body, {
+ ImportDeclaration: rewriteImport,
+ ImportExpression: rewriteImport,
+ ExportAllDeclaration: rewriteImport,
+ ExportNamedDeclaration: rewriteImport
+ });
+
+ function rewriteImport(node: ImportNode | ExportNode) {
+ imports.push(node);
+ }
+
+ for (const {name, node} of findFiles(body, path, input)) {
+ const p = relativePath(servePath, resolvePath(path, name));
+ output.replaceLeft(node.arguments[0].start, node.arguments[0].end, `${JSON.stringify(p)}, import.meta.url`);
+ }
+
+ for (const node of imports) {
+ if (node.source && isStringLiteral(node.source)) {
+ const specifier = getStringLiteralValue(node.source);
+ output.replaceLeft(node.source.start, node.source.end, JSON.stringify(await resolveImport(specifier)));
+ }
+ }
+
+ return String(output);
+}
+
+function rewriteImportExpressions(
+ output: Sourcemap,
+ body: Node,
+ resolve: (specifier: string) => string = String
+): void {
+ simple(body, {
+ ImportExpression(node) {
+ if (isStringLiteral(node.source)) {
+ output.replaceLeft(
+ node.source.start,
+ node.source.end,
+ JSON.stringify(resolve(getStringLiteralValue(node.source)))
+ );
+ }
+ }
+ });
+}
+
+function rewriteImportDeclarations(
+ output: Sourcemap,
+ body: Node,
+ resolve: (specifier: string) => string = String
+): void {
+ const declarations: ImportDeclaration[] = [];
+
+ simple(body, {
+ ImportDeclaration(node) {
+ if (isStringLiteral(node.source)) {
+ declarations.push(node);
+ }
+ }
+ });
+
+ const specifiers: string[] = [];
+ const imports: string[] = [];
+ for (const node of declarations) {
+ output.delete(node.start, node.end + +(output.input[node.end] === "\n"));
+ specifiers.push(rewriteImportSpecifiers(node));
+ imports.push(`import(${JSON.stringify(resolve(getStringLiteralValue(node.source as StringLiteral)))})`);
+ }
+ if (declarations.length > 1) {
+ output.insertLeft(0, `const [${specifiers.join(", ")}] = await Promise.all([${imports.join(", ")}]);\n`);
+ } else if (declarations.length === 1) {
+ output.insertLeft(0, `const ${specifiers[0]} = await ${imports[0]};\n`);
+ }
+}
+
+function rewriteImportSpecifiers(node: ImportDeclaration): string {
+ return node.specifiers.some(isNotNamespaceSpecifier)
+ ? `{${node.specifiers.filter(isNotNamespaceSpecifier).map(rewriteImportSpecifier).join(", ")}}`
+ : node.specifiers.find(isNamespaceSpecifier)?.local.name ?? "{}";
+}
+
+function rewriteImportSpecifier(node: ImportSpecifier | ImportDefaultSpecifier): string {
+ return isDefaultSpecifier(node)
+ ? `default: ${getLocalName(node)}`
+ : getImportedName(node) === getLocalName(node)
+ ? getLocalName(node)
+ : `${getImportedName(node)}: ${getLocalName(node)}`;
+}
+
+function getLocalName(node: ImportSpecifier | ImportDefaultSpecifier): string {
+ return node.local.name;
+}
+
+function getImportedName(node: ImportSpecifier): string {
+ return node.imported.type === "Identifier" ? node.imported.name : node.imported.raw!;
+}
+
+function isDefaultSpecifier(
+ node: ImportSpecifier | ImportDefaultSpecifier | ImportNamespaceSpecifier
+): node is ImportDefaultSpecifier {
+ return node.type === "ImportDefaultSpecifier";
+}
+
+function isNamespaceSpecifier(
+ node: ImportSpecifier | ImportDefaultSpecifier | ImportNamespaceSpecifier
+): node is ImportNamespaceSpecifier {
+ return node.type === "ImportNamespaceSpecifier";
+}
+
+function isNotNamespaceSpecifier(
+ node: ImportSpecifier | ImportDefaultSpecifier | ImportNamespaceSpecifier
+): node is ImportSpecifier | ImportDefaultSpecifier {
+ return node.type !== "ImportNamespaceSpecifier";
+}
diff --git a/src/libraries.ts b/src/libraries.ts
index 54da0f3c7..dffd7d280 100644
--- a/src/libraries.ts
+++ b/src/libraries.ts
@@ -1,38 +1,153 @@
-import {resolveNpmImport} from "./javascript/imports.js";
+export function getImplicitFileImports(methods: Iterable<string>): Set<string> {
+ const set = setof(methods);
+ const implicits = new Set<string>();
+ if (set.has("csv") || set.has("tsv")) implicits.add("npm:d3-dsv");
+ if (set.has("arrow")) implicits.add("npm:apache-arrow");
+ if (set.has("parquet")) implicits.add("npm:apache-arrow").add("npm:parquet-wasm/esm/arrow1.js");
+ if (set.has("sqlite")) implicits.add("npm:@observablehq/sqlite");
+ if (set.has("xlsx")) implicits.add("npm:@observablehq/xlsx");
+ if (set.has("zip")) implicits.add("npm:@observablehq/zip");
+ return implicits;
+}
+
+export function getImplicitInputImports(inputs: Iterable<string>): Set<string> {
+ const set = setof(inputs);
+ const implicits = new Set<string>();
+ if (set.has("d3")) implicits.add("npm:d3");
+ if (set.has("Plot")) implicits.add("npm:@observablehq/plot");
+ if (set.has("htl") || set.has("html") || set.has("svg")) implicits.add("npm:htl");
+ if (set.has("Inputs")) implicits.add("npm:@observablehq/inputs");
+ if (set.has("dot")) implicits.add("npm:@observablehq/dot");
+ if (set.has("duckdb")) implicits.add("npm:@duckdb/duckdb-wasm");
+ if (set.has("DuckDBClient")) implicits.add("npm:@observablehq/duckdb");
+ if (set.has("_")) implicits.add("npm:lodash");
+ if (set.has("aq")) implicits.add("npm:arquero");
+ if (set.has("Arrow")) implicits.add("npm:apache-arrow");
+ if (set.has("L")) implicits.add("npm:leaflet");
+ if (set.has("mapboxgl")) implicits.add("npm:mapbox-gl");
+ if (set.has("mermaid")) implicits.add("npm:@observablehq/mermaid");
+ if (set.has("SQLite") || set.has("SQLiteDatabaseClient")) implicits.add("npm:@observablehq/sqlite");
+ if (set.has("tex")) implicits.add("npm:@observablehq/tex");
+ if (set.has("topojson")) implicits.add("npm:topojson-client");
+ if (set.has("vl")) implicits.add("observablehq:stdlib/vega-lite");
+ return implicits;
+}
-export function getImplicitSpecifiers(inputs: Set<string>): Set<string> {
- return addImplicitSpecifiers(new Set(), inputs);
+/**
+ * These implicit stylesheets are added to the page on render so that the user
+ * doesn’t have to remember to add them. TODO Support versioned imports, too,
+ * such as "npm:leaflet@1".
+ */
+export function getImplicitStylesheets(imports: Iterable<string>): Set<string> {
+ const set = setof(imports);
+ const implicits = new Set<string>();
+ if (set.has("npm:@observablehq/inputs")) implicits.add("observablehq:stdlib/inputs.css");
+ if (set.has("npm:katex")) implicits.add("npm:katex/dist/katex.min.css");
+ if (set.has("npm:leaflet")) implicits.add("npm:leaflet/dist/leaflet.css");
+ if (set.has("npm:mapbox-gl")) implicits.add("npm:mapbox-gl/dist/mapbox-gl.css");
+ return implicits;
}
-export function addImplicitSpecifiers(specifiers: Set<string>, inputs: Set<string>): typeof specifiers {
- if (inputs.has("d3")) specifiers.add("npm:d3");
- if (inputs.has("Plot")) specifiers.add("npm:d3").add("npm:@observablehq/plot");
- if (inputs.has("htl") || inputs.has("html") || inputs.has("svg")) specifiers.add("npm:htl");
- if (inputs.has("Inputs")) specifiers.add("npm:htl").add("npm:isoformat").add("npm:@observablehq/inputs");
- if (inputs.has("dot")) specifiers.add("npm:@observablehq/dot").add("npm:@viz-js/viz");
- if (inputs.has("duckdb")) specifiers.add("npm:@duckdb/duckdb-wasm");
- if (inputs.has("DuckDBClient")) specifiers.add("npm:@observablehq/duckdb").add("npm:@duckdb/duckdb-wasm");
- if (inputs.has("_")) specifiers.add("npm:lodash");
- if (inputs.has("aq")) specifiers.add("npm:arquero");
- if (inputs.has("Arrow")) specifiers.add("npm:apache-arrow");
- if (inputs.has("L")) specifiers.add("npm:leaflet");
- if (inputs.has("mapboxgl")) specifiers.add("npm:mapbox-gl");
- if (inputs.has("mermaid")) specifiers.add("npm:@observablehq/mermaid").add("npm:mermaid").add("npm:d3");
- if (inputs.has("SQLite") || inputs.has("SQLiteDatabaseClient")) specifiers.add("npm:@observablehq/sqlite");
- if (inputs.has("tex")) specifiers.add("npm:@observablehq/tex").add("npm:katex");
- if (inputs.has("topojson")) specifiers.add("npm:topojson-client");
- if (inputs.has("vl")) specifiers.add("npm:vega").add("npm:vega-lite").add("npm:vega-lite-api");
- return specifiers;
+/**
+ * While transitive imports of JavaScript modules are discovered via parsing,
+ * transitive dependencies on other supporting assets such as WebAssembly files
+ * are often not discoverable statically. Hence, for any recommended library
+ * (that is, any library provided by default in Markdown, including with any
+ * library used by FileAttachment) we manually enumerate the needed additional
+ * downloads here. TODO Support versioned imports, too, such as "npm:leaflet@1".
+ */
+export function getImplicitDownloads(imports: Iterable<string>): Set<string> {
+ const set = setof(imports);
+ const implicits = new Set<string>();
+ if (set.has("npm:@observablehq/duckdb")) {
+ implicits.add("npm:@duckdb/duckdb-wasm/dist/duckdb-mvp.wasm");
+ implicits.add("npm:@duckdb/duckdb-wasm/dist/duckdb-browser-mvp.worker.js");
+ implicits.add("npm:@duckdb/duckdb-wasm/dist/duckdb-eh.wasm");
+ implicits.add("npm:@duckdb/duckdb-wasm/dist/duckdb-browser-eh.worker.js");
+ }
+ if (set.has("npm:@observablehq/sqlite")) {
+ implicits.add("npm:sql.js/dist/sql-wasm.js");
+ implicits.add("npm:sql.js/dist/sql-wasm.wasm");
+ }
+ if (set.has("npm:katex")) {
+ implicits.add("npm:katex/dist/fonts/KaTeX_AMS-Regular.ttf");
+ implicits.add("npm:katex/dist/fonts/KaTeX_AMS-Regular.woff");
+ implicits.add("npm:katex/dist/fonts/KaTeX_AMS-Regular.woff2");
+ implicits.add("npm:katex/dist/fonts/KaTeX_Caligraphic-Bold.ttf");
+ implicits.add("npm:katex/dist/fonts/KaTeX_Caligraphic-Bold.woff");
+ implicits.add("npm:katex/dist/fonts/KaTeX_Caligraphic-Bold.woff2");
+ implicits.add("npm:katex/dist/fonts/KaTeX_Caligraphic-Regular.ttf");
+ implicits.add("npm:katex/dist/fonts/KaTeX_Caligraphic-Regular.woff");
+ implicits.add("npm:katex/dist/fonts/KaTeX_Caligraphic-Regular.woff2");
+ implicits.add("npm:katex/dist/fonts/KaTeX_Fraktur-Bold.ttf");
+ implicits.add("npm:katex/dist/fonts/KaTeX_Fraktur-Bold.woff");
+ implicits.add("npm:katex/dist/fonts/KaTeX_Fraktur-Bold.woff2");
+ implicits.add("npm:katex/dist/fonts/KaTeX_Fraktur-Regular.ttf");
+ implicits.add("npm:katex/dist/fonts/KaTeX_Fraktur-Regular.woff");
+ implicits.add("npm:katex/dist/fonts/KaTeX_Fraktur-Regular.woff2");
+ implicits.add("npm:katex/dist/fonts/KaTeX_Main-Bold.ttf");
+ implicits.add("npm:katex/dist/fonts/KaTeX_Main-Bold.woff");
+ implicits.add("npm:katex/dist/fonts/KaTeX_Main-Bold.woff2");
+ implicits.add("npm:katex/dist/fonts/KaTeX_Main-BoldItalic.ttf");
+ implicits.add("npm:katex/dist/fonts/KaTeX_Main-BoldItalic.woff");
+ implicits.add("npm:katex/dist/fonts/KaTeX_Main-BoldItalic.woff2");
+ implicits.add("npm:katex/dist/fonts/KaTeX_Main-Italic.ttf");
+ implicits.add("npm:katex/dist/fonts/KaTeX_Main-Italic.woff");
+ implicits.add("npm:katex/dist/fonts/KaTeX_Main-Italic.woff2");
+ implicits.add("npm:katex/dist/fonts/KaTeX_Main-Regular.ttf");
+ implicits.add("npm:katex/dist/fonts/KaTeX_Main-Regular.woff");
+ implicits.add("npm:katex/dist/fonts/KaTeX_Main-Regular.woff2");
+ implicits.add("npm:katex/dist/fonts/KaTeX_Math-BoldItalic.ttf");
+ implicits.add("npm:katex/dist/fonts/KaTeX_Math-BoldItalic.woff");
+ implicits.add("npm:katex/dist/fonts/KaTeX_Math-BoldItalic.woff2");
+ implicits.add("npm:katex/dist/fonts/KaTeX_Math-Italic.ttf");
+ implicits.add("npm:katex/dist/fonts/KaTeX_Math-Italic.woff");
+ implicits.add("npm:katex/dist/fonts/KaTeX_Math-Italic.woff2");
+ implicits.add("npm:katex/dist/fonts/KaTeX_SansSerif-Bold.ttf");
+ implicits.add("npm:katex/dist/fonts/KaTeX_SansSerif-Bold.woff");
+ implicits.add("npm:katex/dist/fonts/KaTeX_SansSerif-Bold.woff2");
+ implicits.add("npm:katex/dist/fonts/KaTeX_SansSerif-Italic.ttf");
+ implicits.add("npm:katex/dist/fonts/KaTeX_SansSerif-Italic.woff");
+ implicits.add("npm:katex/dist/fonts/KaTeX_SansSerif-Italic.woff2");
+ implicits.add("npm:katex/dist/fonts/KaTeX_SansSerif-Regular.ttf");
+ implicits.add("npm:katex/dist/fonts/KaTeX_SansSerif-Regular.woff");
+ implicits.add("npm:katex/dist/fonts/KaTeX_SansSerif-Regular.woff2");
+ implicits.add("npm:katex/dist/fonts/KaTeX_Script-Regular.ttf");
+ implicits.add("npm:katex/dist/fonts/KaTeX_Script-Regular.woff");
+ implicits.add("npm:katex/dist/fonts/KaTeX_Script-Regular.woff2");
+ implicits.add("npm:katex/dist/fonts/KaTeX_Size1-Regular.ttf");
+ implicits.add("npm:katex/dist/fonts/KaTeX_Size1-Regular.woff");
+ implicits.add("npm:katex/dist/fonts/KaTeX_Size1-Regular.woff2");
+ implicits.add("npm:katex/dist/fonts/KaTeX_Size2-Regular.ttf");
+ implicits.add("npm:katex/dist/fonts/KaTeX_Size2-Regular.woff");
+ implicits.add("npm:katex/dist/fonts/KaTeX_Size2-Regular.woff2");
+ implicits.add("npm:katex/dist/fonts/KaTeX_Size3-Regular.ttf");
+ implicits.add("npm:katex/dist/fonts/KaTeX_Size3-Regular.woff");
+ implicits.add("npm:katex/dist/fonts/KaTeX_Size3-Regular.woff2");
+ implicits.add("npm:katex/dist/fonts/KaTeX_Size4-Regular.ttf");
+ implicits.add("npm:katex/dist/fonts/KaTeX_Size4-Regular.woff");
+ implicits.add("npm:katex/dist/fonts/KaTeX_Size4-Regular.woff2");
+ implicits.add("npm:katex/dist/fonts/KaTeX_Typewriter-Regular.ttf");
+ implicits.add("npm:katex/dist/fonts/KaTeX_Typewriter-Regular.woff");
+ implicits.add("npm:katex/dist/fonts/KaTeX_Typewriter-Regular.woff2");
+ }
+ return implicits;
}
-export async function getImplicitStylesheets(specifiers: Set<string>): Promise<Set<string>> {
- return addImplicitStylesheets(new Set(), specifiers);
+export function getImplicitDependencies(imports: Iterable<string>): Set<string> {
+ const set = setof(imports);
+ const implicits = new Set<string>();
+ if (set.has("npm:@observablehq/dot")) implicits.add("npm:@viz-js/viz");
+ if (set.has("npm:@observablehq/duckdb")) implicits.add("npm:@duckdb/duckdb-wasm");
+ if (set.has("npm:@observablehq/inputs")) implicits.add("npm:htl").add("npm:isoformat");
+ if (set.has("npm:@observablehq/mermaid")) implicits.add("npm:mermaid");
+ if (set.has("npm:@observablehq/tex")) implicits.add("npm:katex");
+ if (set.has("npm:@observablehq/xlsx")) implicits.add("npm:exceljs");
+ if (set.has("npm:@observablehq/zip")) implicits.add("npm:jszip");
+ if (set.has("observablehq:stdlib/vega-lite")) implicits.add("npm:vega-lite-api").add("npm:vega-lite").add("npm:vega");
+ return implicits;
}
-export async function addImplicitStylesheets(stylesheets: Set<string>, specifiers: Set<string>): Promise<Set<string>> {
- if (specifiers.has("npm:@observablehq/inputs")) stylesheets.add("observablehq:stdlib/inputs.css");
- if (specifiers.has("npm:katex")) stylesheets.add(await resolveNpmImport("katex/dist/katex.min.css"));
- if (specifiers.has("npm:leaflet")) stylesheets.add(await resolveNpmImport("leaflet/dist/leaflet.css"));
- if (specifiers.has("npm:mapbox-gl")) stylesheets.add(await resolveNpmImport("mapbox-gl/dist/mapbox-gl.css"));
- return stylesheets;
+function setof<T>(iterable: Iterable<T>): Set<T> {
+ return iterable instanceof Set ? iterable : new Set(iterable);
}
diff --git a/src/markdown.ts b/src/markdown.ts
index 570acb452..d48dddc99 100644
--- a/src/markdown.ts
+++ b/src/markdown.ts
@@ -1,80 +1,47 @@
+/* eslint-disable import/no-named-as-default-member */
import {createHash} from "node:crypto";
import {readFile} from "node:fs/promises";
-import {type Patch, type PatchItem, getPatch} from "fast-array-diff";
-import equal from "fast-deep-equal";
import matter from "gray-matter";
-import hljs from "highlight.js";
-import {parseHTML} from "linkedom";
+import he from "he";
import MarkdownIt from "markdown-it";
-import {type RuleCore} from "markdown-it/lib/parser_core.js";
-import {type RuleInline} from "markdown-it/lib/parser_inline.js";
-import {type RenderRule, type default as Renderer} from "markdown-it/lib/renderer.js";
+import type {RuleCore} from "markdown-it/lib/parser_core.js";
+import type {RuleInline} from "markdown-it/lib/parser_inline.js";
+import type {RenderRule} from "markdown-it/lib/renderer.js";
import MarkdownItAnchor from "markdown-it-anchor";
-import {isEnoent} from "./error.js";
-import {fileReference, getLocalPath} from "./files.js";
-import {computeHash} from "./hash.js";
+import type {Config} from "./config.js";
+import {mergeStyle} from "./config.js";
import {parseInfo} from "./info.js";
-import type {FileReference, ImportReference, PendingTranspile, Transpile} from "./javascript.js";
-import {transpileJavaScript} from "./javascript.js";
+import type {JavaScriptNode} from "./javascript/parse.js";
+import {parseJavaScript} from "./javascript/parse.js";
+import {relativePath} from "./path.js";
import {transpileTag} from "./tag.js";
-import {resolvePath} from "./url.js";
+import {InvalidThemeError} from "./theme.js";
+import {red} from "./tty.js";
-export interface HtmlPiece {
- type: "html";
+export interface MarkdownCode {
id: string;
- html: string;
- cellIds?: string[];
-}
-
-export interface CellPiece extends Transpile {
- type: "cell";
+ node: JavaScriptNode;
}
-export type ParsePiece = HtmlPiece | CellPiece;
-
-export interface ParseResult {
+export interface MarkdownPage {
title: string | null;
html: string;
data: {[key: string]: any} | null;
- files: FileReference[];
- imports: ImportReference[];
- pieces: HtmlPiece[];
- cells: CellPiece[];
- hash: string;
+ style: string | null;
+ code: MarkdownCode[];
}
-interface RenderPiece {
- html: string;
- code: PendingTranspile[];
-}
-
-interface ParseContext {
- pieces: RenderPiece[];
- files: FileReference[];
- imports: ImportReference[];
+export interface ParseContext {
+ code: MarkdownCode[];
startLine: number;
currentLine: number;
}
-const ELEMENT_NODE = 1; // Node.ELEMENT_NODE
-const TEXT_NODE = 3; // Node.TEXT_NODE
-
-// Returns true if the given document contains exactly one top-level element,
-// ignoring any surrounding whitespace text nodes.
-function isSingleElement(document: Document): boolean {
- let {firstChild: first, lastChild: last} = document;
- while (first?.nodeType === TEXT_NODE && !first?.textContent?.trim()) first = first.nextSibling;
- while (last?.nodeType === TEXT_NODE && !last?.textContent?.trim()) last = last.previousSibling;
- return first !== null && first === last && first.nodeType === ELEMENT_NODE;
-}
-
function uniqueCodeId(context: ParseContext, content: string): string {
- const hash = computeHash(content).slice(0, 8);
+ const hash = createHash("sha256").update(content).digest("hex").slice(0, 8);
let id = hash;
let count = 1;
- while (context.pieces.some((piece) => piece.code.some((code) => code.id === id))) {
- id = `${hash}-${count++}`;
- }
+ while (context.code.some((code) => code.id === id)) id = `${hash}-${count++}`;
return id;
}
@@ -98,38 +65,51 @@ function getLiveSource(content: string, tag: string): string | undefined {
: undefined;
}
+// TODO sourceLine and remap syntax error position; consider showing a code
+// snippet along with the error. Also, consider whether we want to show the
+// file name here.
+//
+// const message = error.message;
+// if (verbose) {
+// let warning = error.message;
+// const match = /^(.+)\s\((\d+):(\d+)\)$/.exec(message);
+// if (match) {
+// const line = +match[2] + (options?.sourceLine ?? 0);
+// const column = +match[3] + 1;
+// warning = `${match[1]} at line ${line}, column ${column}`;
+// } else if (options?.sourceLine) {
+// warning = `${message} at line ${options.sourceLine + 1}`;
+// }
+// console.error(red(`${error.name}: ${warning}`));
+// }
+
function makeFenceRenderer(root: string, baseRenderer: RenderRule, sourcePath: string): RenderRule {
return (tokens, idx, options, context: ParseContext, self) => {
const token = tokens[idx];
const {tag, attributes} = parseInfo(token.info);
token.info = tag;
- let result = "";
- let count = 0;
+ let html = "";
const source = isFalse(attributes.run) ? undefined : getLiveSource(token.content, tag);
if (source != null) {
const id = uniqueCodeId(context, token.content);
- const sourceLine = context.startLine + context.currentLine;
- const transpile = transpileJavaScript(source, {
- id,
- root,
- sourcePath,
- sourceLine
- });
- extendPiece(context, {code: [transpile]});
- if (transpile.files) context.files.push(...transpile.files);
- if (transpile.imports) context.imports.push(...transpile.imports);
- result += `
\n`;
- count++;
+ try {
+ // TODO const sourceLine = context.startLine + context.currentLine;
+ const node = parseJavaScript(source, {path: sourcePath});
+ context.code.push({id, node});
+ html += `
\n`;
+ } catch (error) {
+ if (!(error instanceof SyntaxError)) throw error;
+ html += `
+
SyntaxError: ${he.escape(error.message)}
+
\n`;
+ }
}
if (attributes.echo == null ? source == null : !isFalse(attributes.echo)) {
- result += baseRenderer(tokens, idx, options, context, self);
- count++;
+ html += baseRenderer(tokens, idx, options, context, self);
}
- // Tokens should always be rendered as a single block element.
- if (count > 1) result = "" + result + "
";
- return result;
+ return html;
};
}
@@ -264,19 +244,21 @@ const transformPlaceholderCore: RuleCore = (state) => {
function makePlaceholderRenderer(root: string, sourcePath: string): RenderRule {
return (tokens, idx, options, context: ParseContext) => {
- const id = uniqueCodeId(context, tokens[idx].content);
const token = tokens[idx];
- const transpile = transpileJavaScript(token.content, {
- id,
- root,
- sourcePath,
- inline: true,
- sourceLine: context.startLine + context.currentLine
- });
- extendPiece(context, {code: [transpile]});
- if (transpile.files) context.files.push(...transpile.files);
- if (transpile.imports) context.imports.push(...transpile.imports);
- return ` `;
+ const id = uniqueCodeId(context, token.content);
+ try {
+ // TODO sourceLine: context.startLine + context.currentLine
+ const node = parseJavaScript(token.content, {path: sourcePath, inline: true});
+ context.code.push({id, node});
+ return ` `;
+ } catch (error) {
+ if (!(error instanceof SyntaxError)) throw error;
+ return `
+ SyntaxError: ${he.escape(
+ error.message
+ )}
+ `;
+ }
};
}
@@ -287,137 +269,16 @@ function makeSoftbreakRenderer(baseRenderer: RenderRule): RenderRule {
};
}
-function extendPiece(context: ParseContext, extend: Partial<RenderPiece>) {
- if (context.pieces.length === 0) context.pieces.push({html: "", code: []});
- const last = context.pieces[context.pieces.length - 1];
- context.pieces[context.pieces.length - 1] = {
- html: last.html + (extend.html ?? ""),
- code: [...last.code, ...(extend.code ? extend.code : [])]
- };
-}
-
-function renderIntoPieces(renderer: Renderer, root: string, sourcePath: string): Renderer["render"] {
- return (tokens, options, context: ParseContext) => {
- const rules = renderer.rules;
- for (let i = 0, len = tokens.length; i < len; i++) {
- const type = tokens[i].type;
- if (tokens[i].map) context.currentLine = tokens[i].map![0];
- let html = "";
- if (type === "inline") {
- html = renderer.renderInline(tokens[i].children!, options, context);
- } else if (typeof rules[type] !== "undefined") {
- if (tokens[i].level === 0 && tokens[i].nesting !== -1) context.pieces.push({html: "", code: []});
- html = rules[type]!(tokens, i, options, context, renderer);
- } else {
- if (tokens[i].level === 0 && tokens[i].nesting !== -1) context.pieces.push({html: "", code: []});
- html = renderer.renderToken(tokens, i, options);
- }
- extendPiece(context, {html});
- }
- let result = "";
- for (const piece of context.pieces) {
- result += piece.html = normalizePieceHtml(piece.html, sourcePath, context);
- }
- return result;
- };
-}
-
-const SUPPORTED_PROPERTIES: readonly {query: string; src: "href" | "src" | "srcset"}[] = Object.freeze([
- {query: "a[href][download]", src: "href"},
- {query: "audio[src]", src: "src"},
- {query: "audio source[src]", src: "src"},
- {query: "img[src]", src: "src"},
- {query: "img[srcset]", src: "srcset"},
- {query: "link[href]", src: "href"},
- {query: "picture source[srcset]", src: "srcset"},
- {query: "video[src]", src: "src"},
- {query: "video source[src]", src: "src"}
-]);
-
-export function normalizePieceHtml(html: string, sourcePath: string, context: ParseContext): string {
- const {document} = parseHTML(html);
-
- // Extracting references to files (such as from linked stylesheets).
- const filePaths = new Set<string>();
- const resolvePath = (source: string): FileReference | undefined => {
- const path = getLocalPath(sourcePath, source);
- if (!path) return;
- const file = fileReference(path, sourcePath);
- if (!filePaths.has(file.path)) {
- filePaths.add(file.path);
- context.files.push(file);
- }
- return file;
- };
- for (const {query, src} of SUPPORTED_PROPERTIES) {
- for (const element of document.querySelectorAll(query)) {
- if (src === "srcset") {
- const srcset = element.getAttribute(src)!;
- const paths = srcset
- .split(",")
- .map((p) => {
- const parts = p.trim().split(/\s+/);
- const source = decodeURIComponent(parts[0]);
- const file = resolvePath(source);
- return file ? `${file.path} ${parts.slice(1).join(" ")}`.trim() : parts.join(" ");
- })
- .filter((p) => !!p);
- if (paths && paths.length > 0) element.setAttribute(src, paths.join(", "));
- } else {
- const source = decodeURIComponent(element.getAttribute(src)!);
- const file = resolvePath(source);
- if (file) element.setAttribute(src, file.path);
- }
- }
- }
-
- // Syntax highlighting for elements. The code could contain an inline
- // expression within, or other HTML, but we only highlight text nodes that are
- // direct children of code elements.
- for (const code of document.querySelectorAll("code[class*='language-']")) {
- const language = [...code.classList].find((c) => c.startsWith("language-"))?.slice("language-".length);
- if (!language) continue;
- if (code.parentElement?.tagName === "PRE") code.parentElement.setAttribute("data-language", language);
- if (!hljs.getLanguage(language)) continue;
- let html = "";
- code.normalize(); // coalesce adjacent text nodes
- for (const child of code.childNodes) {
- html += child.nodeType === TEXT_NODE ? hljs.highlight(child.textContent!, {language}).value : String(child);
- }
- code.innerHTML = html;
- }
-
- // Ensure that the HTML for each piece generates exactly one top-level
- // element. This is necessary for incremental update, and ensures that our
- // parsing of the Markdown is consistent with the resulting HTML structure.
- return isSingleElement(document) ? String(document) : `${document} `;
-}
-
-function toParsePieces(pieces: RenderPiece[]): HtmlPiece[] {
- return pieces.map((piece) => ({
- type: "html",
- id: "",
- cellIds: piece.code.map((code) => `${code.id}`),
- html: piece.html
- }));
-}
-
-async function toParseCells(pieces: RenderPiece[]): Promise<CellPiece[]> {
- const cellPieces: CellPiece[] = [];
- for (const piece of pieces) {
- for (const {body, ...rest} of piece.code) {
- cellPieces.push({type: "cell", ...rest, body: await body()});
- }
- }
- return cellPieces;
-}
-
export interface ParseOptions {
root: string;
path: string;
+ style?: Config["style"];
}
-export async function parseMarkdown(sourcePath: string, {root, path}: ParseOptions): Promise<ParseResult> {
+export async function parseMarkdown(
+ sourcePath: string,
+ {root, path, style: configStyle}: ParseOptions
+): Promise<MarkdownPage> {
const source = await readFile(sourcePath, "utf-8");
const parts = matter(source, {});
const md = MarkdownIt({html: true});
@@ -427,41 +288,33 @@ export async function parseMarkdown(sourcePath: string, {root, path}: ParseOptio
md.renderer.rules.placeholder = makePlaceholderRenderer(root, path);
md.renderer.rules.fence = makeFenceRenderer(root, md.renderer.rules.fence!, path);
md.renderer.rules.softbreak = makeSoftbreakRenderer(md.renderer.rules.softbreak!);
- md.renderer.render = renderIntoPieces(md.renderer, root, path);
- const context: ParseContext = {files: [], imports: [], pieces: [], startLine: 0, currentLine: 0};
+ const code: MarkdownCode[] = [];
+ const context: ParseContext = {code, startLine: 0, currentLine: 0};
const tokens = md.parse(parts.content, context);
- const html = md.renderer.render(tokens, md.options, context); // Note: mutates context.pieces, context.files!
+ const html = md.renderer.render(tokens, md.options, context); // Note: mutates code, assets!
+ const style = getStylesheet(path, parts.data, configStyle);
return {
html,
data: isEmpty(parts.data) ? null : parts.data,
title: parts.data?.title ?? findTitle(tokens) ?? null,
- files: context.files,
- imports: context.imports,
- pieces: toParsePieces(context.pieces),
- cells: await toParseCells(context.pieces),
- hash: await computeMarkdownHash(source, root, path, context.imports)
+ style,
+ code
};
}
-async function computeMarkdownHash(
- contents: string,
- root: string,
- path: string,
- imports: ImportReference[]
-): Promise<string> {
- const hash = createHash("sha256").update(contents);
- // TODO can’t simply concatenate here; we need a delimiter
- for (const i of imports) {
- if (i.type === "local") {
- try {
- hash.update(await readFile(resolvePath(root, path, i.name), "utf-8"));
- } catch (error) {
- if (!isEnoent(error)) throw error;
- continue;
- }
- }
+function getStylesheet(path: string, data: MarkdownPage["data"], style: Config["style"] = null): string | null {
+ try {
+ style = mergeStyle(path, data?.style, data?.theme, style);
+ } catch (error) {
+ if (!(error instanceof InvalidThemeError)) throw error;
+ console.error(red(String(error))); // TODO error during build
+ style = {theme: []};
}
- return hash.digest("hex");
+ return !style
+ ? null
+ : "path" in style
+ ? relativePath(path, style.path)
+ : `observablehq:theme-${style.theme.join(",")}.css`;
}
// TODO Use gray-matter’s parts.isEmpty, but only when it’s accurate.
@@ -487,51 +340,3 @@ function findTitle(tokens: ReturnType<MarkdownIt["parse"]>): string | undefined
}
}
}
-
-function diffReducer(patch: PatchItem<ParsePiece>) {
- // Remove body from remove updates, we just need the ids.
- if (patch.type === "remove") {
- return {
- ...patch,
- type: "remove",
- items: patch.items.map((item) => ({
- type: item.type,
- id: item.id,
- ...("cellIds" in item ? {cellIds: item.cellIds} : null)
- }))
- } as const;
- }
- return patch;
-}
-
-// Cells are unordered
-function getCellsPatch(prevCells: CellPiece[], nextCells: CellPiece[]): Patch<CellPiece> {
- return prevCells
- .filter((prev) => !nextCells.some((next) => equal(prev, next)))
- .map(
- (cell): PatchItem<CellPiece> => ({
- type: "remove",
- oldPos: prevCells.indexOf(cell),
- newPos: -1,
- items: [cell]
- })
- )
- .concat(
- nextCells
- .filter((next) => !prevCells.some((prev) => equal(next, prev)))
- .map(
- (cell): PatchItem<CellPiece> => ({
- type: "add",
- oldPos: -1,
- newPos: nextCells.indexOf(cell),
- items: [cell]
- })
- )
- );
-}
-
-export function diffMarkdown(oldParse: ParseResult, newParse: ParseResult) {
- return getPatch(oldParse.pieces, newParse.pieces, equal)
- .concat(getCellsPatch(oldParse.cells, newParse.cells))
- .map(diffReducer);
-}
diff --git a/src/npm.ts b/src/npm.ts
new file mode 100644
index 000000000..04cde47a8
--- /dev/null
+++ b/src/npm.ts
@@ -0,0 +1,198 @@
+import {existsSync} from "node:fs";
+import {mkdir, readFile, readdir, writeFile} from "node:fs/promises";
+import {dirname, join} from "node:path";
+import {Parser} from "acorn";
+import {simple} from "acorn-walk";
+import {rsort, satisfies} from "semver";
+import {isEnoent} from "./error.js";
+import type {ExportNode, ImportNode, ImportReference} from "./javascript/imports.js";
+import {findImports} from "./javascript/imports.js";
+import {parseOptions} from "./javascript/parse.js";
+import {getStringLiteralValue, isStringLiteral} from "./javascript/source.js";
+import {relativePath} from "./path.js";
+import {Sourcemap} from "./sourcemap.js";
+import {faint} from "./tty.js";
+
+export interface NpmSpecifier {
+ name: string;
+ range?: string;
+ path?: string;
+}
+
+export function parseNpmSpecifier(specifier: string): NpmSpecifier {
+ const parts = specifier.split("/");
+ const namerange = specifier.startsWith("@") ? [parts.shift()!, parts.shift()!].join("/") : parts.shift()!;
+ const ranged = namerange.indexOf("@", 1);
+ return {
+ name: ranged > 0 ? namerange.slice(0, ranged) : namerange,
+ range: ranged > 0 ? namerange.slice(ranged + 1) : undefined,
+ path: parts.length > 0 ? parts.join("/") : undefined
+ };
+}
+
+export function formatNpmSpecifier({name, range, path}: NpmSpecifier): string {
+ return `${name}${range ? `@${range}` : ""}${path ? `/${path}` : ""}`;
+}
+
+/** Rewrites /npm/ import specifiers to be relative paths to /_npm/. */
+export function rewriteNpmImports(input: string, path: string): string {
+ const body = Parser.parse(input, parseOptions);
+ const output = new Sourcemap(input);
+
+ simple(body, {
+ ImportDeclaration: rewriteImport,
+ ImportExpression: rewriteImport,
+ ExportAllDeclaration: rewriteImport,
+ ExportNamedDeclaration: rewriteImport
+ });
+
+ function rewriteImport(node: ImportNode | ExportNode) {
+ if (node.source && isStringLiteral(node.source)) {
+ let value = getStringLiteralValue(node.source);
+ if (value.startsWith("/npm/")) {
+ value = `/_npm/${value.slice("/npm/".length)}`;
+ if (value.endsWith("/+esm")) value += ".js";
+ value = relativePath(path, value);
+ output.replaceLeft(node.source.start, node.source.end, JSON.stringify(value));
+ }
+ }
+ }
+
+ // TODO Preserve the source map, but download it too.
+ return String(output).replace(/^\/\/# sourceMappingURL=.*$\n?/m, "");
+}
+
+const npmRequests = new Map<string, Promise<string>>();
+
+/** Note: path must start with "/_npm/". */
+export async function populateNpmCache(root: string, path: string): Promise<string> {
+ if (!path.startsWith("/_npm/")) throw new Error(`invalid npm path: ${path}`);
+ const filePath = join(root, ".observablehq", "cache", path);
+ if (existsSync(filePath)) return filePath;
+ let promise = npmRequests.get(path);
+ if (promise) return promise; // coalesce concurrent requests
+ promise = (async function () {
+ const specifier = resolveNpmSpecifier(path);
+ const href = `https://cdn.jsdelivr.net/npm/${specifier}`;
+ process.stdout.write(`npm:${specifier} ${faint("→")} `);
+ const response = await fetch(href);
+ if (!response.ok) throw new Error(`unable to fetch: ${href}`);
+ process.stdout.write(`${filePath}\n`);
+ await mkdir(dirname(filePath), {recursive: true});
+ if (/^application\/javascript(;|$)/i.test(response.headers.get("content-type")!)) {
+ await writeFile(filePath, rewriteNpmImports(await response.text(), path), "utf-8");
+ } else {
+ await writeFile(filePath, Buffer.from(await response.arrayBuffer()));
+ }
+ return filePath;
+ })();
+ promise.catch(() => {}).then(() => npmRequests.delete(path));
+ npmRequests.set(path, promise);
+ return promise;
+}
+
+let npmVersionCache: Promise<Map<string, string[]>>;
+
+async function initializeNpmVersionCache(root: string): typeof npmVersionCache {
+ const cache = new Map<string, string[]>();
+ const cacheDir = join(root, ".observablehq", "cache", "_npm");
+ try {
+ for (const entry of await readdir(cacheDir)) {
+ if (entry.startsWith("@")) {
+ for (const subentry of await readdir(join(cacheDir, entry))) {
+ const {name, range} = parseNpmSpecifier(`${entry}/${subentry}`);
+ const versions = cache.get(name);
+ if (versions) versions.push(range!);
+ else cache.set(name, [range!]);
+ }
+ } else {
+ const {name, range} = parseNpmSpecifier(entry);
+ const versions = cache.get(name);
+ if (versions) versions.push(range!);
+ else cache.set(name, [range!]);
+ }
+ }
+ } catch (error) {
+ if (!isEnoent(error)) throw error;
+ }
+ for (const [key, value] of cache) {
+ cache.set(key, rsort(value));
+ }
+ return cache;
+}
+
+const npmVersionRequests = new Map<string, Promise<string>>();
+
+async function resolveNpmVersion(root: string, specifier: NpmSpecifier): Promise<string> {
+ const {name, range} = specifier;
+ if (range && /^\d+\.\d+\.\d+([-+].*)?$/.test(range)) return range; // exact version specified
+ const cache = await (npmVersionCache ??= initializeNpmVersionCache(root));
+ const versions = cache.get(specifier.name);
+ if (versions) for (const version of versions) if (!range || satisfies(version, range)) return version;
+ const href = `https://data.jsdelivr.com/v1/packages/npm/${name}/resolved${range ? `?specifier=${range}` : ""}`;
+ let promise = npmVersionRequests.get(href);
+ if (promise) return promise; // coalesce concurrent requests
+ promise = (async function () {
+ process.stdout.write(`npm:${formatNpmSpecifier(specifier)} ${faint("→")} `);
+ const response = await fetch(href);
+ if (!response.ok) throw new Error(`unable to fetch: ${href}`);
+ const {version} = await response.json();
+ if (!version) throw new Error(`unable to resolve version: ${formatNpmSpecifier({name, range})}`);
+ const spec = formatNpmSpecifier({name, range: version});
+ process.stdout.write(`npm:${spec}\n`);
+ cache.set(specifier.name, versions ? rsort(versions.concat(version)) : [version]);
+ mkdir(join(root, ".observablehq", "cache", "_npm", spec), {recursive: true}); // disk cache
+ return version;
+ })();
+ promise.catch(() => {}).then(() => npmVersionRequests.delete(href));
+ npmVersionRequests.set(href, promise);
+ return promise;
+}
+
+export async function resolveNpmImport(root: string, specifier: string): Promise<string> {
+ const {
+ name,
+ range = name === "@duckdb/duckdb-wasm"
+ ? "1.28.0" // https://github.com/duckdb/duckdb-wasm/issues/1561
+ : name === "apache-arrow"
+ ? "13.0.0" // https://github.com/observablehq/framework/issues/750
+ : name === "parquet-wasm"
+ ? "0.5.0" // https://github.com/observablehq/framework/issues/733
+ : name === "echarts"
+ ? "5.4.3" // https://github.com/observablehq/framework/pull/811
+ : undefined,
+ path = name === "mermaid"
+ ? "dist/mermaid.esm.min.mjs/+esm" // TODO
+ : "+esm"
+ } = parseNpmSpecifier(specifier);
+ return `/_npm/${name}@${await resolveNpmVersion(root, {name, range})}/${path.replace(/\+esm$/, "+esm.js")}`;
+}
+
+const npmImportsCache = new Map<string, Promise<ImportReference[]>>();
+
+/**
+ * Resolves the direct dependencies of the specified npm path, such as
+ * "/_npm/d3@7.8.5/+esm.js", returning the corresponding set of npm paths.
+ */
+export async function resolveNpmImports(root: string, path: string): Promise<ImportReference[]> {
+ if (!path.startsWith("/_npm/")) throw new Error(`invalid npm path: ${path}`);
+ let promise = npmImportsCache.get(path);
+ if (promise) return promise;
+ promise = (async function () {
+ try {
+ const filePath = await populateNpmCache(root, path);
+ const source = await readFile(filePath, "utf-8");
+ const body = Parser.parse(source, parseOptions);
+ return findImports(body, path, source);
+ } catch (error) {
+ console.warn(`unable to fetch or parse: ${path}`);
+ return [];
+ }
+ })();
+ npmImportsCache.set(path, promise);
+ return promise;
+}
+
+export function resolveNpmSpecifier(path: string): string {
+ return path.replace(/^\/_npm\//, "").replace(/\/\+esm\.js$/, "/+esm");
+}
diff --git a/src/path.ts b/src/path.ts
new file mode 100644
index 000000000..4cf5582bf
--- /dev/null
+++ b/src/path.ts
@@ -0,0 +1,53 @@
+import {dirname, join} from "node:path";
+
+/**
+ * Returns the normalized relative path from "/file/path/to/a" to
+ * "/file/path/of/b" (e.g., "../of/b"). To make relative imports work, paths to
+ * the same directory are prefixed with "./", and paths that start without a
+ * slash are considered from the root.
+ */
+export function relativePath(source: string, target: string): string {
+ if (/^\w+:/.test(target)) return target; // protocol-qualified (e.g., https:); return unchanged
+ const from = join("/", source).split(/[/]+/g).slice(0, -1); // segments of the source directory
+ const to = join("/", target).split(/[/]+/g);
+ const f = to.pop()!; // target file name (re-appended below)
+ const m = from.length;
+ const n = Math.min(m, to.length);
+ let i = 0;
+ while (i < n && from[i] === to[i]) ++i; // length of the shared ancestor prefix
+ const k = m - i; // directory levels to climb out of the source
+ return (k ? "../".repeat(k) : "./") + to.slice(i).concat(f).join("/");
+}
+
+/**
+ * Returns the path to the specified target within the given source root, which
+ * defaults to ".", assuming that the target is a relative path such as an
+ * import or fetch from the specified source. Typically returns a path starting
+ * with a slash (indicating that it is relative to the source root), but may
+ * return a path starting with dot-dot-slash (../) if the path goes outside the
+ * specified root (e.g., from _import to _npm).
+ */
+export function resolvePath(source: string, target: string): string;
+export function resolvePath(root: string, source: string, target: string): string;
+export function resolvePath(root: string, source: string, target?: string): string {
+ if (target === undefined) (target = source), (source = root), (root = "."); // two-argument form: shift arguments, defaulting root to "."
+ const path = join(root, target.startsWith("/") ? "." : dirname(source), target); // slash-prefixed targets resolve against the root, not the source directory
+ return path.startsWith("../") ? path : `/${path}`;
+}
+
+/**
+ * Like resolvePath, except returns null if the specified target goes outside
+ * the source root, and treats paths that start with protocols (e.g., npm:) or
+ * hashes (e.g., #foo) as non-local paths and likewise returns null.
+ */
+export function resolveLocalPath(source: string, target: string): string | null {
+ if (/^\w+:/.test(target)) return null; // protocol-qualified URL or scheme (e.g., npm:, https:)
+ if (target.startsWith("#")) return null; // anchor tag
+ const path = resolvePath(source, target);
+ if (path.startsWith("../")) return null; // goes above root
+ return path;
+}
+
+/** Returns true if the specifier is a path (starts with "./", "../", or "/") rather than a bare module name. */
+export function isPathImport(specifier: string): boolean {
+ return ["./", "../", "/"].some((prefix) => specifier.startsWith(prefix));
+}
diff --git a/src/preview.ts b/src/preview.ts
index 93185dedf..91a723ed2 100644
--- a/src/preview.ts
+++ b/src/preview.ts
@@ -5,30 +5,33 @@ import {access, constants, readFile, stat} from "node:fs/promises";
import {createServer} from "node:http";
import type {IncomingMessage, RequestListener, Server, ServerResponse} from "node:http";
import {basename, dirname, extname, join, normalize} from "node:path";
-import {fileURLToPath} from "node:url";
import {difference} from "d3-array";
+import type {PatchItem} from "fast-array-diff";
+import {getPatch} from "fast-array-diff";
+import mime from "mime";
import openBrowser from "open";
import send from "send";
-import {type WebSocket, WebSocketServer} from "ws";
+import type {WebSocket} from "ws";
+import {WebSocketServer} from "ws";
import {version} from "../package.json";
import type {Config} from "./config.js";
-import {mergeStyle} from "./config.js";
import {Loader} from "./dataloader.js";
import {HttpError, isEnoent, isHttpError, isSystemError} from "./error.js";
import {getClientPath} from "./files.js";
import {FileWatchers} from "./fileWatchers.js";
-import {createImportResolver, rewriteModule} from "./javascript/imports.js";
-import {getImplicitSpecifiers, getImplicitStylesheets} from "./libraries.js";
-import {diffMarkdown, parseMarkdown} from "./markdown.js";
-import type {ParseResult} from "./markdown.js";
-import {renderPreview, resolveStylesheet} from "./render.js";
+import {parseHtml, rewriteHtml} from "./html.js";
+import {transpileJavaScript, transpileModule} from "./javascript/transpile.js";
+import {parseMarkdown} from "./markdown.js";
+import type {MarkdownCode, MarkdownPage} from "./markdown.js";
+import {populateNpmCache} from "./npm.js";
+import {isPathImport} from "./path.js";
+import {renderPage} from "./render.js";
+import type {Resolvers} from "./resolvers.js";
+import {getResolvers} from "./resolvers.js";
import {bundleStyles, rollupClient} from "./rollup.js";
import {searchIndex} from "./search.js";
import {Telemetry} from "./telemetry.js";
-import {bold, faint, green, link, red} from "./tty.js";
-import {relativeUrl} from "./url.js";
-
-const publicRoot = join(dirname(fileURLToPath(import.meta.url)), "..", "public");
+import {bold, faint, green, link} from "./tty.js";
export interface PreviewOptions {
config: Config;
@@ -93,30 +96,21 @@ export class PreviewServer {
const url = new URL(req.url!, "http://localhost");
let pathname = decodeURIComponent(url.pathname);
let match: RegExpExecArray | null;
- if (pathname === "/_observablehq/runtime.js") {
- const root = join(fileURLToPath(import.meta.resolve("@observablehq/runtime")), "../../");
- send(req, "/dist/runtime.js", {root}).pipe(res);
- } else if (pathname.startsWith("/_observablehq/stdlib.js")) {
- end(req, res, await rollupClient(getClientPath("./src/client/stdlib.js")), "text/javascript");
- } else if (pathname.startsWith("/_observablehq/stdlib/")) {
- const path = getClientPath("./src/client/" + pathname.slice("/_observablehq/".length));
- if (pathname.endsWith(".js")) {
- end(req, res, await rollupClient(path), "text/javascript");
- } else if (pathname.endsWith(".css")) {
- end(req, res, await bundleStyles({path}), "text/css");
- } else {
- throw new HttpError(`Not found: ${pathname}`, 404);
- }
- } else if (pathname === "/_observablehq/client.js") {
- end(req, res, await rollupClient(getClientPath("./src/client/preview.js")), "text/javascript");
- } else if (pathname === "/_observablehq/search.js") {
- end(req, res, await rollupClient(getClientPath("./src/client/search.js")), "text/javascript");
+ if (pathname === "/_observablehq/client.js") {
+ end(req, res, await rollupClient(getClientPath("./src/client/preview.js"), root, pathname), "text/javascript");
} else if (pathname === "/_observablehq/minisearch.json") {
end(req, res, await searchIndex(config), "application/json");
} else if ((match = /^\/_observablehq\/theme-(?[\w-]+(,[\w-]+)*)?\.css$/.exec(pathname))) {
end(req, res, await bundleStyles({theme: match.groups!.theme?.split(",") ?? []}), "text/css");
- } else if (pathname.startsWith("/_observablehq/")) {
- send(req, pathname.slice("/_observablehq".length), {root: publicRoot}).pipe(res);
+ } else if (pathname.startsWith("/_observablehq/") && pathname.endsWith(".js")) {
+ const path = getClientPath("./src/client/" + pathname.slice("/_observablehq/".length));
+ end(req, res, await rollupClient(path, root, pathname), "text/javascript");
+ } else if (pathname.startsWith("/_observablehq/") && pathname.endsWith(".css")) {
+ const path = getClientPath("./src/client/" + pathname.slice("/_observablehq/".length));
+ end(req, res, await bundleStyles({path}), "text/css");
+ } else if (pathname.startsWith("/_npm/")) {
+ await populateNpmCache(root, pathname);
+ send(req, pathname, {root: join(root, ".observablehq", "cache")}).pipe(res);
} else if (pathname.startsWith("/_import/")) {
const path = pathname.slice("/_import".length);
const filepath = join(root, path);
@@ -126,8 +120,8 @@ export class PreviewServer {
end(req, res, await bundleStyles({path: filepath}), "text/css");
return;
} else if (pathname.endsWith(".js")) {
- const input = await readFile(filepath, "utf-8");
- const output = await rewriteModule(input, path, createImportResolver(root));
+ const input = await readFile(join(root, path), "utf-8");
+ const output = await transpileModule(input, {root, path});
end(req, res, output, "text/javascript");
return;
}
@@ -206,7 +200,9 @@ export class PreviewServer {
// Otherwise, serve the corresponding Markdown file, if it exists.
// Anything else should 404; static files should be matched above.
try {
- const {html} = await renderPreview(path + ".md", {path: pathname, ...config});
+ const options = {path: pathname, ...config, preview: true};
+ const parse = await parseMarkdown(path + ".md", options);
+ const html = await renderPage(parse, options);
end(req, res, html, "text/html");
} catch (error) {
if (!isEnoent(error)) throw error; // internal error
@@ -222,7 +218,9 @@ export class PreviewServer {
}
if (req.method === "GET" && res.statusCode === 404) {
try {
- const {html} = await renderPreview(join(root, "404.md"), {path: "/404", ...config});
+ const options = {path: "/404", ...config, preview: true};
+ const parse = await parseMarkdown(join(root, "404.md"), options);
+ const html = await renderPage(parse, options);
end(req, res, html, "text/html");
return;
} catch {
@@ -265,61 +263,40 @@ function end(req: IncomingMessage, res: ServerResponse, content: string, type: s
}
}
-function getWatchPaths(parseResult: ParseResult): string[] {
- const paths: string[] = [];
- const {files, imports} = parseResult;
- for (const f of files) paths.push(f.name);
- for (const i of imports) paths.push(i.name);
- return paths;
-}
-
-export function getPreviewStylesheet(path: string, data: ParseResult["data"], style: Config["style"]): string | null {
- try {
- style = mergeStyle(path, data?.style, data?.theme, style);
- } catch (error) {
- console.error(red(String(error)));
- return relativeUrl(path, "/_observablehq/theme-.css");
+// Note that while we appear to be watching the referenced files here,
+// FileWatchers will magically watch the corresponding data loader if a
+// referenced file does not exist!
+function getWatchFiles(resolvers: Resolvers): Iterable {
+ const files = new Set();
+ for (const specifier of resolvers.stylesheets) {
+ if (isPathImport(specifier)) {
+ files.add(specifier);
+ }
+ }
+ for (const specifier of resolvers.files) {
+ files.add(specifier);
}
- return !style
- ? null
- : "path" in style
- ? relativeUrl(path, `/_import/${style.path}`)
- : relativeUrl(path, `/_observablehq/theme-${style.theme.join(",")}.css`);
+ for (const specifier of resolvers.localImports) {
+ files.add(specifier);
+ }
+ return files;
}
-function handleWatch(socket: WebSocket, req: IncomingMessage, {root, style: defaultStyle}: Config) {
+function handleWatch(socket: WebSocket, req: IncomingMessage, {root, style}: Config) {
let path: string | null = null;
- let current: ParseResult | null = null;
- let stylesheets: Set | null = null;
+ let hash: string | null = null;
+ let html: string[] | null = null;
+ let code: Map | null = null;
+ let files: Map | null = null;
+ let stylesheets: string[] | null = null;
let markdownWatcher: FSWatcher | null = null;
let attachmentWatcher: FileWatchers | null = null;
let emptyTimeout: ReturnType | null = null;
console.log(faint("socket open"), req.url);
- async function getStylesheets({cells, data}: ParseResult): Promise> {
- const inputs = new Set();
- for (const cell of cells) cell.inputs?.forEach(inputs.add, inputs);
- const stylesheets = await getImplicitStylesheets(getImplicitSpecifiers(inputs));
- const style = getPreviewStylesheet(path!, data, defaultStyle);
- if (style) stylesheets.add(style);
- return new Set(Array.from(stylesheets, (href) => resolveStylesheet(path!, href)));
- }
-
- function refreshAttachment(name: string) {
- const {cells} = current!;
- if (cells.some((cell) => cell.imports?.some((i) => i.name === name))) {
- watcher("change"); // trigger re-compilation of JavaScript to get new import hashes
- } else {
- const affectedCells = cells.filter((cell) => cell.files?.some((f) => f.name === name));
- if (affectedCells.length > 0) {
- send({type: "refresh", cellIds: affectedCells.map((cell) => cell.id)});
- }
- }
- }
-
async function watcher(event: WatchEventType, force = false) {
- if (!path || !current) throw new Error("not initialized");
+ if (!path) throw new Error("not initialized");
switch (event) {
case "rename": {
markdownWatcher?.close();
@@ -335,9 +312,9 @@ function handleWatch(socket: WebSocket, req: IncomingMessage, {root, style: defa
break;
}
case "change": {
- const updated = await parseMarkdown(join(root, path), {root, path});
+ const page = await parseMarkdown(join(root, path), {root, path, style});
// delay to avoid a possibly-empty file
- if (!force && updated.html === "") {
+ if (!force && page.html === "") {
if (!emptyTimeout) {
emptyTimeout = setTimeout(() => {
emptyTimeout = null;
@@ -349,16 +326,30 @@ function handleWatch(socket: WebSocket, req: IncomingMessage, {root, style: defa
clearTimeout(emptyTimeout);
emptyTimeout = null;
}
- if (current.hash === updated.hash) break;
- const updatedStylesheets = await getStylesheets(updated);
- for (const href of difference(stylesheets, updatedStylesheets)) send({type: "remove-stylesheet", href});
- for (const href of difference(updatedStylesheets, stylesheets)) send({type: "add-stylesheet", href});
- stylesheets = updatedStylesheets;
- const diff = diffMarkdown(current, updated);
- send({type: "update", diff, previousHash: current.hash, updatedHash: updated.hash});
- current = updated;
+ const resolvers = await getResolvers(page, {root, path});
+ if (hash === resolvers.hash) break;
+ const previousHash = hash!;
+ const previousHtml = html!;
+ const previousCode = code!;
+ const previousFiles = files!;
+ const previousStylesheets = stylesheets!;
+ hash = resolvers.hash;
+ html = getHtml(page, resolvers);
+ code = getCode(page, resolvers);
+ files = getFiles(resolvers);
+ stylesheets = Array.from(resolvers.stylesheets, resolvers.resolveStylesheet);
+ send({
+ type: "update",
+ diffHtml: diffHtml(previousHtml, html),
+ diffCode: diffCode(previousCode, code),
+ diffFiles: diffFiles(previousFiles, files),
+ addedStylesheets: Array.from(difference(stylesheets, previousStylesheets)),
+ removedStylesheets: Array.from(difference(previousStylesheets, stylesheets)),
+ previousHash,
+ updatedHash: hash
+ });
attachmentWatcher?.close();
- attachmentWatcher = await FileWatchers.of(root, path, getWatchPaths(updated), refreshAttachment);
+ attachmentWatcher = await FileWatchers.of(root, path, getWatchFiles(resolvers), () => watcher("change"));
break;
}
}
@@ -370,10 +361,15 @@ function handleWatch(socket: WebSocket, req: IncomingMessage, {root, style: defa
if (!(path = normalize(path)).startsWith("/")) throw new Error("Invalid path: " + initialPath);
if (path.endsWith("/")) path += "index";
path += ".md";
- current = await parseMarkdown(join(root, path), {root, path});
- if (current.hash !== initialHash) return void send({type: "reload"});
- stylesheets = await getStylesheets(current);
- attachmentWatcher = await FileWatchers.of(root, path, getWatchPaths(current), refreshAttachment);
+ const page = await parseMarkdown(join(root, path), {root, path, style});
+ const resolvers = await getResolvers(page, {root, path});
+ if (resolvers.hash !== initialHash) return void send({type: "reload"});
+ hash = resolvers.hash;
+ html = getHtml(page, resolvers);
+ code = getCode(page, resolvers);
+ files = getFiles(resolvers);
+ stylesheets = Array.from(resolvers.stylesheets, resolvers.resolveStylesheet);
+ attachmentWatcher = await FileWatchers.of(root, path, getWatchFiles(resolvers), () => watcher("change"));
markdownWatcher = watch(join(root, path), (event) => watcher(event));
}
@@ -414,3 +410,75 @@ function handleWatch(socket: WebSocket, req: IncomingMessage, {root, style: defa
socket.send(JSON.stringify(message));
}
}
+
+// Renders the page body with file references resolved, returning the
+// serialized HTML of each top-level body element (the units used for diffing
+// during preview updates).
+function getHtml({html}: MarkdownPage, {resolveFile}: Resolvers): string[] {
+ return Array.from(parseHtml(rewriteHtml(html, resolveFile)).document.body.children, (d) => d.outerHTML);
+}
+
+// Returns a map from code cell id to its transpiled (and hash-stamped) body.
+function getCode({code}: MarkdownPage, resolvers: Resolvers): Map {
+ return new Map(code.map((code) => [code.id, transpileCode(code, resolvers)]));
+}
+
+// Including the file hash as a comment ensures that the code changes when a
+// directly-referenced file changes, triggering re-evaluation. Note that when a
+// transitive import changes, or when a file referenced by a transitive import
+// changes, the sha is already included in the transpiled code, and hence will
+// likewise be re-evaluated.
+function transpileCode({id, node}: MarkdownCode, resolvers: Resolvers): string {
+ const hash = createHash("sha256");
+ for (const f of node.files) hash.update(resolvers.resolveFile(f.name)); // fold each resolved file path into the digest
+ return `${transpileJavaScript(node, {id, ...resolvers})} // ${hash.digest("hex")}`;
+}
+
+// Returns a map from referenced file name to its resolved path.
+function getFiles({files, resolveFile}: Resolvers): Map {
+ return new Map(Array.from(files, (f) => [f, resolveFile(f)]));
+}
+
+// A patch of code cells: ids to remove and transpiled bodies to add. A cell
+// whose body changed appears in both lists (removed by id, re-added by body).
+type CodePatch = {removed: string[]; added: string[]};
+
+// Diffs two id → transpiled-body maps into a CodePatch.
+function diffCode(oldCode: Map, newCode: Map): CodePatch {
+ const patch: CodePatch = {removed: [], added: []};
+ for (const [id, body] of oldCode) {
+ if (newCode.get(id) !== body) {
+ patch.removed.push(id);
+ }
+ }
+ for (const [id, body] of newCode) {
+ if (oldCode.get(id) !== body) {
+ patch.added.push(body);
+ }
+ }
+ return patch;
+}
+
+// A patch of file registrations: names to remove and declarations to add. A
+// file whose resolved path changed appears in both lists.
+type FileDeclaration = {name: string; mimeType: string | null; path: string};
+type FilePatch = {removed: string[]; added: FileDeclaration[]};
+
+// Diffs two name → resolved-path maps into a FilePatch; the mime type of each
+// added file is inferred from its name.
+function diffFiles(oldFiles: Map, newFiles: Map): FilePatch {
+ const patch: FilePatch = {removed: [], added: []};
+ for (const [name, path] of oldFiles) {
+ if (newFiles.get(name) !== path) {
+ patch.removed.push(name);
+ }
+ }
+ for (const [name, path] of newFiles) {
+ if (oldFiles.get(name) !== path) {
+ patch.added.push({name, mimeType: mime.getType(name), path});
+ }
+ }
+ return patch;
+}
+
+// Computes an array diff of the serialized top-level HTML elements, redacting
+// the contents of removed items.
+function diffHtml(oldHtml: string[], newHtml: string[]): RedactedPatch {
+ return getPatch(oldHtml, newHtml).map(redactPatch);
+}
+
+type RedactedPatch = RedactedPatchItem[];
+
+type RedactedPatchItem =
+ | {type: "add"; oldPos: number; newPos: number; items: T[]}
+ | {type: "remove"; oldPos: number; newPos: number; items: {length: number}};
+
+// For "remove" items, only the count of removed elements is retained —
+// presumably to avoid echoing the removed HTML back to the client; the
+// positions alone suffice to apply the removal. Added items pass through.
+function redactPatch(patch: PatchItem): RedactedPatchItem {
+ return patch.type === "remove" ? {...patch, type: "remove", items: {length: patch.items.length}} : patch;
+}
diff --git a/src/render.ts b/src/render.ts
index 02e112a02..06b8fe73a 100644
--- a/src/render.ts
+++ b/src/render.ts
@@ -1,76 +1,43 @@
-import {parseHTML} from "linkedom";
+import mime from "mime";
import type {Config, Page, Script, Section} from "./config.js";
import {mergeToc} from "./config.js";
import {getClientPath} from "./files.js";
-import {type Html, html} from "./html.js";
-import type {ImportResolver} from "./javascript/imports.js";
-import {createImportResolver, resolveModuleIntegrity, resolveModulePreloads} from "./javascript/imports.js";
-import type {FileReference, ImportReference, Transpile} from "./javascript.js";
-import {addImplicitSpecifiers, addImplicitStylesheets} from "./libraries.js";
-import {type ParseResult, parseMarkdown} from "./markdown.js";
-import {type PageLink, findLink, normalizePath} from "./pager.js";
-import {getPreviewStylesheet} from "./preview.js";
+import type {Html} from "./html.js";
+import {html, parseHtml, rewriteHtml} from "./html.js";
+import {transpileJavaScript} from "./javascript/transpile.js";
+import type {MarkdownPage} from "./markdown.js";
+import type {PageLink} from "./pager.js";
+import {findLink, normalizePath} from "./pager.js";
+import {relativePath} from "./path.js";
+import type {Resolvers} from "./resolvers.js";
+import {getResolvers} from "./resolvers.js";
import {rollupClient} from "./rollup.js";
-import {relativeUrl} from "./url.js";
-
-export interface Render {
- html: string;
- files: FileReference[];
- imports: ImportReference[];
- data: ParseResult["data"];
-}
export interface RenderOptions extends Config {
root: string;
path: string;
-}
-
-export async function renderPreview(sourcePath: string, options: RenderOptions): Promise {
- const parseResult = await parseMarkdown(sourcePath, options);
- return {
- html: await render(parseResult, {...options, preview: true}),
- files: parseResult.files,
- imports: parseResult.imports,
- data: parseResult.data
- };
-}
-
-export async function renderServerless(sourcePath: string, options: RenderOptions): Promise {
- const parseResult = await parseMarkdown(sourcePath, options);
- return {
- html: await render(parseResult, options),
- files: parseResult.files,
- imports: parseResult.imports,
- data: parseResult.data
- };
-}
-
-export function renderDefineCell(cell: Transpile): string {
- const {id, inline, inputs, outputs, files, body} = cell;
- return `define({${Object.entries({id, inline, inputs, outputs, files})
- .filter((arg) => arg[1] !== undefined)
- .map((arg) => `${arg[0]}: ${JSON.stringify(arg[1])}`)
- .join(", ")}, body: ${body}});\n`;
+ resolvers?: Resolvers;
}
type RenderInternalOptions =
- | {preview?: false} // serverless
+ | {preview?: false} // build
| {preview: true}; // preview
-async function render(parseResult: ParseResult, options: RenderOptions & RenderInternalOptions): Promise {
- const {root, base, path, pages, title, preview, search} = options;
- const sidebar = parseResult.data?.sidebar !== undefined ? Boolean(parseResult.data.sidebar) : options.sidebar;
- const toc = mergeToc(parseResult.data?.toc, options.toc);
+export async function renderPage(page: MarkdownPage, options: RenderOptions & RenderInternalOptions): Promise {
+ const {root, base, path, pages, title, preview, search, resolvers = await getResolvers(page, options)} = options;
+ const sidebar = page.data?.sidebar !== undefined ? Boolean(page.data.sidebar) : options.sidebar;
+ const toc = mergeToc(page.data?.toc, options.toc);
+ const {files, resolveFile, resolveImport} = resolvers;
return String(html`
${path === "/404" ? html`\n ` : ""}
${
- parseResult.title || title
- ? html`${[parseResult.title, parseResult.title === title ? null : title]
+ page.title || title
+ ? html`${[page.title, page.title === title ? null : title]
.filter((title): title is string => !!title)
.join(" | ")} \n`
: ""
-}${await renderHead(parseResult, options, path, createImportResolver(root, "_import"))}${
+}${renderHead(page, resolvers, options)}${
path === "/404"
? html.unsafe(`\n${sidebar ? html`\n${await renderSidebar(title, pages, path, search)}` : ""}${
- toc.show ? html`\n${renderToc(findHeaders(parseResult), toc.label)}` : ""
+import ${preview || page.code.length ? `{${preview ? "open, " : ""}define} from ` : ""}${JSON.stringify(
+ resolveImport("observablehq:client")
+ )};${
+ files.size
+ ? `\nimport {registerFile} from ${JSON.stringify(resolveImport("observablehq:stdlib"))};
+${renderFiles(files, resolveFile)}`
+ : ""
+ }
+${preview ? `\nopen({hash: ${JSON.stringify(resolvers.hash)}, eval: (body) => eval(body)});\n` : ""}${page.code
+ .map(({node, id}) => `\n${transpileJavaScript(node, {id, resolveImport})}`)
+ .join("")}`)}
+${sidebar ? html`\n${await renderSidebar(title, pages, root, path, search)}` : ""}${
+ toc.show ? html`\n${renderToc(findHeaders(page), toc.label)}` : ""
}
-${renderHeader(options, parseResult.data)}
+
${renderHeader(options, page.data)}
-${html.unsafe(parseResult.html)} ${renderFooter(path, options, parseResult.data)}
+${html.unsafe(rewriteHtml(page.html, resolvers.resolveFile))}${renderFooter(path, options, page.data)}
`);
}
-async function renderSidebar(title = "Home", pages: (Page | Section)[], path: string, search: boolean): Promise {
+// Emits one registerFile call per referenced file, sorted by name for
+// deterministic output.
+function renderFiles(files: Iterable
, resolve: (name: string) => string): string {
+ return Array.from(files)
+ .sort()
+ .map((f) => renderFile(f, resolve))
+ .join("");
+}
+
+// Serializes a single registerFile call; the mime type is inferred from the
+// file name, and the path is resolved relative to the page.
+function renderFile(name: string, resolve: (name: string) => string): string {
+ return `\nregisterFile(${JSON.stringify(name)}, ${JSON.stringify({
+ name,
+ mimeType: mime.getType(name),
+ path: resolve(name)
+ })});`;
+}
+
+async function renderSidebar(
+ title = "Home",
+ pages: (Page | Section)[],
+ root: string,
+ path: string,
+ search: boolean
+): Promise {
return html`
`;
}
@@ -151,18 +141,16 @@ interface Header {
href: string;
}
-const tocSelector = ["h1:not(:first-of-type)", "h2:not(h1 + h2)"];
+const tocSelector = "h1:not(:first-of-type), h2:first-child, :not(h1) + h2";
-function findHeaders(parseResult: ParseResult): Header[] {
- return Array.from(parseHTML(parseResult.html).document.querySelectorAll(tocSelector.join(", ")))
+function findHeaders(page: MarkdownPage): Header[] {
+ return Array.from(parseHtml(page.html).document.querySelectorAll(tocSelector))
.map((node) => ({label: node.textContent, href: node.firstElementChild?.getAttribute("href")}))
.filter((d): d is Header => !!d.label && !!d.href);
}
function renderToc(headers: Header[], label: string): Html {
- return html`
+ return html`
${
headers.length > 0
? html`
@@ -177,60 +165,36 @@ function renderToc(headers: Header[], label: string): Html {
`;
}
-function renderListItem(p: Page, path: string): Html {
+function renderListItem(page: Page, path: string): Html {
return html`\n ${p.name} `;
+ normalizePath(page.path) === path ? " observablehq-link-active" : ""
+ }">${page.name} `;
}
function prettyPath(path: string): string {
return path.replace(/\/index$/, "/") || "/";
}
-async function renderHead(
- parseResult: ParseResult,
- options: Pick,
- path: string,
- resolver: ImportResolver
-): Promise {
- const scripts = options.scripts;
- const head = parseResult.data?.head !== undefined ? parseResult.data.head : options.head;
- const stylesheets = new Set(["https://fonts.googleapis.com/css2?family=Source+Serif+Pro:ital,wght@0,400;0,600;0,700;1,400;1,600;1,700&display=swap"]); // prettier-ignore
- const style = getPreviewStylesheet(path, parseResult.data, options.style);
- if (style) stylesheets.add(style);
- const specifiers = new Set(["npm:@observablehq/runtime", "npm:@observablehq/stdlib"]);
- for (const {name} of parseResult.imports) specifiers.add(name);
- const inputs = new Set(parseResult.cells.flatMap((cell) => cell.inputs ?? []));
- addImplicitSpecifiers(specifiers, inputs);
- await addImplicitStylesheets(stylesheets, specifiers);
- const preloads = new Set([relativeUrl(path, "/_observablehq/client.js")]);
- for (const specifier of specifiers) preloads.add(await resolver(path, specifier));
- await resolveModulePreloads(preloads);
+function renderHead(parse: MarkdownPage, resolvers: Resolvers, {scripts, head, root}: RenderOptions): Html {
+ if (parse.data?.head !== undefined) head = parse.data.head;
+ const resolveScript = (src: string) => (/^\w+:/.test(src) ? src : resolvers.resolveImport(relativePath(root, src)));
return html` ${
- Array.from(stylesheets)
- .sort()
- .map((href) => resolveStylesheet(path, href))
- .map(renderStylesheetPreload) //
+ Array.from(resolvers.stylesheets, (i) => renderStylesheetPreload(resolvers.resolveStylesheet(i))) //
}${
- Array.from(stylesheets)
- .sort()
- .map((href) => resolveStylesheet(path, href))
- .map(renderStylesheet) //
+ Array.from(resolvers.stylesheets, (i) => renderStylesheet(resolvers.resolveStylesheet(i))) //
}${
- Array.from(preloads).sort().map(renderModulePreload) //
- }${head ? html`\n${html.unsafe(head)}` : null}${html.unsafe(scripts.map((s) => renderScript(s, path)).join(""))}`;
-}
-
-export function resolveStylesheet(path: string, href: string): string {
- return href.startsWith("observablehq:")
- ? relativeUrl(path, `/_observablehq/${href.slice("observablehq:".length)}`)
- : href;
+ Array.from(resolvers.staticImports, (i) => renderModulePreload(resolvers.resolveImport(i))) //
+ }${
+ head ? html`\n${html.unsafe(head)}` : null // arbitrary user content
+ }${
+ Array.from(scripts, (s) => renderScript(s, resolveScript)) // `;
+function renderScript(script: Script, resolve: (specifier: string) => string): Html {
+ return html`\n`;
}
function renderStylesheet(href: string): Html {
@@ -242,11 +206,10 @@ function renderStylesheetPreload(href: string): Html {
}
function renderModulePreload(href: string): Html {
- const integrity: string | undefined = resolveModuleIntegrity(href);
- return html`\n `;
+ return html`\n `;
}
-function renderHeader({header}: Pick, data: ParseResult["data"]): Html | null {
+function renderHeader({header}: Pick, data: MarkdownPage["data"]): Html | null {
if (data?.header !== undefined) header = data?.header;
return header ? html`\n` : null;
}
@@ -254,7 +217,7 @@ function renderHeader({header}: Pick, data: ParseResult["data"
function renderFooter(
path: string,
options: Pick,
- data: ParseResult["data"]
+ data: MarkdownPage["data"]
): Html | null {
let footer = options.footer;
if (data?.footer !== undefined) footer = data?.footer;
@@ -272,5 +235,5 @@ function renderPager(path: string, {prev, next}: PageLink): Html {
}
function renderRel(path: string, page: Page, rel: "prev" | "next"): Html {
- return html`${page.name} `;
+ return html`${page.name} `;
}
diff --git a/src/resolvers.ts b/src/resolvers.ts
new file mode 100644
index 000000000..78fab20ee
--- /dev/null
+++ b/src/resolvers.ts
@@ -0,0 +1,314 @@
+import {createHash} from "node:crypto";
+import {join} from "node:path/posix";
+import {findAssets} from "./html.js";
+import {defaultGlobals} from "./javascript/globals.js";
+import {getFileHash, getModuleHash, getModuleInfo} from "./javascript/module.js";
+import {getImplicitDependencies, getImplicitDownloads} from "./libraries.js";
+import {getImplicitFileImports, getImplicitInputImports} from "./libraries.js";
+import {getImplicitStylesheets} from "./libraries.js";
+import type {MarkdownPage} from "./markdown.js";
+import {populateNpmCache, resolveNpmImport, resolveNpmImports, resolveNpmSpecifier} from "./npm.js";
+import {isPathImport, relativePath, resolvePath} from "./path.js";
+
+export interface Resolvers {
+ hash: string;
+ assets: Set;
+ files: Set;
+ localImports: Set;
+ globalImports: Set;
+ staticImports: Set;
+ stylesheets: Set;
+ resolveFile(specifier: string): string;
+ resolveImport(specifier: string): string;
+ resolveStylesheet(specifier: string): string;
+}
+
+const defaultImports = [
+ "observablehq:client", // Framework client
+ "npm:@observablehq/runtime", // Runtime
+ "npm:@observablehq/stdlib" // Standard library
+];
+
+export const builtins = new Map([
+ ["npm:@observablehq/runtime", "/_observablehq/runtime.js"],
+ ["npm:@observablehq/stdlib", "/_observablehq/stdlib.js"],
+ ["npm:@observablehq/dot", "/_observablehq/stdlib/dot.js"], // TODO publish to npm
+ ["npm:@observablehq/duckdb", "/_observablehq/stdlib/duckdb.js"], // TODO publish to npm
+ ["npm:@observablehq/inputs", "/_observablehq/stdlib/inputs.js"], // TODO publish to npm
+ ["npm:@observablehq/mermaid", "/_observablehq/stdlib/mermaid.js"], // TODO publish to npm
+ ["npm:@observablehq/tex", "/_observablehq/stdlib/tex.js"], // TODO publish to npm
+ ["npm:@observablehq/sqlite", "/_observablehq/stdlib/sqlite.js"], // TODO publish to npm
+ ["npm:@observablehq/xlsx", "/_observablehq/stdlib/xlsx.js"], // TODO publish to npm
+ ["npm:@observablehq/zip", "/_observablehq/stdlib/zip.js"] // TODO publish to npm
+]);
+
+/**
+ * Resolves the dependencies (the other files) that the page needs. Dependencies
+ * are in three categories: imports (JavaScript modules), stylesheets (CSS), and
+ * files (referenced either by FileAttachment or by static HTML).
+ *
+ * For imports, we distinguish between local imports (to other JavaScript
+ * modules within the source root) and global imports (typically to libraries
+ * published to npm but also to modules that Framework itself provides such as
+ * stdlib; perhaps better called “non-local”). We also distinguish between
+ * static imports (import declarations) and dynamic imports (import
+ * expressions): only static imports are preloaded, but both static and dynamic
+ * imports are included in the published site (dist). Transitive static imports
+ * from dynamically-imported modules are treated as dynamic since they should
+ * not be preloaded. For example, Mermaid implements about a dozen chart types
+ * as dynamic imports, and we only want to load the ones in use.
+ *
+ * For stylesheets, we are only concerned with the config style option or the
+ * page-level front matter style option where the stylesheet is served out of
+ * _import by generating a bundle from a local file — along with implicit
+ * stylesheets referenced by recommended libraries such as Leaflet. (Stylesheets
+ * referenced in static HTML are treated as files.)
+ *
+ * For files, we collect all FileAttachment calls within local modules, adding
+ * them to any files referenced by static HTML.
+ */
+export async function getResolvers(page: MarkdownPage, {root, path}: {root: string; path: string}): Promise {
+ const hash = createHash("sha256").update(page.html);
+ const assets = findAssets(page.html, path);
+ const files = new Set();
+ const fileMethods = new Set();
+ const localImports = new Set();
+ const globalImports = new Set(defaultImports);
+ const staticImports = new Set(defaultImports);
+ const stylesheets = new Set();
+ const resolutions = new Map();
+
+ // Add stylesheets. TODO Instead of hard-coding Source Serif Pro, parse the
+ // page’s stylesheet to look for external imports.
+ stylesheets.add("https://fonts.googleapis.com/css2?family=Source+Serif+Pro:ital,wght@0,400;0,600;0,700;1,400;1,600;1,700&display=swap"); // prettier-ignore
+ if (page.style) stylesheets.add(page.style);
+
+ // Collect directly-attached files, local imports, and static imports.
+ for (const {node} of page.code) {
+ for (const f of node.files) {
+ files.add(f.name);
+ if (f.method) fileMethods.add(f.method);
+ }
+ for (const i of node.imports) {
+ (i.type === "local" ? localImports : globalImports).add(i.name);
+ if (i.method === "static") staticImports.add(i.name);
+ }
+ }
+
+ // Compute the content hash. TODO In build, this needs to consider the output
+ // of data loaders, rather than the source of data loaders.
+ for (const f of assets) hash.update(getFileHash(root, resolvePath(path, f)));
+ for (const f of files) hash.update(getFileHash(root, resolvePath(path, f)));
+ for (const i of localImports) hash.update(getModuleHash(root, resolvePath(path, i)));
+ if (page.style && isPathImport(page.style)) hash.update(getFileHash(root, resolvePath(path, page.style)));
+
+ // Collect transitively-attached files and imports.
+ for (const i of localImports) {
+ const p = resolvePath(path, i);
+ const info = getModuleInfo(root, p);
+ if (!info) continue;
+ for (const f of info.files) {
+ files.add(relativePath(path, resolvePath(p, f)));
+ }
+ for (const m of info.fileMethods) {
+ fileMethods.add(m);
+ }
+ for (const o of info.localStaticImports) {
+ const r = relativePath(path, resolvePath(p, o));
+ localImports.add(r);
+ staticImports.add(r);
+ }
+ for (const o of info.localDynamicImports) {
+ localImports.add(relativePath(path, resolvePath(p, o)));
+ }
+ for (const o of info.globalStaticImports) {
+ staticImports.add(o);
+ globalImports.add(o);
+ }
+ for (const o of info.globalDynamicImports) {
+ globalImports.add(o);
+ }
+ }
+
+ // Add implicit imports for files. These are technically implemented as
+ // dynamic imports, but we assume that referenced files will be loaded
+ // immediately and so treat them as static for preloads.
+ for (const i of getImplicitFileImports(fileMethods)) {
+ staticImports.add(i);
+ globalImports.add(i);
+ }
+
+ // Add implicit imports for standard library built-ins, such as d3 and Plot.
+ for (const i of getImplicitInputImports(findFreeInputs(page))) {
+ staticImports.add(i);
+ globalImports.add(i);
+ }
+
+ // Add transitive imports for built-in libraries.
+ for (const i of getImplicitDependencies(staticImports)) {
+ staticImports.add(i);
+ }
+ for (const i of getImplicitDependencies(globalImports)) {
+ globalImports.add(i);
+ }
+
+ // Resolve npm: imports.
+ for (const i of globalImports) {
+ if (i.startsWith("npm:") && !builtins.has(i)) {
+ resolutions.set(i, await resolveNpmImport(root, i.slice("npm:".length)));
+ }
+ }
+
+ // Follow transitive imports of npm imports. This has the side-effect of
+ // populating the npm cache.
+ for (const value of resolutions.values()) {
+ for (const i of await resolveNpmImports(root, value)) {
+ if (i.type === "local") {
+ const path = resolvePath(value, i.name);
+ const specifier = `npm:${resolveNpmSpecifier(path)}`;
+ globalImports.add(specifier);
+ resolutions.set(specifier, path);
+ }
+ }
+ }
+
+ // Resolve transitive static npm: imports.
+ const npmStaticResolutions = new Set();
+ for (const i of staticImports) {
+ const r = resolutions.get(i);
+ if (r) npmStaticResolutions.add(r);
+ }
+ for (const value of npmStaticResolutions) {
+ for (const i of await resolveNpmImports(root, value)) {
+ if (i.type === "local" && i.method === "static") {
+ const path = resolvePath(value, i.name);
+ const specifier = `npm:${resolveNpmSpecifier(path)}`;
+ staticImports.add(specifier);
+ }
+ }
+ }
+
+ // Add implicit stylesheets.
+ for (const specifier of getImplicitStylesheets(staticImports)) {
+ stylesheets.add(specifier);
+ if (specifier.startsWith("npm:")) {
+ const path = await resolveNpmImport(root, specifier.slice("npm:".length));
+ resolutions.set(specifier, path);
+ await populateNpmCache(root, path);
+ }
+ }
+
+ // Add implicit downloads. (This should maybe be stored separately rather
+ // than being tossed into global imports, but it works for now.)
+ for (const specifier of getImplicitDownloads(globalImports)) {
+ globalImports.add(specifier);
+ if (specifier.startsWith("npm:")) {
+ const path = await resolveNpmImport(root, specifier.slice("npm:".length));
+ resolutions.set(specifier, path);
+ await populateNpmCache(root, path);
+ }
+ }
+
+ function resolveImport(specifier: string): string {
+ return isPathImport(specifier)
+ ? relativePath(path, resolveImportPath(root, resolvePath(path, specifier)))
+ : builtins.has(specifier)
+ ? relativePath(path, builtins.get(specifier)!)
+ : specifier.startsWith("observablehq:")
+ ? relativePath(path, `/_observablehq/${specifier.slice("observablehq:".length)}.js`)
+ : resolutions.has(specifier)
+ ? relativePath(path, resolutions.get(specifier)!)
+ : specifier.startsWith("npm:")
+ ? `https://cdn.jsdelivr.net/npm/${specifier.slice("npm:".length)}`
+ : specifier;
+ }
+
+ function resolveFile(specifier: string): string {
+ return relativePath(path, resolveFilePath(root, resolvePath(path, specifier)));
+ }
+
+ function resolveStylesheet(specifier: string): string {
+ return isPathImport(specifier)
+ ? relativePath(path, resolveStylesheetPath(root, resolvePath(path, specifier)))
+ : specifier.startsWith("observablehq:")
+ ? relativePath(path, `/_observablehq/${specifier.slice("observablehq:".length)}`)
+ : resolutions.has(specifier)
+ ? relativePath(path, resolutions.get(specifier)!)
+ : specifier.startsWith("npm:")
+ ? `https://cdn.jsdelivr.net/npm/${specifier.slice("npm:".length)}`
+ : specifier;
+ }
+
+ return {
+ hash: hash.digest("hex"),
+ assets,
+ files,
+ localImports,
+ globalImports,
+ staticImports,
+ stylesheets,
+ resolveFile,
+ resolveImport,
+ resolveStylesheet
+ };
+}
+
+/**
+ * Returns the import resolver used for transpiling local modules. Unlike
+ * getResolvers, this is independent of any specific page, and is done without
+ * knowing the transitive imports ahead of time. But it should be consistent
+ * with the resolveImport returned by getResolvers (assuming caching).
+ */
+export function getModuleResolver(root: string, path: string): (specifier: string) => Promise {
+ const servePath = `/${join("_import", path)}`;
+ return async (specifier) => {
+ return isPathImport(specifier)
+ ? relativePath(servePath, resolveImportPath(root, resolvePath(path, specifier)))
+ : builtins.has(specifier)
+ ? relativePath(servePath, builtins.get(specifier)!)
+ : specifier.startsWith("observablehq:")
+ ? relativePath(servePath, `/_observablehq/${specifier.slice("observablehq:".length)}.js`)
+ : specifier.startsWith("npm:")
+ ? relativePath(servePath, await resolveNpmImport(root, specifier.slice("npm:".length)))
+ : specifier;
+ };
+}
+
+export function resolveStylesheetPath(root: string, path: string): string {
+ return `/${join("_import", path)}?sha=${getFileHash(root, path)}`;
+}
+
+export function resolveImportPath(root: string, path: string): string {
+ return `/${join("_import", path)}?sha=${getModuleHash(root, path)}`;
+}
+
+export function resolveFilePath(root: string, path: string): string {
+ return `/${join("_file", path)}?sha=${getFileHash(root, path)}`;
+}
+
+// Returns any inputs that are not declared in outputs. These typically refer to
+// symbols provided by the standard library, such as d3 and Inputs.
+function findFreeInputs(page: MarkdownPage): Set {
+ const outputs = new Set(defaultGlobals).add("display").add("view").add("visibility").add("invalidation");
+ const inputs = new Set();
+
+ // Compute all declared variables.
+ for (const {node} of page.code) {
+ if (node.declarations) {
+ for (const {name} of node.declarations) {
+ outputs.add(name);
+ }
+ }
+ }
+
+ // Compute all unbound references.
+ for (const {node} of page.code) {
+ for (const {name} of node.references) {
+ if (!outputs.has(name)) {
+ inputs.add(name);
+ }
+ }
+ }
+
+ return inputs;
+}
diff --git a/src/rollup.ts b/src/rollup.ts
index 547a1126f..985353bf2 100644
--- a/src/rollup.ts
+++ b/src/rollup.ts
@@ -6,12 +6,13 @@ import type {AstNode, OutputChunk, Plugin, ResolveIdResult} from "rollup";
import {rollup} from "rollup";
import esbuild from "rollup-plugin-esbuild";
import {getClientPath} from "./files.js";
-import {getStringLiteralValue, isStringLiteral} from "./javascript/features.js";
-import {isPathImport, resolveNpmImport} from "./javascript/imports.js";
+import type {StringLiteral} from "./javascript/source.js";
+import {getStringLiteralValue, isStringLiteral} from "./javascript/source.js";
+import {resolveNpmImport} from "./npm.js";
import {getObservableUiOrigin} from "./observableApiClient.js";
+import {isPathImport, relativePath} from "./path.js";
import {Sourcemap} from "./sourcemap.js";
import {THEMES, renderTheme} from "./theme.js";
-import {relativeUrl} from "./url.js";
const STYLE_MODULES = {
"observablehq:default.css": getClientPath("./src/style/default.css"),
@@ -19,7 +20,13 @@ const STYLE_MODULES = {
};
// These libraries are currently bundled in to a wrapper.
-const BUNDLED_MODULES = ["@observablehq/inputs", "minisearch"];
+const BUNDLED_MODULES = [
+ "@observablehq/inputs", // observablehq:stdlib/inputs.js
+ "@observablehq/inspector", // observablehq:runtime.js
+ "@observablehq/runtime", // observablehq:runtime.js
+ "isoformat", // observablehq:runtime.js
+ "minisearch" // observablehq:search.js
+];
function rewriteInputsNamespace(code: string) {
return code.replace(/\b__ns__\b/g, "inputs-3a86ea");
@@ -36,23 +43,35 @@ export async function bundleStyles({path, theme}: {path?: string; theme?: string
return rewriteInputsNamespace(text); // TODO only for inputs
}
-export async function rollupClient(clientPath: string, {minify = false} = {}): Promise {
+export async function rollupClient(
+ input: string,
+ root: string,
+ path: string,
+ {define, minify}: {define?: {[key: string]: string}; minify?: boolean} = {}
+): Promise {
const bundle = await rollup({
- input: clientPath,
+ input,
external: [/^https:/],
plugins: [
nodeResolve({resolveOnly: BUNDLED_MODULES}),
- importResolve(clientPath),
+ importResolve(input, root, path),
esbuild({
target: "es2022",
exclude: [], // don’t exclude node_modules
+ keepNames: true,
minify,
define: {
- "process.env.OBSERVABLE_ORIGIN": JSON.stringify(String(getObservableUiOrigin()).replace(/\/$/, ""))
+ "global.__minisearch": '"./minisearch.json"',
+ "process.env.OBSERVABLE_ORIGIN": JSON.stringify(String(getObservableUiOrigin()).replace(/\/$/, "")),
+ ...define
}
}),
- importMetaResolve()
- ]
+ importMetaResolve(root, path)
+ ],
+ onwarn(message, warn) {
+ if (message.code === "CIRCULAR_DEPENDENCY") return;
+ warn(message);
+ }
});
try {
const output = await bundle.generate({format: "es"});
@@ -76,48 +95,46 @@ function rewriteTypeScriptImports(code: string): string {
return code.replace(/(?<=\bimport\(([`'"])[\w./]+)\.ts(?=\1\))/g, ".js");
}
-function importResolve(clientPath: string): Plugin {
+function importResolve(input: string, root: string, path: string): Plugin {
+ async function resolve(specifier: string | AstNode): Promise {
+ return typeof specifier !== "string" || specifier === input
+ ? null
+ : specifier.startsWith("observablehq:")
+ ? {id: relativePath(path, `/_observablehq/${specifier.slice("observablehq:".length)}.js`), external: true}
+ : specifier === "npm:@observablehq/runtime"
+ ? {id: relativePath(path, "/_observablehq/runtime.js"), external: true}
+ : specifier === "npm:@observablehq/stdlib" || specifier === "@observablehq/stdlib"
+ ? {id: relativePath(path, "/_observablehq/stdlib.js"), external: true}
+ : specifier === "npm:@observablehq/dot"
+ ? {id: relativePath(path, "/_observablehq/stdlib/dot.js"), external: true} // TODO publish to npm
+ : specifier === "npm:@observablehq/duckdb"
+ ? {id: relativePath(path, "/_observablehq/stdlib/duckdb.js"), external: true} // TODO publish to npm
+ : specifier === "npm:@observablehq/inputs"
+ ? {id: relativePath(path, "/_observablehq/stdlib/inputs.js"), external: true} // TODO publish to npm
+ : specifier === "npm:@observablehq/mermaid"
+ ? {id: relativePath(path, "/_observablehq/stdlib/mermaid.js"), external: true} // TODO publish to npm
+ : specifier === "npm:@observablehq/tex"
+ ? {id: relativePath(path, "/_observablehq/stdlib/tex.js"), external: true} // TODO publish to npm
+ : specifier === "npm:@observablehq/sqlite"
+ ? {id: relativePath(path, "/_observablehq/stdlib/sqlite.js"), external: true} // TODO publish to npm
+ : specifier === "npm:@observablehq/xlsx"
+ ? {id: relativePath(path, "/_observablehq/stdlib/xlsx.js"), external: true} // TODO publish to npm
+ : specifier === "npm:@observablehq/zip"
+ ? {id: relativePath(path, "/_observablehq/stdlib/zip.js"), external: true} // TODO publish to npm
+ : specifier.startsWith("npm:")
+ ? {id: relativePath(path, await resolveNpmImport(root, specifier.slice("npm:".length))), external: true}
+ : !isPathImport(specifier) && !BUNDLED_MODULES.includes(specifier) // e.g., inputs.js imports "htl"
+ ? {id: relativePath(path, await resolveNpmImport(root, specifier)), external: true}
+ : null;
+ }
return {
name: "resolve-import",
- resolveId: (specifier) => resolveImport(clientPath, specifier),
- resolveDynamicImport: (specifier) => resolveImport(clientPath, specifier)
+ resolveId: resolve,
+ resolveDynamicImport: resolve
};
}
-// TODO Consolidate with createImportResolver.
-async function resolveImport(source: string, specifier: string | AstNode): Promise {
- return typeof specifier !== "string"
- ? null
- : specifier.startsWith("observablehq:")
- ? {id: relativeUrl(source, getClientPath(`./src/client/${specifier.slice("observablehq:".length)}.js`)), external: true} // prettier-ignore
- : specifier === "npm:@observablehq/runtime"
- ? {id: relativeUrl(source, getClientPath("./src/client/runtime.js")), external: true}
- : specifier === "npm:@observablehq/stdlib"
- ? {id: relativeUrl(source, getClientPath("./src/client/stdlib.js")), external: true}
- : specifier === "npm:@observablehq/dot"
- ? {id: relativeUrl(source, getClientPath("./src/client/stdlib/dot.js")), external: true} // TODO publish to npm
- : specifier === "npm:@observablehq/duckdb"
- ? {id: relativeUrl(source, getClientPath("./src/client/stdlib/duckdb.js")), external: true} // TODO publish to npm
- : specifier === "npm:@observablehq/inputs"
- ? {id: relativeUrl(source, getClientPath("./src/client/stdlib/inputs.js")), external: true} // TODO publish to npm
- : specifier === "npm:@observablehq/mermaid"
- ? {id: relativeUrl(source, getClientPath("./src/client/stdlib/mermaid.js")), external: true} // TODO publish to npm
- : specifier === "npm:@observablehq/tex"
- ? {id: relativeUrl(source, getClientPath("./src/client/stdlib/tex.js")), external: true} // TODO publish to npm
- : specifier === "npm:@observablehq/sqlite"
- ? {id: relativeUrl(source, getClientPath("./src/client/stdlib/sqlite.js")), external: true} // TODO publish to npm
- : specifier === "npm:@observablehq/xlsx"
- ? {id: relativeUrl(source, getClientPath("./src/client/stdlib/xlsx.js")), external: true} // TODO publish to npm
- : specifier === "npm:@observablehq/zip"
- ? {id: relativeUrl(source, getClientPath("./src/client/stdlib/zip.js")), external: true} // TODO publish to npm
- : specifier.startsWith("npm:")
- ? {id: await resolveNpmImport(specifier.slice("npm:".length))}
- : source !== specifier && !isPathImport(specifier) && !BUNDLED_MODULES.includes(specifier)
- ? {id: await resolveNpmImport(specifier), external: true}
- : null;
-}
-
-function importMetaResolve(): Plugin {
+function importMetaResolve(root: string, path: string): Plugin {
return {
name: "resolve-import-meta-resolve",
async transform(code) {
@@ -143,10 +160,11 @@ function importMetaResolve(): Plugin {
const output = new Sourcemap(code);
for (const node of resolves) {
- const specifier = getStringLiteralValue(node.arguments[0]);
+ const source = node.arguments[0];
+ const specifier = getStringLiteralValue(source as StringLiteral);
if (specifier.startsWith("npm:")) {
- const resolution = await resolveNpmImport(specifier.slice("npm:".length));
- output.replaceLeft(node.start, node.end, JSON.stringify(resolution));
+ const resolution = relativePath(path, await resolveNpmImport(root, specifier.slice("npm:".length)));
+ output.replaceLeft(source.start, source.end, JSON.stringify(resolution));
}
}
diff --git a/src/search.ts b/src/search.ts
index e833fe9fb..00aba9b37 100644
--- a/src/search.ts
+++ b/src/search.ts
@@ -5,7 +5,7 @@ import type {Config} from "./config.js";
import {visitMarkdownFiles} from "./files.js";
import type {Logger} from "./logger.js";
import {parseMarkdown} from "./markdown.js";
-import {faint} from "./tty.js";
+import {faint, strikethrough} from "./tty.js";
// Avoid reindexing too often in preview.
const indexCache = new WeakMap();
@@ -48,7 +48,7 @@ export async function searchIndex(config: Config, effects = defaultEffects): Pro
const listed = pagePaths.has(`/${file.slice(0, -3)}`);
const indexed = data?.index === undefined ? listed : Boolean(data.index);
if (!indexed) {
- if (listed) effects.logger.log(`${faint("skip")} ${file}`);
+ if (listed) effects.logger.log(`${faint("index")} ${strikethrough(path)} ${faint("(skipped)")}`);
continue;
}
@@ -68,7 +68,7 @@ export async function searchIndex(config: Config, effects = defaultEffects): Pro
.replaceAll(/[\u0300-\u036f]/g, "")
.replace(/[^\p{L}\p{N}]/gu, " "); // keep letters & numbers
- effects.logger.log(`${faint("search indexing")} ${path}`);
+ effects.logger.log(`${faint("index")} ${path}`);
index.add({id, title, text, keywords: normalizeKeywords(data?.keywords)});
}
diff --git a/src/sourcemap.ts b/src/sourcemap.ts
index 4f3c1349f..dd7fb1274 100644
--- a/src/sourcemap.ts
+++ b/src/sourcemap.ts
@@ -39,20 +39,20 @@ export class Sourcemap {
}
return lo;
}
- insertLeft(index: number, value: string): void {
- this.replaceLeft(index, index, value);
+ insertLeft(index: number, value: string): typeof this {
+ return this.replaceLeft(index, index, value);
}
- insertRight(index: number, value: string): void {
- this.replaceRight(index, index, value);
+ insertRight(index: number, value: string): typeof this {
+ return this.replaceRight(index, index, value);
}
- delete(start: number, end: number): void {
- this.replaceRight(start, end, "");
+ delete(start: number, end: number): typeof this {
+ return this.replaceRight(start, end, "");
}
- replaceLeft(start: number, end: number, value: string): void {
- this._edits.splice(this._bisectLeft(start), 0, {start, end, value});
+ replaceLeft(start: number, end: number, value: string): typeof this {
+ return this._edits.splice(this._bisectLeft(start), 0, {start, end, value}), this;
}
- replaceRight(start: number, end: number, value: string): void {
- this._edits.splice(this._bisectRight(start), 0, {start, end, value});
+ replaceRight(start: number, end: number, value: string): typeof this {
+ return this._edits.splice(this._bisectRight(start), 0, {start, end, value}), this;
}
translate(position: Position): Position {
let index = 0;
@@ -79,10 +79,11 @@ export class Sourcemap {
const l = positionSubtract(position, co);
return positionAdd(ci, l);
}
- trim(): void {
+ trim(): typeof this {
const input = this.input;
if (input.startsWith("\n")) this.delete(0, 1); // TODO better trim
if (input.endsWith("\n")) this.delete(input.length - 1, input.length); // TODO better trim
+ return this;
}
toString(): string {
let output = "";
diff --git a/src/theme.ts b/src/theme.ts
index 4ecb0f68e..9edae13a8 100644
--- a/src/theme.ts
+++ b/src/theme.ts
@@ -46,6 +46,8 @@ export function findTheme(name: string): Theme | undefined {
return THEMES.find((t) => t.name === name);
}
+export class InvalidThemeError extends Error {}
+
export function resolveTheme(names: string[]): string[] {
if (!names.length) return []; // preserve explicitly empty theme
const themes: Theme[] = [];
@@ -75,7 +77,7 @@ export function resolveTheme(names: string[]): string[] {
}
default: {
const theme = findTheme(name);
- if (!theme) throw new Error(`unknown theme: ${name}`);
+ if (!theme) throw new InvalidThemeError(`unknown theme: ${name}`);
themes.push(theme);
break;
}
diff --git a/src/tty.ts b/src/tty.ts
index 1b09aba11..b929a9f7a 100644
--- a/src/tty.ts
+++ b/src/tty.ts
@@ -7,6 +7,7 @@ export const faint = color(2, 22);
export const italic = color(3, 23);
export const underline = color(4, 24);
export const inverse = color(7, 27);
+export const strikethrough = color(9, 29);
export const red = color(31, 39);
export const green = color(32, 39);
export const yellow = color(33, 39);
diff --git a/src/url.ts b/src/url.ts
deleted file mode 100644
index cb554555a..000000000
--- a/src/url.ts
+++ /dev/null
@@ -1,32 +0,0 @@
-import {dirname, join} from "node:path";
-
-/**
- * Returns the normalized relative path from "/file/path/to/a" to
- * "/file/path/of/b". To make relative imports work, paths to the same directory
- * are prefixed with "./", and paths that start without a slash are considered
- * from the root.
- */
-export function relativeUrl(source: string, target: string): string {
- if (/^\w+:/.test(target)) return target;
- const from = join("/", source).split(/[/]+/g).slice(0, -1);
- const to = join("/", target).split(/[/]+/g);
- const f = to.pop()!;
- const m = from.length;
- const n = Math.min(m, to.length);
- let i = 0;
- while (i < n && from[i] === to[i]) ++i;
- const k = m - i;
- return (k ? "../".repeat(k) : "./") + to.slice(i).concat(f).join("/");
-}
-
-/**
- * Returns the path to the specified target within the given source root, which
- * defaults to ".", assuming that the target is a relative path such as an
- * import or fetch from the specified source.
- */
-export function resolvePath(source: string, target: string): string;
-export function resolvePath(root: string, source: string, target: string): string;
-export function resolvePath(root: string, source: string, target?: string): string {
- if (target === undefined) (target = source), (source = root), (root = ".");
- return join(root, target.startsWith("/") ? "." : dirname(source), target);
-}
diff --git a/test/deploy-test.ts b/test/deploy-test.ts
index 67364ffc3..b95e4f90e 100644
--- a/test/deploy-test.ts
+++ b/test/deploy-test.ts
@@ -21,26 +21,6 @@ import {
import {MockAuthEffects} from "./observableApiAuth-test.js";
import {MockConfigEffects} from "./observableApiConfig-test.js";
-// These files are implicitly generated. This may change over time, so they’re
-// enumerated here for clarity. TODO We should enforce that these files are
-// specifically uploaded, rather than just the number of files.
-const EXTRA_FILES: string[] = [
- "_observablehq/client.js",
- "_observablehq/runtime.js",
- "_observablehq/stdlib.js",
- "_observablehq/stdlib/dot.js",
- "_observablehq/stdlib/duckdb.js",
- "_observablehq/stdlib/inputs.css",
- "_observablehq/stdlib/inputs.js",
- "_observablehq/stdlib/mermaid.js",
- "_observablehq/stdlib/sqlite.js",
- "_observablehq/stdlib/tex.js",
- "_observablehq/stdlib/vega-lite.js",
- "_observablehq/stdlib/xlsx.js",
- "_observablehq/stdlib/zip.js",
- "_observablehq/style.css"
-];
-
interface MockDeployEffectsOptions {
apiKey?: string | null;
deployConfig?: DeployConfig | null;
@@ -148,7 +128,11 @@ describe("deploy", () => {
.handleGetCurrentUser()
.handleGetProject(DEPLOY_CONFIG)
.handlePostDeploy({projectId: DEPLOY_CONFIG.projectId, deployId})
- .handlePostDeployFile({deployId, repeat: EXTRA_FILES.length + 1})
+ .handlePostDeployFile({deployId, clientName: "index.html"})
+ .handlePostDeployFile({deployId, clientName: "_observablehq/theme-air,near-midnight.css"})
+ .handlePostDeployFile({deployId, clientName: "_observablehq/client.js"})
+ .handlePostDeployFile({deployId, clientName: "_observablehq/runtime.js"})
+ .handlePostDeployFile({deployId, clientName: "_observablehq/stdlib.js"})
.handlePostDeployUploaded({deployId})
.handleGetDeploy({deployId, deployStatus: "uploaded"})
.start();
@@ -165,13 +149,14 @@ describe("deploy", () => {
const oldTitle = `${TEST_CONFIG.title!} old`;
getCurrentObservableApi()
.handleGetCurrentUser()
- .handleGetProject({
- ...DEPLOY_CONFIG,
- title: oldTitle
- })
+ .handleGetProject({...DEPLOY_CONFIG, title: oldTitle})
.handleUpdateProject({projectId: DEPLOY_CONFIG.projectId, title: TEST_CONFIG.title!})
.handlePostDeploy({projectId: DEPLOY_CONFIG.projectId, deployId})
- .handlePostDeployFile({deployId, repeat: EXTRA_FILES.length + 1})
+ .handlePostDeployFile({deployId, clientName: "index.html"})
+ .handlePostDeployFile({deployId, clientName: "_observablehq/theme-air,near-midnight.css"})
+ .handlePostDeployFile({deployId, clientName: "_observablehq/client.js"})
+ .handlePostDeployFile({deployId, clientName: "_observablehq/runtime.js"})
+ .handlePostDeployFile({deployId, clientName: "_observablehq/stdlib.js"})
.handlePostDeployUploaded({deployId})
.handleGetDeploy({deployId})
.start();
@@ -191,7 +176,11 @@ describe("deploy", () => {
.handleGetCurrentUser()
.handleGetProject(deployConfig)
.handlePostDeploy({projectId: deployConfig.projectId, deployId})
- .handlePostDeployFile({deployId, repeat: EXTRA_FILES.length + 1})
+ .handlePostDeployFile({deployId, clientName: "index.html"})
+ .handlePostDeployFile({deployId, clientName: "_observablehq/theme-air,near-midnight.css"})
+ .handlePostDeployFile({deployId, clientName: "_observablehq/client.js"})
+ .handlePostDeployFile({deployId, clientName: "_observablehq/runtime.js"})
+ .handlePostDeployFile({deployId, clientName: "_observablehq/stdlib.js"})
.handlePostDeployUploaded({deployId})
.handleGetDeploy({deployId})
.start();
@@ -214,7 +203,11 @@ describe("deploy", () => {
})
.handlePostProject({projectId: DEPLOY_CONFIG.projectId})
.handlePostDeploy({projectId: DEPLOY_CONFIG.projectId, deployId})
- .handlePostDeployFile({deployId, repeat: EXTRA_FILES.length + 1})
+ .handlePostDeployFile({deployId, clientName: "index.html"})
+ .handlePostDeployFile({deployId, clientName: "_observablehq/theme-air,near-midnight.css"})
+ .handlePostDeployFile({deployId, clientName: "_observablehq/client.js"})
+ .handlePostDeployFile({deployId, clientName: "_observablehq/runtime.js"})
+ .handlePostDeployFile({deployId, clientName: "_observablehq/stdlib.js"})
.handlePostDeployUploaded({deployId})
.handleGetDeploy({deployId})
.start();
@@ -443,7 +436,11 @@ describe("deploy", () => {
.handleGetCurrentUser()
.handleGetProject(DEPLOY_CONFIG)
.handlePostDeploy({projectId: DEPLOY_CONFIG.projectId, deployId})
- .handlePostDeployFile({deployId, repeat: EXTRA_FILES.length + 1})
+ .handlePostDeployFile({deployId, clientName: "index.html"})
+ .handlePostDeployFile({deployId, clientName: "_observablehq/theme-air,near-midnight.css"})
+ .handlePostDeployFile({deployId, clientName: "_observablehq/client.js"})
+ .handlePostDeployFile({deployId, clientName: "_observablehq/runtime.js"})
+ .handlePostDeployFile({deployId, clientName: "_observablehq/stdlib.js"})
.handlePostDeployUploaded({deployId, status: 500})
.start();
const effects = new MockDeployEffects({deployConfig: DEPLOY_CONFIG});
@@ -554,7 +551,11 @@ describe("deploy", () => {
projectId: newProjectId
})
.handlePostDeploy({projectId: newProjectId, deployId})
- .handlePostDeployFile({deployId, repeat: EXTRA_FILES.length + 1})
+ .handlePostDeployFile({deployId, clientName: "index.html"})
+ .handlePostDeployFile({deployId, clientName: "_observablehq/theme-air,near-midnight.css"})
+ .handlePostDeployFile({deployId, clientName: "_observablehq/client.js"})
+ .handlePostDeployFile({deployId, clientName: "_observablehq/runtime.js"})
+ .handlePostDeployFile({deployId, clientName: "_observablehq/stdlib.js"})
.handlePostDeployUploaded({deployId})
.handleGetDeploy({deployId})
.start();
@@ -638,7 +639,11 @@ describe("deploy", () => {
.handleGetCurrentUser()
.handleGetProject(DEPLOY_CONFIG)
.handlePostDeploy({projectId: DEPLOY_CONFIG.projectId, deployId})
- .handlePostDeployFile({deployId, repeat: EXTRA_FILES.length + 1})
+ .handlePostDeployFile({deployId, clientName: "index.html"})
+ .handlePostDeployFile({deployId, clientName: "_observablehq/theme-air,near-midnight.css"})
+ .handlePostDeployFile({deployId, clientName: "_observablehq/client.js"})
+ .handlePostDeployFile({deployId, clientName: "_observablehq/runtime.js"})
+ .handlePostDeployFile({deployId, clientName: "_observablehq/stdlib.js"})
.handlePostDeployUploaded({deployId})
.handleGetDeploy({deployId})
.start();
diff --git a/test/files-test.ts b/test/files-test.ts
index 7348963ed..b640bdd68 100644
--- a/test/files-test.ts
+++ b/test/files-test.ts
@@ -1,6 +1,18 @@
import assert from "node:assert";
import {stat} from "node:fs/promises";
-import {maybeStat, prepareOutput, visitFiles, visitMarkdownFiles} from "../src/files.js";
+import {getClientPath, maybeStat, prepareOutput, visitFiles, visitMarkdownFiles} from "../src/files.js";
+
+describe("getClientPath(entry)", () => {
+ it("returns the relative path to the specified source", () => {
+ assert.strictEqual(getClientPath("src/client/main.js"), "src/client/main.js");
+ assert.strictEqual(getClientPath("./src/client/main.js"), "src/client/main.js");
+ assert.strictEqual(getClientPath("./src/client/main.ts"), "src/client/main.ts");
+ assert.strictEqual(getClientPath("./src/client/stdlib/resize.ts"), "src/client/stdlib/resize.ts");
+ });
+ it("returns a TypeScript (.ts) path if the JavaScript (.js) does not exist", () => {
+ assert.strictEqual(getClientPath("./src/client/stdlib/resize.js"), "src/client/stdlib/resize.ts");
+ });
+});
describe("prepareOutput(path)", () => {
it("does nothing if passed the current directory", async () => {
@@ -56,8 +68,11 @@ describe("visitMarkdownFiles(root)", () => {
});
});
-async function collect(generator: AsyncGenerator): Promise {
- const values: T[] = [];
- for await (const value of generator) values.push(value);
+async function collect(generator: AsyncGenerator): Promise {
+ const values: string[] = [];
+ for await (const value of generator) {
+ if (value.startsWith(".observablehq/cache/")) continue;
+ values.push(value);
+ }
return values;
}
diff --git a/test/hash-test.ts b/test/hash-test.ts
deleted file mode 100644
index f63375f63..000000000
--- a/test/hash-test.ts
+++ /dev/null
@@ -1,9 +0,0 @@
-import assert from "node:assert";
-import {computeHash} from "../src/hash.js";
-
-describe("computeHash(content)", () => {
- it("returns the expected result", () => {
- assert.deepStrictEqual(computeHash(""), "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855");
- assert.deepStrictEqual(computeHash("hello"), "2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824");
- });
-});
diff --git a/test/html-test.ts b/test/html-test.ts
index dfc0fbe19..ae6bc22c8 100644
--- a/test/html-test.ts
+++ b/test/html-test.ts
@@ -1,5 +1,6 @@
import assert from "node:assert";
-import {html} from "../src/html.js";
+import {createHash} from "node:crypto";
+import {findAssets, html, rewriteHtml} from "../src/html.js";
describe("html(strings, ...values)", () => {
it("returns an instance of Html", () => {
@@ -37,3 +38,82 @@ describe("html(strings, ...values)", () => {
assert.deepStrictEqual(String(html`${html.unsafe("dollar£")} `), "dollar£ ");
});
});
+
+describe("findAssets(html, path)", () => {
+ it("finds local files from img[src]", () => {
+    const html = '<img src="./test.png">';
+ assert.deepStrictEqual(findAssets(html, "foo"), new Set(["./test.png"]));
+ });
+ it("finds local files from img[srcset]", () => {
+    const html = '<img srcset="./large.jpg 2x, ./small.jpg 1x">'; // prettier-ignore
+ assert.deepStrictEqual(findAssets(html, "foo"), new Set(["./large.jpg", "./small.jpg"]));
+ });
+ it("finds local files from video[src]", () => {
+    const html = '<video src="./observable.mov" controls></video>'; // prettier-ignore
+ assert.deepStrictEqual(findAssets(html, "foo"), new Set(["./observable.mov"]));
+ });
+ it("finds local files from video source[src]", () => {
+    const html = '<video controls><source src="./observable.mp4" type="video/mp4"><source src="./observable.mov" type="video/quicktime"></video>'; // prettier-ignore
+ assert.deepStrictEqual(findAssets(html, "foo"), new Set(["./observable.mp4", "./observable.mov"]));
+ });
+ it("finds local files from picture source[srcset]", () => {
+    const html = '<picture><source srcset="./observable-logo-narrow.png" media="(max-width: 640px)"><source srcset="./observable-logo-wide.png"></picture>'; // prettier-ignore
+ assert.deepStrictEqual(findAssets(html, "foo"), new Set(["./observable-logo-narrow.png", "./observable-logo-wide.png"])); // prettier-ignore
+ });
+ it("ignores non-local files from img[src]", () => {
+    const html = '<img src="https://example.com/test.png">'; // prettier-ignore
+ assert.deepStrictEqual(findAssets(html, "foo"), new Set());
+ });
+ it("ignores non-local files from img[srcset]", () => {
+    const html = '<img srcset="https://example.com/large.jpg 2x, ./small.jpg 1x">'; // prettier-ignore
+ assert.deepStrictEqual(findAssets(html, "foo"), new Set(["./small.jpg"]));
+ });
+ it("ignores non-local files from video source[src]", () => {
+    const html = '<video><source src="https://example.com/observable.mp4" type="video/mp4"><source src="./observable.mov" type="video/quicktime"></video>'; // prettier-ignore
+ assert.deepStrictEqual(findAssets(html, "foo"), new Set(["./observable.mov"]));
+ });
+});
+
+describe("rewriteHtml(html, resolve)", () => {
+ const resolve = (s: string) => (/^\w+:/.test(s) ? s : `${s}?sha=${createHash("sha256").update(s).digest("hex")}`);
+ it("rewrites local files from img[src]", () => {
+ const html = ' ';
+ const expected = ' ';
+ assert.strictEqual(rewriteHtml(html, resolve), expected);
+ });
+ it("rewrites local files from img[srcset]", () => {
+ const html = ' '; // prettier-ignore
+ const expected = ' '; // prettier-ignore
+ assert.strictEqual(rewriteHtml(html, resolve), expected);
+ });
+ it("rewrites local files from video[src]", () => {
+ const html = ' '; // prettier-ignore
+ const expected = ' '; // prettier-ignore
+ assert.strictEqual(rewriteHtml(html, resolve), expected);
+ });
+ it("rewrites local files from video source[src]", () => {
+ const html = ' '; // prettier-ignore
+ const expected = ' '; // prettier-ignore
+ assert.strictEqual(rewriteHtml(html, resolve), expected);
+ });
+ it("rewrites local files from picture source[srcset]", () => {
+ const html = ' '; // prettier-ignore
+ const expected = ' '; // prettier-ignore
+ assert.strictEqual(rewriteHtml(html, resolve), expected);
+ });
+ it("ignores non-local files from img[src]", () => {
+    const html = '<img src="https://example.com/test.png">'; // prettier-ignore
+ const expected = html;
+ assert.strictEqual(rewriteHtml(html, resolve), expected);
+ });
+ it("ignores non-local files from img[srcset]", () => {
+ const html = ' '; // prettier-ignore
+ const expected = ' '; // prettier-ignore
+ assert.strictEqual(rewriteHtml(html, resolve), expected);
+ });
+ it("ignores non-local files from video source[src]", () => {
+ const html = ' '; // prettier-ignore
+ const expected = ' '; // prettier-ignore
+ assert.strictEqual(rewriteHtml(html, resolve), expected);
+ });
+});
diff --git a/test/isLocalImport-test.ts b/test/isLocalImport-test.ts
deleted file mode 100644
index a19bb9c8a..000000000
--- a/test/isLocalImport-test.ts
+++ /dev/null
@@ -1,47 +0,0 @@
-import assert from "node:assert";
-import {isLocalImport} from "../src/javascript/imports.js";
-import {resolvePath} from "../src/url.js";
-
-describe("isLocalImport", () => {
- it("identifies a local import", async () => {
- const root = "docs";
- const sourcePath = "/hello.md";
- const importValue = "./helpers.js";
- assert.equal(resolvePath(root, sourcePath, importValue), "docs/helpers.js");
- assert(isLocalImport(importValue, sourcePath));
- });
- it("relative paths are correctly handled", async () => {
- const root = "docs";
- const sourcePath = "/subDocs/hello.md";
- const importValue = "./helpers.js";
- assert.equal(resolvePath(root, sourcePath, importValue), "docs/subDocs/helpers.js");
- assert(isLocalImport(importValue, sourcePath));
- });
- it("root and sourcePath arguments can correctly handle slashes", async () => {
- const root = "docs/";
- const sourcePath = "/hello.md/";
- const importValue = "./helpers.js";
- assert.equal(resolvePath(root, sourcePath, importValue), "docs/helpers.js");
- assert(isLocalImport(importValue, sourcePath));
- });
- it("identifies a local import from a nested sourcePath", async () => {
- const root = "docs";
- const sourcePath = "/subDocs/subDocs2/hello.md";
- const importValue = "../../random.js";
- assert.equal(resolvePath(root, sourcePath, importValue), "docs/random.js");
- assert(isLocalImport(importValue, sourcePath));
- });
- it("cannot go to an ancestor directory beyond the root", async () => {
- const root = "docs";
- const sourcePath = "/hello.md";
- const importValue1 = "../../../random.js";
- assert.equal(resolvePath(root, sourcePath, importValue1), "../../random.js");
- assert.equal(isLocalImport(importValue1, sourcePath), false);
- const importValue2 = "./../../random.js";
- assert.equal(resolvePath(root, sourcePath, importValue2), "../random.js");
- assert.equal(isLocalImport(importValue2, sourcePath), false);
- const importValue3 = "/../random.js";
- assert.equal(resolvePath(root, sourcePath, importValue3), "random.js");
- assert.equal(isLocalImport(importValue3, sourcePath), false);
- });
-});
diff --git a/test/javascript-test.ts b/test/javascript-test.ts
deleted file mode 100644
index 8355b3d31..000000000
--- a/test/javascript-test.ts
+++ /dev/null
@@ -1,128 +0,0 @@
-import assert from "node:assert";
-import {readdirSync, statSync} from "node:fs";
-import {mkdir, readFile, unlink, writeFile} from "node:fs/promises";
-import {basename, join, resolve} from "node:path";
-import {isEnoent} from "../src/error.js";
-import {transpileJavaScript} from "../src/javascript.js";
-import {renderDefineCell} from "../src/render.js";
-import {mockJsDelivr} from "./mocks/jsdelivr.js";
-
-function isJsFile(inputRoot: string, fileName: string) {
- if (!fileName.endsWith(".js")) return false;
- const path = join(inputRoot, fileName);
- return statSync(path).isFile();
-}
-
-function runTests({
- inputRoot,
- outputRoot,
- filter = () => true
-}: {
- inputRoot: string;
- outputRoot: string;
- filter?: (name: string) => boolean;
-}) {
- for (const name of readdirSync(inputRoot)) {
- if (!isJsFile(inputRoot, name) || !filter(name)) continue;
- const only = name.startsWith("only.");
- const skip = name.startsWith("skip.");
- const outname = only || skip ? name.slice(5) : name;
- const path = join(inputRoot, name);
- (only ? it.only : skip ? it.skip : it)(path, async () => {
- const outfile = resolve(outputRoot, `${basename(outname, ".js")}.js`);
- const diffile = resolve(outputRoot, `${basename(outname, ".js")}-changed.js`);
- const {body, ...transpile} = transpileJavaScript(await readFile(path, "utf8"), {
- id: "0",
- root: inputRoot,
- sourcePath: name,
- verbose: false
- });
- const actual = renderDefineCell({body: await body(), ...transpile});
- let expected;
-
- try {
- expected = await readFile(outfile, "utf8");
- } catch (error) {
- if (!isEnoent(error) || process.env.CI === "true") throw error;
- console.warn(`! generating ${outfile}`);
- await mkdir(outputRoot, {recursive: true});
- await writeFile(outfile, actual, "utf8");
- return;
- }
-
- const equal = expected === actual;
-
- if (equal) {
- if (process.env.CI !== "true") {
- try {
- await unlink(diffile);
- console.warn(`! deleted ${diffile}`);
- } catch (error) {
- if (!isEnoent(error)) throw error;
- }
- }
- } else {
- console.warn(`! generating ${diffile}`);
- await writeFile(diffile, actual, "utf8");
- }
-
- assert.ok(equal, `${name} must match snapshot`);
- });
- }
-}
-
-describe("transpileJavaScript(input, options)", () => {
- mockJsDelivr();
-
- runTests({
- inputRoot: "test/input",
- outputRoot: "test/output"
- });
-
- runTests({
- inputRoot: "test/input/imports",
- outputRoot: "test/output/imports",
- filter: (name) => name.endsWith("-import.js")
- });
-
- it("trims leading and trailing newlines", async () => {
- const {body} = transpileJavaScript("\ntest\n", {
- id: "0",
- root: "test/input",
- sourcePath: "index.js",
- verbose: false
- });
- assert.strictEqual(await body(), "async (test,display) => {\ndisplay(await(\ntest\n))\n}");
- });
- it("rethrows unexpected errors", () => {
- const expected = new Error();
- assert.throws(
- () =>
- transpileJavaScript(
- {
- toString(): string {
- throw expected;
- }
- } as string,
- {
- id: "0",
- root: "test/input",
- sourcePath: "index.js",
- verbose: false
- }
- ),
- expected
- );
- });
- it("respects the sourceLine option", async () => {
- const {body} = transpileJavaScript("foo,", {
- id: "0",
- root: "test/input",
- sourcePath: "index.js",
- sourceLine: 12,
- inline: true,
- verbose: false
- });
- assert.strictEqual(await body(), '() => { throw new SyntaxError("invalid expression"); }');
- });
-});
diff --git a/test/javascript/assignments-test.ts b/test/javascript/assignments-test.ts
new file mode 100644
index 000000000..1c445f777
--- /dev/null
+++ b/test/javascript/assignments-test.ts
@@ -0,0 +1,36 @@
+import assert from "node:assert";
+import type {Program} from "acorn";
+import {Parser} from "acorn";
+import {checkAssignments} from "../../src/javascript/assignments.js";
+import {findReferences} from "../../src/javascript/references.js";
+
+describe("checkAssignments(node, references, input)", () => {
+ it("disallows external assignments", () => {
+ const input = "foo = 1;";
+ const program = parse(input);
+ const references = findReferences(program);
+ assert.throws(() => checkAssignments(program, references, input), /external variable 'foo'/);
+ });
+ it("disallows global assignments", () => {
+ const input = "Array = 1;";
+ const program = parse(input);
+ const references = findReferences(program);
+ assert.throws(() => checkAssignments(program, references, input), /global 'Array'/);
+ });
+ it("allows local assignments", () => {
+ const input = "let foo = 1;\nfoo = 2;";
+ const program = parse(input);
+ const references = findReferences(program);
+ assert.strictEqual(checkAssignments(program, references, input), undefined);
+ });
+ it("allows member assignments", () => {
+ const input = "window.foo = 1;";
+ const program = parse(input);
+ const references = findReferences(program);
+ assert.strictEqual(checkAssignments(program, references, input), undefined);
+ });
+});
+
+function parse(input: string): Program {
+ return Parser.parse(input, {ecmaVersion: 13, sourceType: "module"});
+}
diff --git a/test/javascript/features-test.ts b/test/javascript/features-test.ts
deleted file mode 100644
index 352b1aab2..000000000
--- a/test/javascript/features-test.ts
+++ /dev/null
@@ -1,24 +0,0 @@
-import assert from "node:assert";
-import type {Program} from "acorn";
-import {Parser} from "acorn";
-import {findFeatures} from "../../src/javascript/features.js";
-import {findReferences} from "../../src/javascript/references.js";
-
-describe("findFeatures(node, path, references, input)", () => {
- it("finds FileAttachment", () => {
- assert.deepStrictEqual(features('FileAttachment("foo.json")'), [{type: "FileAttachment", name: "foo.json"}]);
- });
- it("finds imported FileAttachment", () => {
- assert.deepStrictEqual(features('import {FileAttachment} from "npm:@observablehq/stdlib";\nFileAttachment("foo.json")'), [{type: "FileAttachment", name: "foo.json"}]); // prettier-ignore
- assert.deepStrictEqual(features('import {FileAttachment as F} from "npm:@observablehq/stdlib";\nF("foo.json")'), [{type: "FileAttachment", name: "foo.json"}]); // prettier-ignore
- });
-});
-
-function features(input) {
- const node = parse(input);
- return findFeatures(node, "index.js", findReferences(node), input);
-}
-
-function parse(input: string): Program {
- return Parser.parse(input, {ecmaVersion: 13, sourceType: "module"});
-}
diff --git a/test/javascript/files-test.ts b/test/javascript/files-test.ts
new file mode 100644
index 000000000..9f5f02a44
--- /dev/null
+++ b/test/javascript/files-test.ts
@@ -0,0 +1,133 @@
+import assert from "node:assert";
+import type {Program} from "acorn";
+import {Parser} from "acorn";
+import {findFiles} from "../../src/javascript/files.js";
+
+// prettier-ignore
+describe("findFiles(node, input)", () => {
+ it("finds FileAttachment", () => {
+ assert.deepStrictEqual(files('FileAttachment("foo.json")'), [{name: "./foo.json", method: "json"}]);
+ });
+ it("finds imported FileAttachment", () => {
+ assert.deepStrictEqual(files('import {FileAttachment} from "npm:@observablehq/stdlib";\nFileAttachment("foo.json")'), [{name: "./foo.json", method: "json"}]);
+ assert.deepStrictEqual(files('import {FileAttachment as F} from "npm:@observablehq/stdlib";\nF("foo.json")'), [{name: "./foo.json", method: "json"}]);
+ });
+ it("allows relative paths", () => {
+ assert.deepStrictEqual(files('FileAttachment("./foo.json")', {path: "data/bar.js"}), [{name: "./foo.json", method: "json"}]);
+ assert.deepStrictEqual(files('FileAttachment("../foo.json")', {path: "data/bar.js"}), [{name: "../foo.json", method: "json"}]);
+ });
+ it("allows absolute paths", () => {
+ assert.deepStrictEqual(files('FileAttachment("/foo.json")'), [{name: "./foo.json", method: "json"}]);
+ assert.deepStrictEqual(files('FileAttachment("/foo/bar.json")'), [{name: "./foo/bar.json", method: "json"}]);
+ });
+ it("allows double-quoted literals", () => {
+ assert.deepStrictEqual(files('FileAttachment("foo.json")'), [{name: "./foo.json", method: "json"}]);
+ });
+ it("allows single-quoted literals", () => {
+ assert.deepStrictEqual(files("FileAttachment('foo.json')"), [{name: "./foo.json", method: "json"}]);
+ });
+ it("allows template-quoted literals", () => {
+ assert.deepStrictEqual(files("FileAttachment(`foo.json`)"), [{name: "./foo.json", method: "json"}]);
+ });
+ it("disallows multiple arguments", () => {
+ assert.throws(() => files("FileAttachment('foo.json', false)"), /requires a single literal string argument/);
+ });
+ it("disallows non-string arguments", () => {
+ assert.throws(() => files("FileAttachment(42)"), /requires a single literal string argument/);
+ });
+ it("disallows dynamic arguments", () => {
+ assert.throws(() => files("FileAttachment(`${42}`)"), /requires a single literal string argument/);
+ assert.throws(() => files("FileAttachment('foo' + 42 + '.json')"), /requires a single literal string argument/);
+ });
+ it("resolves the name relative to the source", () => {
+ assert.deepStrictEqual(files('FileAttachment("foo.json")', {path: "bar.js"}), [{name: "./foo.json", method: "json"}]);
+ assert.deepStrictEqual(files('FileAttachment("./foo.json")', {path: "bar.js"}), [{name: "./foo.json", method: "json"}]);
+ assert.deepStrictEqual(files('FileAttachment("/foo.json")', {path: "bar.js"}), [{name: "./foo.json", method: "json"}]);
+ assert.deepStrictEqual(files('FileAttachment("data/foo.json")', {path: "bar.js"}), [{name: "./data/foo.json", method: "json"}]);
+ assert.deepStrictEqual(files('FileAttachment("foo.json")', {path: "data/bar.js"}), [{name: "./foo.json", method: "json"}]);
+ assert.deepStrictEqual(files('FileAttachment("./foo.json")', {path: "data/bar.js"}), [{name: "./foo.json", method: "json"}]);
+ assert.deepStrictEqual(files('FileAttachment("../foo.json")', {path: "data/bar.js"}), [{name: "../foo.json", method: "json"}]);
+ assert.deepStrictEqual(files('FileAttachment("/foo.json")', {path: "data/bar.js"}), [{name: "../foo.json", method: "json"}]);
+ });
+ it("disallows paths outside the source root", () => {
+ assert.throws(() => files('FileAttachment("../foo.json")', {path: "bar.js"}), /non-local file path/);
+ assert.throws(() => files('FileAttachment("../../foo.json")', {path: "data/bar.js"}), /non-local file path/);
+ assert.throws(() => files('FileAttachment("/../foo.json")', {path: "bar.js"}), /non-local file path/);
+ assert.throws(() => files('FileAttachment("/../foo.json")', {path: "data/bar.js"}), /non-local file path/);
+ });
+ it("disallows non-paths", () => {
+ assert.throws(() => files('FileAttachment("https://example.com/foo.json")'), /non-local file path/);
+ assert.throws(() => files('FileAttachment("#foo.json")'), /non-local file path/);
+ });
+ it("sets the file method based on the member expression", () => {
+ assert.deepStrictEqual(files('FileAttachment("foo").arrayBuffer'), [{name: "./foo", method: "arrayBuffer"}]);
+ assert.deepStrictEqual(files('FileAttachment("foo").arrow'), [{name: "./foo", method: "arrow"}]);
+    assert.deepStrictEqual(files('FileAttachment("foo").arrow'), [{name: "./foo", method: "arrow"}]); // NOTE(review): exact duplicate of the previous assertion — likely a copy-paste slip; consider covering a distinct method here
+ assert.deepStrictEqual(files('FileAttachment("foo").blob'), [{name: "./foo", method: "blob"}]);
+ assert.deepStrictEqual(files('FileAttachment("foo").csv'), [{name: "./foo", method: "csv"}]);
+ assert.deepStrictEqual(files('FileAttachment("foo").html'), [{name: "./foo", method: "html"}]);
+ assert.deepStrictEqual(files('FileAttachment("foo").image'), [{name: "./foo", method: "image"}]);
+ assert.deepStrictEqual(files('FileAttachment("foo").json'), [{name: "./foo", method: "json"}]);
+ assert.deepStrictEqual(files('FileAttachment("foo").parquet'), [{name: "./foo", method: "parquet"}]);
+ assert.deepStrictEqual(files('FileAttachment("foo").sqlite'), [{name: "./foo", method: "sqlite"}]);
+ assert.deepStrictEqual(files('FileAttachment("foo").stream'), [{name: "./foo", method: "stream"}]);
+ assert.deepStrictEqual(files('FileAttachment("foo").text'), [{name: "./foo", method: "text"}]);
+ assert.deepStrictEqual(files('FileAttachment("foo").tsv'), [{name: "./foo", method: "tsv"}]);
+ assert.deepStrictEqual(files('FileAttachment("foo").xlsx'), [{name: "./foo", method: "xlsx"}]);
+ assert.deepStrictEqual(files('FileAttachment("foo").xml'), [{name: "./foo", method: "xml"}]);
+ assert.deepStrictEqual(files('FileAttachment("foo").zip'), [{name: "./foo", method: "zip"}]);
+ });
+ it("otherwise sets the file method based on the file extension", () => {
+ assert.deepStrictEqual(files('FileAttachment("foo.arrow")'), [{name: "./foo.arrow", method: "arrow"}]);
+ assert.deepStrictEqual(files('FileAttachment("foo.csv")'), [{name: "./foo.csv", method: "csv"}]);
+ assert.deepStrictEqual(files('FileAttachment("foo.db")'), [{name: "./foo.db", method: "sqlite"}]);
+ assert.deepStrictEqual(files('FileAttachment("foo.html")'), [{name: "./foo.html", method: "html"}]);
+ assert.deepStrictEqual(files('FileAttachment("foo.json")'), [{name: "./foo.json", method: "json"}]);
+ assert.deepStrictEqual(files('FileAttachment("foo.parquet")'), [{name: "./foo.parquet", method: "parquet"}]);
+ assert.deepStrictEqual(files('FileAttachment("foo.sqlite")'), [{name: "./foo.sqlite", method: "sqlite"}]);
+ assert.deepStrictEqual(files('FileAttachment("foo.tsv")'), [{name: "./foo.tsv", method: "tsv"}]);
+ assert.deepStrictEqual(files('FileAttachment("foo.txt")'), [{name: "./foo.txt", method: "text"}]);
+ assert.deepStrictEqual(files('FileAttachment("foo.xlsx")'), [{name: "./foo.xlsx", method: "xlsx"}]);
+ assert.deepStrictEqual(files('FileAttachment("foo.xml")'), [{name: "./foo.xml", method: "xml"}]);
+ assert.deepStrictEqual(files('FileAttachment("foo.zip")'), [{name: "./foo.zip", method: "zip"}]);
+ });
+ it("the file method takes priority over the file extension", () => {
+ assert.deepStrictEqual(files('FileAttachment("foo.txt").csv'), [{name: "./foo.txt", method: "csv"}]);
+ assert.deepStrictEqual(files('FileAttachment("foo.txt").json'), [{name: "./foo.txt", method: "json"}]);
+ assert.deepStrictEqual(files('FileAttachment("foo.csv").text'), [{name: "./foo.csv", method: "text"}]);
+ assert.deepStrictEqual(files('FileAttachment("foo.csv").json'), [{name: "./foo.csv", method: "json"}]);
+ });
+ it("respects the given aliases", () => {
+ assert.deepStrictEqual(files('FileAttachment("foo.txt").csv', {aliases: []}), []);
+ assert.deepStrictEqual(files('File("foo.txt").csv', {aliases: ["File"]}), [{name: "./foo.txt", method: "csv"}]);
+ });
+ it("finds the import declaration", () => {
+ assert.deepStrictEqual(files('import {FileAttachment} from "npm:@observablehq/stdlib";\nFileAttachment("foo.txt").csv', {aliases: []}), [{name: "./foo.txt", method: "csv"}]);
+ });
+ it("finds the import declaration if aliased", () => {
+ assert.deepStrictEqual(files('import {FileAttachment as F} from "npm:@observablehq/stdlib";\nF("foo.txt").csv', {aliases: []}), [{name: "./foo.txt", method: "csv"}]);
+ });
+ it("finds the import declaration if aliased and masking a global", () => {
+ assert.deepStrictEqual(files('import {FileAttachment as File} from "npm:@observablehq/stdlib";\nFile("foo.txt").csv', {aliases: []}), [{name: "./foo.txt", method: "csv"}]);
+ });
+ it("finds the import declaration if multiple aliases", () => {
+ assert.deepStrictEqual(files('import {FileAttachment as F, FileAttachment as G} from "npm:@observablehq/stdlib";\nF("file1.txt");\nG("file2.txt");', {aliases: []}), [{name: "./file1.txt", method: "text"}, {name: "./file2.txt", method: "text"}]);
+ });
+ it("ignores import declarations from another module", () => {
+ assert.deepStrictEqual(files('import {FileAttachment as F} from "npm:@observablehq/not-stdlib";\nFileAttachment("file1.txt");', {aliases: []}), []);
+ });
+ it.skip("supports namespace imports", () => {
+ assert.deepStrictEqual(files('import * as O from "npm:@observablehq/stdlib";\nO.FileAttachment("foo.txt");', {aliases: []}), [{name: "./foo.txt", method: "text"}]);
+ });
+ it("ignores masked references", () => {
+ assert.deepStrictEqual(files('import {FileAttachment} from "npm:@observablehq/stdlib";\n((FileAttachment) => FileAttachment("file.txt"))(String);', {aliases: []}), []);
+ });
+});
+
+function files(input: string, {path = "index.md", aliases = ["FileAttachment"]} = {}) {
+ return findFiles(parse(input), path, input, aliases).map(({node, ...f}) => f);
+}
+
+function parse(input: string): Program {
+ return Parser.parse(input, {ecmaVersion: 13, sourceType: "module"});
+}
diff --git a/test/javascript/imports-test.ts b/test/javascript/imports-test.ts
index 8789ed808..23f25f0e3 100644
--- a/test/javascript/imports-test.ts
+++ b/test/javascript/imports-test.ts
@@ -1,201 +1,119 @@
import assert from "node:assert";
-import type {Node, Program} from "acorn";
+import type {Program} from "acorn";
import {Parser} from "acorn";
-import {ascending} from "d3-array";
-import {getFeatureReferenceMap} from "../../src/javascript/features.js";
-import {parseLocalImports, rewriteModule} from "../../src/javascript/imports.js";
-import type {Feature, ImportReference} from "../../src/javascript.js";
+import {findExports, findImports, hasImportDeclaration} from "../../src/javascript/imports.js";
-describe("parseLocalImports(root, paths)", () => {
- it("finds all local imports in one file", () => {
- assert.deepStrictEqual(parseLocalImports("test/input/build/imports", ["foo/foo.js"]).imports.sort(order), [
- {name: "npm:d3", type: "global"},
- {name: "bar/bar.js", type: "local"},
- {name: "bar/baz.js", type: "local"},
- {name: "foo/foo.js", type: "local"},
- {name: "top.js", type: "local"}
- ]);
+describe("findExports(body)", () => {
+ it("finds export all declarations", () => {
+ const program = parse("export * from 'foo.js';\nexport * from 'bar.js';\n");
+ assert.deepStrictEqual(findExports(program), program.body);
});
- it("finds all local imports in multiple files", () => {
- assert.deepStrictEqual(
- parseLocalImports("test/input/imports", ["transitive-static-import.js", "dynamic-import.js"]).imports.sort(order),
- [
- {name: "bar.js", type: "local"},
- {name: "dynamic-import.js", type: "local"},
- {name: "other/foo.js", type: "local"},
- {name: "transitive-static-import.js", type: "local"}
- ]
- );
+ it("finds named export declarations", () => {
+ const program = parse("export {foo} from 'foo.js';\nexport const bar = 2;\n");
+ assert.deepStrictEqual(findExports(program), program.body);
});
- it("ignores missing files", () => {
- assert.deepStrictEqual(
- parseLocalImports("test/input/imports", ["static-import.js", "does-not-exist.js"]).imports.sort(order),
- [
- {name: "bar.js", type: "local"},
- {name: "does-not-exist.js", type: "local"},
- {name: "static-import.js", type: "local"}
- ]
- );
+ it("returns the empty array if there are no exports", () => {
+ assert.deepStrictEqual(findExports(parse("1 + 2;\n")), []);
});
- it("find all local fetches in one file", () => {
- assert.deepStrictEqual(parseLocalImports("test/input/build/fetches", ["foo/foo.js"]).features.sort(order), [
- {name: "foo/foo-data.csv", type: "FileAttachment"},
- {name: "foo/foo-data.json", type: "FileAttachment"}
+});
+
+describe("findImports(body, path, input)", () => {
+ it("finds local static import declarations", () => {
+ const input = "import {foo} from './bar.js';\nimport '/baz.js';\nimport def from '../qux.js';\n";
+ const program = parse(input);
+ assert.deepStrictEqual(findImports(program, "foo/bar.js", input), [
+ {name: "./bar.js", type: "local", method: "static"},
+ {name: "../baz.js", type: "local", method: "static"},
+ {name: "../qux.js", type: "local", method: "static"}
]);
});
- it("find all local fetches via transitive import", () => {
- assert.deepStrictEqual(parseLocalImports("test/input/build/fetches", ["top.js"]).features.sort(order), [
- {name: "foo/foo-data.csv", type: "FileAttachment"},
- {name: "foo/foo-data.json", type: "FileAttachment"},
- {name: "top-data.csv", type: "FileAttachment"},
- {name: "top-data.json", type: "FileAttachment"}
+ it("finds local static import expressions", () => {
+ const input = "import('./bar.js');\nimport('/baz.js');\nimport('../qux.js');\n";
+ const program = parse(input);
+ assert.deepStrictEqual(findImports(program, "foo/bar.js", input), [
+ {name: "./bar.js", type: "local", method: "dynamic"},
+ {name: "../baz.js", type: "local", method: "dynamic"},
+ {name: "../qux.js", type: "local", method: "dynamic"}
]);
});
-});
-
-describe("findImportFeatureReferences(node)", () => {
- it("finds the import declaration", () => {
- const node = parse('import {FileAttachment} from "npm:@observablehq/stdlib";\nFileAttachment("file.txt");');
- assert.deepStrictEqual(Array.from(getFeatureReferenceMap(node).keys(), object), [
- {type: "Identifier", name: "FileAttachment", start: 57, end: 71}
+ it("finds local static export all declarations", () => {
+ const input = "export * from './bar.js';\nexport * from '/baz.js';\nexport * from '../qux.js';\n";
+ const program = parse(input);
+ assert.deepStrictEqual(findImports(program, "foo/bar.js", input), [
+ {name: "./bar.js", type: "local", method: "static"},
+ {name: "../baz.js", type: "local", method: "static"},
+ {name: "../qux.js", type: "local", method: "static"}
]);
});
- it("finds the import declaration if aliased", () => {
- const node = parse('import {FileAttachment as F} from "npm:@observablehq/stdlib";\nF("file.txt");');
- assert.deepStrictEqual(Array.from(getFeatureReferenceMap(node).keys(), object), [
- {type: "Identifier", name: "F", start: 62, end: 63}
+ it("finds local static export named declarations", () => {
+ const input =
+ "export {foo} from './bar.js';\nexport {default as def} from '/baz.js';\nexport {} from '../qux.js';\n";
+ const program = parse(input);
+ assert.deepStrictEqual(findImports(program, "foo/bar.js", input), [
+ {name: "./bar.js", type: "local", method: "static"},
+ {name: "../baz.js", type: "local", method: "static"},
+ {name: "../qux.js", type: "local", method: "static"}
]);
});
- it("finds the import declaration if aliased and masking a global", () => {
- const node = parse('import {FileAttachment as File} from "npm:@observablehq/stdlib";\nFile("file.txt");');
- assert.deepStrictEqual(Array.from(getFeatureReferenceMap(node).keys(), object), [
- {type: "Identifier", name: "File", start: 65, end: 69}
+ it("finds global static import declarations", () => {
+ const input = "import {foo} from 'bar.js';\nimport '@observablehq/baz';\nimport def from 'npm:qux';\n";
+ const program = parse(input);
+ assert.deepStrictEqual(findImports(program, "foo/bar.js", input), [
+ {name: "bar.js", type: "global", method: "static"},
+ {name: "@observablehq/baz", type: "global", method: "static"},
+ {name: "npm:qux", type: "global", method: "static"}
]);
});
- it("finds the import declaration if multiple aliases", () => {
- const node = parse('import {FileAttachment as F, FileAttachment as G} from "npm:@observablehq/stdlib";\nF("file.txt");\nG("file.txt");'); // prettier-ignore
- assert.deepStrictEqual(Array.from(getFeatureReferenceMap(node).keys(), object), [
- {type: "Identifier", name: "F", start: 83, end: 84},
- {type: "Identifier", name: "G", start: 98, end: 99}
+ it("finds global static import expressions", () => {
+ const input = "import('bar.js');\nimport('@observablehq/baz');\nimport('npm:qux');\n";
+ const program = parse(input);
+ assert.deepStrictEqual(findImports(program, "foo/bar.js", input), [
+ {name: "bar.js", type: "global", method: "dynamic"},
+ {name: "@observablehq/baz", type: "global", method: "dynamic"},
+ {name: "npm:qux", type: "global", method: "dynamic"}
]);
});
- it("ignores import declarations from another module", () => {
- const node = parse('import {FileAttachment as F} from "npm:@observablehq/not-stdlib";\nF("file.txt");');
- assert.deepStrictEqual(Array.from(getFeatureReferenceMap(node).keys(), object), []);
+ it("finds global static export all declarations", () => {
+ const input = "export * from 'bar.js';\nexport * from '@observablehq/baz';\nexport * from 'npm:qux';\n";
+ const program = parse(input);
+ assert.deepStrictEqual(findImports(program, "foo/bar.js", input), [
+ {name: "bar.js", type: "global", method: "static"},
+ {name: "@observablehq/baz", type: "global", method: "static"},
+ {name: "npm:qux", type: "global", method: "static"}
+ ]);
});
- it.skip("supports namespace imports", () => {
- const node = parse('import * as O from "npm:@observablehq/stdlib";\nO.FileAttachment("file.txt");');
- assert.deepStrictEqual(Array.from(getFeatureReferenceMap(node).keys(), object), [
- {type: "Identifier", name: "FileAttachment", start: 49, end: 63}
+ it("finds global static export named declarations", () => {
+ const input =
+ "export {foo} from 'bar.js';\nexport {default as def} from '@observablehq/baz';\nexport {} from 'npm:qux';\n";
+ const program = parse(input);
+ assert.deepStrictEqual(findImports(program, "foo/bar.js", input), [
+ {name: "bar.js", type: "global", method: "static"},
+ {name: "@observablehq/baz", type: "global", method: "static"},
+ {name: "npm:qux", type: "global", method: "static"}
]);
});
- it("ignores masked references", () => {
- const node = parse('import {FileAttachment} from "npm:@observablehq/stdlib";\n((FileAttachment) => FileAttachment("file.txt"))(String);'); // prettier-ignore
- assert.deepStrictEqual(Array.from(getFeatureReferenceMap(node).keys(), object), []);
+ it("ignores import expressions with dynamic sources", () => {
+ const input = "import('./bar'+'.js');\nimport(`/${'baz'}.js`);\n";
+ const program = parse(input);
+ assert.deepStrictEqual(findImports(program, "foo/bar.js", input), []);
+ });
+ it("errors on non-local paths", () => {
+ const input = "import('../bar.js');\n";
+ const program = parse(input);
+ assert.throws(() => findImports(program, "foo.js", input), /non-local import/);
});
});
-async function testFile(target: string, path: string): Promise<string> {
- const input = `import {FileAttachment} from "npm:@observablehq/stdlib";\nFileAttachment(${JSON.stringify(target)})`;
- const output = await rewriteModule(input, path, async (path, specifier) => specifier);
- return output.split("\n").pop()!;
-}
-
-describe("rewriteModule(input, path, resolver)", () => {
- it("rewrites relative files with import.meta.resolve", async () => {
- assert.strictEqual(await testFile("./test.txt", "test.js"), 'FileAttachment("../test.txt", import.meta.url)'); // prettier-ignore
- assert.strictEqual(await testFile("./sub/test.txt", "test.js"), 'FileAttachment("../sub/test.txt", import.meta.url)'); // prettier-ignore
- assert.strictEqual(await testFile("./test.txt", "sub/test.js"), 'FileAttachment("../../sub/test.txt", import.meta.url)'); // prettier-ignore
- assert.strictEqual(await testFile("../test.txt", "sub/test.js"), 'FileAttachment("../../test.txt", import.meta.url)'); // prettier-ignore
- });
- it("does not require paths to start with ./, ../, or /", async () => {
- assert.strictEqual(await testFile("test.txt", "test.js"), 'FileAttachment("../test.txt", import.meta.url)'); // prettier-ignore
- assert.strictEqual(await testFile("sub/test.txt", "test.js"), 'FileAttachment("../sub/test.txt", import.meta.url)'); // prettier-ignore
- assert.strictEqual(await testFile("test.txt", "sub/test.js"), 'FileAttachment("../../sub/test.txt", import.meta.url)'); // prettier-ignore
- });
- it("rewrites absolute files with meta", async () => {
- assert.strictEqual(await testFile("/test.txt", "test.js"), 'FileAttachment("../test.txt", import.meta.url)'); // prettier-ignore
- assert.strictEqual(await testFile("/sub/test.txt", "test.js"), 'FileAttachment("../sub/test.txt", import.meta.url)'); // prettier-ignore
- assert.strictEqual(await testFile("/test.txt", "sub/test.js"), 'FileAttachment("../../test.txt", import.meta.url)'); // prettier-ignore
- });
- it("ignores FileAttachment if masked by a reference", async () => {
- const input = 'import {FileAttachment} from "npm:@observablehq/stdlib";\n((FileAttachment) => FileAttachment("./test.txt"))(eval)'; // prettier-ignore
- const output = (await rewriteModule(input, "test.js", async (path, specifier) => specifier)).split("\n").pop()!;
- assert.strictEqual(output, '((FileAttachment) => FileAttachment("./test.txt"))(eval)');
- });
- it("ignores FileAttachment if not imported", async () => {
- const input = 'import {Generators} from "npm:@observablehq/stdlib";\nFileAttachment("./test.txt")';
- const output = (await rewriteModule(input, "test.js", async (path, specifier) => specifier)).split("\n").pop()!;
- assert.strictEqual(output, 'FileAttachment("./test.txt")');
- });
- it("ignores FileAttachment if a comma expression", async () => {
- const input = 'import {FileAttachment} from "npm:@observablehq/stdlib";\n(1, FileAttachment)("./test.txt")';
- const output = (await rewriteModule(input, "test.js", async (path, specifier) => specifier)).split("\n").pop()!;
- assert.strictEqual(output, '(1, FileAttachment)("./test.txt")');
+describe("hasImportDeclaration(body)", () => {
+ it("returns true if the body has import declarations", () => {
+ assert.strictEqual(hasImportDeclaration(parse("import 'foo.js';")), true);
});
- it("ignores FileAttachment if not imported from @observablehq/stdlib", async () => {
- const input = 'import {FileAttachment} from "npm:@observablehq/not-stdlib";\nFileAttachment("./test.txt")';
- const output = (await rewriteModule(input, "test.js", async (path, specifier) => specifier)).split("\n").pop()!;
- assert.strictEqual(output, 'FileAttachment("./test.txt")');
- });
- it("rewrites FileAttachment when aliased", async () => {
- const input = 'import {FileAttachment as F} from "npm:@observablehq/stdlib";\nF("./test.txt")';
- const output = (await rewriteModule(input, "test.js", async (path, specifier) => specifier)).split("\n").pop()!;
- assert.strictEqual(output, 'F("../test.txt", import.meta.url)');
- });
- it("rewrites FileAttachment when aliased to a global", async () => {
- const input = 'import {FileAttachment as File} from "npm:@observablehq/stdlib";\nFile("./test.txt")';
- const output = (await rewriteModule(input, "test.js", async (path, specifier) => specifier)).split("\n").pop()!;
- assert.strictEqual(output, 'File("../test.txt", import.meta.url)');
- });
- it.skip("rewrites FileAttachment when imported as a namespace", async () => {
- const input = 'import * as O from "npm:@observablehq/stdlib";\nO.FileAttachment("./test.txt")';
- const output = (await rewriteModule(input, "test.js", async (path, specifier) => specifier)).split("\n").pop()!;
- assert.strictEqual(output, 'O.FileAttachment("../test.txt", import.meta.url)');
- });
- it("ignores non-FileAttachment calls", async () => {
- const input = 'import {FileAttachment} from "npm:@observablehq/stdlib";\nFile("./test.txt")';
- const output = (await rewriteModule(input, "test.js", async (path, specifier) => specifier)).split("\n").pop()!;
- assert.strictEqual(output, 'File("./test.txt")');
- });
- it("rewrites single-quoted literals", async () => {
- const input = "import {FileAttachment} from \"npm:@observablehq/stdlib\";\nFileAttachment('./test.txt')";
- const output = (await rewriteModule(input, "test.js", async (path, specifier) => specifier)).split("\n").pop()!;
- assert.strictEqual(output, 'FileAttachment("../test.txt", import.meta.url)');
- });
- it("rewrites template-quoted literals", async () => {
- const input = 'import {FileAttachment} from "npm:@observablehq/stdlib";\nFileAttachment(`./test.txt`)';
- const output = (await rewriteModule(input, "test.js", async (path, specifier) => specifier)).split("\n").pop()!;
- assert.strictEqual(output, 'FileAttachment("../test.txt", import.meta.url)');
- });
- it("throws a syntax error with non-literal calls", async () => {
- const input = "import {FileAttachment} from \"npm:@observablehq/stdlib\";\nFileAttachment(`./${'test'}.txt`)";
- await assert.rejects(() => rewriteModule(input, "test.js", async (path, specifier) => specifier), /FileAttachment requires a single literal string/); // prettier-ignore
- });
- it("throws a syntax error with URL fetches", async () => {
- const input = 'import {FileAttachment} from "npm:@observablehq/stdlib";\nFileAttachment("https://example.com")';
- await assert.rejects(() => rewriteModule(input, "test.js", async (path, specifier) => specifier), /non-local file path/); // prettier-ignore
- });
- it("ignores non-local path fetches", async () => {
- const input1 = 'import {FileAttachment} from "npm:@observablehq/stdlib";\nFileAttachment("../test.txt")';
- const input2 = 'import {FileAttachment} from "npm:@observablehq/stdlib";\nFileAttachment("./../test.txt")';
- const input3 = 'import {FileAttachment} from "npm:@observablehq/stdlib";\nFileAttachment("../../test.txt")';
- const input4 = 'import {FileAttachment} from "npm:@observablehq/stdlib";\nFileAttachment("./../../test.txt")';
- await assert.rejects(() => rewriteModule(input1, "test.js", async (path, specifier) => specifier), /non-local file path/); // prettier-ignore
- await assert.rejects(() => rewriteModule(input2, "test.js", async (path, specifier) => specifier), /non-local file path/); // prettier-ignore
- await assert.rejects(() => rewriteModule(input3, "sub/test.js", async (path, specifier) => specifier), /non-local file path/); // prettier-ignore
- await assert.rejects(() => rewriteModule(input4, "sub/test.js", async (path, specifier) => specifier), /non-local file path/); // prettier-ignore
+ it("returns false if the body does not have import declarations", () => {
+ assert.strictEqual(hasImportDeclaration(parse("1 + 2;")), false);
+ assert.strictEqual(hasImportDeclaration(parse("import('foo.js');")), false);
});
});
function parse(input: string): Program {
return Parser.parse(input, {ecmaVersion: 13, sourceType: "module"});
}
-
-function object(node: Node) {
- return {...node};
-}
-
-function order(a: ImportReference | Feature, b: ImportReference | Feature): number {
- return ascending(a.type, b.type) || ascending(a.name, b.name);
-}
diff --git a/test/javascript/module-test.ts b/test/javascript/module-test.ts
new file mode 100644
index 000000000..4da68e36c
--- /dev/null
+++ b/test/javascript/module-test.ts
@@ -0,0 +1,149 @@
+import assert from "node:assert";
+import type {FileInfo, ModuleInfo} from "../../src/javascript/module.js";
+import {getFileHash, getFileInfo, getModuleHash, getModuleInfo} from "../../src/javascript/module.js";
+
+const emptyHash = "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855";
+
+// Note: if these hashes change, please verify that they’re correct by stepping
+// through the code and verifying that they consider all the relevant files.
+describe("getModuleHash(root, path)", () => {
+ it("returns the transitive content hash for the specified module", () => {
+ assert.strictEqual(getModuleHash("test/input/build/imports", "foo/foo.js"), "32f934a52fa34ba1b06aa6089fe5922dc442c9bf2dcddef864bc649a39d9eace"); // prettier-ignore
+ assert.strictEqual(getModuleHash("test/input/build/imports", "bar/bar.js"), "7fe009c8bb0049d9b84d53a00b29fb172bbf07d8232d2ace5f7c6f220b23eb16"); // prettier-ignore
+ assert.strictEqual(getModuleHash("test/input/build/imports", "top.js"), "160847a6b4890d59f8e8862911bfbe3b8066955d31f2708cafbe51945c3c57b6"); // prettier-ignore
+ assert.strictEqual(getModuleHash("test/input/build/fetches", "foo/foo.js"), "6fd063d5f14f3bb844cfdb599bf3bdd643c4d0f89841591f769960fd58104e6c"); // prettier-ignore
+ assert.strictEqual(getModuleHash("test/input/build/fetches", "top.js"), "d8f5cc36a8b359974a3aa89013db8d6c1dbb43b091bed4ea683a7c8c994b2a3d"); // prettier-ignore
+ });
+ it("returns the empty hash if the specified module does not exist", () => {
+ assert.strictEqual(getModuleHash("test/input/build/imports", "does-not-exist.js"), emptyHash);
+ });
+ it("returns the empty hash if the specified module is invalid", () => {
+ assert.strictEqual(getModuleHash("test/input/build/imports", "foo/foo.md"), emptyHash);
+ });
+});
+
+describe("getModuleInfo(root, path)", () => {
+ it("returns the information for the specified module", () => {
+ assert.deepStrictEqual(redactModuleInfo("test/input/build/imports", "foo/foo.js"), {
+ fileMethods: new Set(),
+ files: new Set(),
+ hash: "c77c2490ea7b9a89dce7bad39973995e5158921bf8576955ae4a596c47a5a2a4",
+ globalStaticImports: new Set(["npm:d3"]),
+ globalDynamicImports: new Set(),
+ localDynamicImports: new Set(),
+ localStaticImports: new Set(["../bar/bar.js", "../top.js"])
+ });
+ assert.deepStrictEqual(redactModuleInfo("test/input/build/imports", "bar/bar.js"), {
+ fileMethods: new Set(),
+ files: new Set(),
+ hash: "7b0c5b1eef9d9dd624a7529a160a667516182000e863d81e077d606214bf2341",
+ globalStaticImports: new Set(),
+ globalDynamicImports: new Set(),
+ localDynamicImports: new Set(),
+ localStaticImports: new Set(["./baz.js"])
+ });
+ assert.deepStrictEqual(redactModuleInfo("test/input/build/imports", "bar/baz.js"), {
+ fileMethods: new Set(),
+ files: new Set(),
+ hash: "2483067ede9abb8bc914e3275fb177cbe7898944a625d1da2226fbc16f833bec",
+ globalStaticImports: new Set(),
+ globalDynamicImports: new Set(),
+ localDynamicImports: new Set(),
+ localStaticImports: new Set(["../foo/foo.js"])
+ });
+ assert.deepStrictEqual(redactModuleInfo("test/input/build/imports", "top.js"), {
+ fileMethods: new Set(),
+ files: new Set(),
+ hash: "a53c5d5b8a2e2f483fb15a0ac2cdb1cd162da4b02f9f47b47f0fbe237f3b6382",
+ globalStaticImports: new Set(),
+ globalDynamicImports: new Set(),
+ localDynamicImports: new Set(),
+ localStaticImports: new Set()
+ });
+ assert.deepStrictEqual(redactModuleInfo("test/input/build/fetches", "foo/foo.js"), {
+ fileMethods: new Set(["json", "text"]),
+ files: new Set(["./foo-data.csv", "./foo-data.json"]),
+ hash: "13349148aade73f9c04f331956a8f48535958a7c7e640b025eebfb52156067fa",
+ globalStaticImports: new Set(["npm:@observablehq/stdlib"]),
+ globalDynamicImports: new Set(),
+ localDynamicImports: new Set(),
+ localStaticImports: new Set()
+ });
+ assert.deepStrictEqual(redactModuleInfo("test/input/build/fetches", "top.js"), {
+ fileMethods: new Set(["json", "text"]),
+ files: new Set(["./top-data.csv", "./top-data.json"]),
+ hash: "d755832694c7db24d3606bdef5ecfdfbd820b09576d5790feb310854f48b2228",
+ globalStaticImports: new Set(["npm:@observablehq/stdlib"]),
+ globalDynamicImports: new Set(),
+ localDynamicImports: new Set(),
+ localStaticImports: new Set(["./foo/foo.js"])
+ });
+ assert.deepStrictEqual(redactModuleInfo("test/input/imports", "dynamic-import.js"), {
+ fileMethods: new Set(),
+ files: new Set(),
+ hash: "798a3d6d848c7b5facdcd14f893cd74d03336d74991f192443ff88fd1dfff038",
+ globalStaticImports: new Set(),
+ globalDynamicImports: new Set(),
+ localDynamicImports: new Set(["./bar.js"]),
+ localStaticImports: new Set()
+ });
+ assert.deepStrictEqual(redactModuleInfo("test/input/imports", "dynamic-npm-import.js"), {
+ fileMethods: new Set(),
+ files: new Set(),
+ hash: "47b3d67acfc04995d3e58a8829855e583856010bb3c5d415747782da34d0dcc8",
+ globalStaticImports: new Set(),
+ globalDynamicImports: new Set(["npm:canvas-confetti"]),
+ localDynamicImports: new Set(),
+ localStaticImports: new Set()
+ });
+ });
+ it("returns undefined if the specified module does not exist", () => {
+ assert.strictEqual(getModuleInfo("test/input/build/imports", "does-not-exist.js"), undefined);
+ });
+ it("returns undefined if the specified module is invalid", () => {
+ assert.strictEqual(getModuleInfo("test/input/build/imports", "foo/foo.md"), undefined);
+ });
+});
+
+describe("getFileHash(root, path)", () => {
+ it("returns the content hash for the specified file", () => {
+ assert.strictEqual(getFileHash("test/input/build/files", "file-top.csv"), "01a7ce0aea79f9cddb22e772b2cc9a9f3229a64a5fd941eec8d8ddc41fb07c34"); // prettier-ignore
+ });
+ it("returns the content hash for the specified file’s data loader", () => {
+ assert.strictEqual(getFileHash("test/input/build/archives", "dynamic.zip.sh"), "68223db0bf721258cce808d45ff5c77e0f14b35240d25940f40ed9cd1ad74fde"); // prettier-ignore
+ assert.strictEqual(getFileHash("test/input/build/archives", "dynamic.zip"), "68223db0bf721258cce808d45ff5c77e0f14b35240d25940f40ed9cd1ad74fde"); // prettier-ignore
+ assert.strictEqual(getFileHash("test/input/build/archives", "dynamic/file.txt"), "68223db0bf721258cce808d45ff5c77e0f14b35240d25940f40ed9cd1ad74fde"); // prettier-ignore
+ assert.strictEqual(getFileHash("test/input/build/archives", "static.zip"), "e6afff224da77b900cfe3ab8789f2283883300e1497548c30af66dfe4c29b429"); // prettier-ignore
+ assert.strictEqual(getFileHash("test/input/build/archives", "static/file.txt"), "e6afff224da77b900cfe3ab8789f2283883300e1497548c30af66dfe4c29b429"); // prettier-ignore
+ });
+ it("returns the empty hash if the specified file does not exist", () => {
+ assert.strictEqual(getFileHash("test/input/build/files", "does-not-exist.csv"), emptyHash);
+ });
+});
+
+describe("getFileInfo(root, path)", () => {
+ it("returns the info for the specified file", () => {
+ assert.deepStrictEqual(redactFileInfo("test/input/build/files", "file-top.csv"), {hash: "01a7ce0aea79f9cddb22e772b2cc9a9f3229a64a5fd941eec8d8ddc41fb07c34"}); // prettier-ignore
+ assert.deepStrictEqual(redactFileInfo("test/input/build/archives", "dynamic.zip.sh"), {hash: "68223db0bf721258cce808d45ff5c77e0f14b35240d25940f40ed9cd1ad74fde"}); // prettier-ignore
+ assert.deepStrictEqual(redactFileInfo("test/input/build/archives", "static.zip"), {hash: "e6afff224da77b900cfe3ab8789f2283883300e1497548c30af66dfe4c29b429"}); // prettier-ignore
+ });
+ it("returns undefined if the specified file is created by a data loader", () => {
+ assert.strictEqual(getFileInfo("test/input/build/archives", "dynamic.zip"), undefined);
+ assert.strictEqual(getFileInfo("test/input/build/archives", "dynamic/file.txt"), undefined);
+ assert.strictEqual(getFileInfo("test/input/build/archives", "static/file.txt"), undefined);
+ });
+});
+
+function redactModuleInfo(root: string, path: string): Omit<ModuleInfo, "mtimeMs"> | undefined {
+ const info = getModuleInfo(root, path);
+ if (!info) return;
+ const {mtimeMs, ...rest} = info;
+ return rest;
+}
+
+function redactFileInfo(root: string, path: string): Omit<FileInfo, "mtimeMs"> | undefined {
+ const info = getFileInfo(root, path);
+ if (!info) return;
+ const {mtimeMs, ...rest} = info;
+ return rest;
+}
diff --git a/test/javascript/npm-test.ts b/test/javascript/npm-test.ts
new file mode 100644
index 000000000..c06fa0193
--- /dev/null
+++ b/test/javascript/npm-test.ts
@@ -0,0 +1,49 @@
+import assert from "node:assert";
+import {rewriteNpmImports} from "../../src/npm.js";
+
+// prettier-ignore
+describe("rewriteNpmImports(input, path)", () => {
+ it("rewrites /npm/ imports to /_npm/", () => {
+ assert.strictEqual(rewriteNpmImports('export * from "/npm/d3-array@3.2.4/dist/d3-array.js";\n', "/_npm/d3@7.8.5/dist/d3.js"), 'export * from "../../d3-array@3.2.4/dist/d3-array.js";\n');
+ });
+ it("rewrites /npm/…+esm imports to +esm.js", () => {
+ assert.strictEqual(rewriteNpmImports('export * from "/npm/d3-array@3.2.4/+esm";\n', "/_npm/d3@7.8.5/+esm.js"), 'export * from "../d3-array@3.2.4/+esm.js";\n');
+ });
+ it("rewrites /npm/ imports to a relative path", () => {
+ assert.strictEqual(rewriteNpmImports('import "/npm/d3-array@3.2.4/dist/d3-array.js";\n', "/_npm/d3@7.8.5/dist/d3.js"), 'import "../../d3-array@3.2.4/dist/d3-array.js";\n');
+ assert.strictEqual(rewriteNpmImports('import "/npm/d3-array@3.2.4/dist/d3-array.js";\n', "/_npm/d3@7.8.5/d3.js"), 'import "../d3-array@3.2.4/dist/d3-array.js";\n');
+ });
+ it("rewrites named imports", () => {
+ assert.strictEqual(rewriteNpmImports('import {sort} from "/npm/d3-array@3.2.4/+esm";\n', "/_npm/d3@7.8.5/+esm.js"), 'import {sort} from "../d3-array@3.2.4/+esm.js";\n');
+ });
+ it("rewrites empty imports", () => {
+ assert.strictEqual(rewriteNpmImports('import "/npm/d3-array@3.2.4/+esm";\n', "/_npm/d3@7.8.5/+esm.js"), 'import "../d3-array@3.2.4/+esm.js";\n');
+ });
+ it("rewrites default imports", () => {
+ assert.strictEqual(rewriteNpmImports('import d3 from "/npm/d3-array@3.2.4/+esm";\n', "/_npm/d3@7.8.5/+esm.js"), 'import d3 from "../d3-array@3.2.4/+esm.js";\n');
+ });
+ it("rewrites namespace imports", () => {
+ assert.strictEqual(rewriteNpmImports('import * as d3 from "/npm/d3-array@3.2.4/+esm";\n', "/_npm/d3@7.8.5/+esm.js"), 'import * as d3 from "../d3-array@3.2.4/+esm.js";\n');
+ });
+ it("rewrites named exports", () => {
+ assert.strictEqual(rewriteNpmImports('export {sort} from "/npm/d3-array@3.2.4/+esm";\n', "/_npm/d3@7.8.5/+esm.js"), 'export {sort} from "../d3-array@3.2.4/+esm.js";\n');
+ });
+ it("rewrites namespace exports", () => {
+ assert.strictEqual(rewriteNpmImports('export * from "/npm/d3-array@3.2.4/+esm";\n', "/_npm/d3@7.8.5/+esm.js"), 'export * from "../d3-array@3.2.4/+esm.js";\n');
+ });
+ it("rewrites dynamic imports with static module specifiers", () => {
+ assert.strictEqual(rewriteNpmImports('import("/npm/d3-array@3.2.4/+esm");\n', "/_npm/d3@7.8.5/+esm.js"), 'import("../d3-array@3.2.4/+esm.js");\n');
+ assert.strictEqual(rewriteNpmImports("import(`/npm/d3-array@3.2.4/+esm`);\n", "/_npm/d3@7.8.5/+esm.js"), 'import("../d3-array@3.2.4/+esm.js");\n');
+ assert.strictEqual(rewriteNpmImports("import('/npm/d3-array@3.2.4/+esm');\n", "/_npm/d3@7.8.5/+esm.js"), 'import("../d3-array@3.2.4/+esm.js");\n');
+ });
+ it("ignores dynamic imports with dynamic module specifiers", () => {
+ assert.strictEqual(rewriteNpmImports('import(`/npm/d3-array@${"3.2.4"}/+esm`);\n', "/_npm/d3@7.8.5/+esm.js"), 'import(`/npm/d3-array@${"3.2.4"}/+esm`);\n');
+ });
+  it("ignores dynamic imports with dynamic module specifiers in nested paths", () => {
+    assert.strictEqual(rewriteNpmImports('import(`/npm/d3-array@${"3.2.4"}/+esm`);\n', "/_npm/d3@7.8.5/dist/d3.js"), 'import(`/npm/d3-array@${"3.2.4"}/+esm`);\n');
+  });
+ it("strips the sourceMappingURL declaration", () => {
+ assert.strictEqual(rewriteNpmImports('import(`/npm/d3-array@${"3.2.4"}/+esm`);\n//# sourceMappingURL=index.js.map', "/_npm/d3@7.8.5/+esm.js"), 'import(`/npm/d3-array@${"3.2.4"}/+esm`);\n');
+ assert.strictEqual(rewriteNpmImports('import(`/npm/d3-array@${"3.2.4"}/+esm`);\n//# sourceMappingURL=index.js.map\n', "/_npm/d3@7.8.5/+esm.js"), 'import(`/npm/d3-array@${"3.2.4"}/+esm`);\n');
+ });
+});
diff --git a/test/javascript/parse-test.ts b/test/javascript/parse-test.ts
new file mode 100644
index 000000000..613a96923
--- /dev/null
+++ b/test/javascript/parse-test.ts
@@ -0,0 +1,79 @@
+import assert from "node:assert";
+import {readdirSync, statSync} from "node:fs";
+import {mkdir, readFile, unlink, writeFile} from "node:fs/promises";
+import {join, resolve} from "node:path";
+import {isEnoent} from "../../src/error.js";
+import type {JavaScriptNode} from "../../src/javascript/parse.js";
+import {parseJavaScript} from "../../src/javascript/parse.js";
+
+function isJsFile(inputRoot: string, fileName: string) {
+ if (!fileName.endsWith(".js")) return false;
+ const path = join(inputRoot, fileName);
+ return statSync(path).isFile();
+}
+
+function redactJavaScript({input, ...node}: JavaScriptNode): Omit<JavaScriptNode, "input"> {
+ return node;
+}
+
+// Convert to a serializable representation.
+function stringify(key: string, value: any): any {
+ return typeof value === "bigint" ? value.toString() : value instanceof Map ? [...value] : value;
+}
+
+function runTests(inputRoot: string, outputRoot: string, filter: (name: string) => boolean = () => true) {
+ for (const name of readdirSync(inputRoot)) {
+ if (!isJsFile(inputRoot, name) || !filter(name)) continue;
+ const only = name.startsWith("only.");
+ const skip = name.startsWith("skip.");
+ const outname = only || skip ? name.slice(5) : name;
+ const path = join(inputRoot, name);
+ (only ? it.only : skip ? it.skip : it)(path, async () => {
+ const outfile = resolve(outputRoot, `${outname}.json`);
+ const diffile = resolve(outputRoot, `${outname}-changed.json`);
+ const input = await readFile(path, "utf8");
+ let actual: string;
+ let expected: string;
+
+ try {
+ actual = JSON.stringify(redactJavaScript(parseJavaScript(input, {path: name})), stringify, 2);
+ } catch (error) {
+ if (!(error instanceof SyntaxError)) throw error;
+ actual = JSON.stringify({error: error.message}, null, 2);
+ }
+
+ try {
+ expected = await readFile(outfile, "utf8");
+ } catch (error) {
+ if (!isEnoent(error) || process.env.CI === "true") throw error;
+ console.warn(`! generating ${outfile}`);
+ await mkdir(outputRoot, {recursive: true});
+ await writeFile(outfile, actual, "utf8");
+ return;
+ }
+
+ const equal = expected === actual;
+
+ if (equal) {
+ if (process.env.CI !== "true") {
+ try {
+ await unlink(diffile);
+ console.warn(`! deleted ${diffile}`);
+ } catch (error) {
+ if (!isEnoent(error)) throw error;
+ }
+ }
+ } else {
+ console.warn(`! generating ${diffile}`);
+ await writeFile(diffile, actual, "utf8");
+ }
+
+ assert.ok(equal, `${name} must match snapshot`);
+ });
+ }
+}
+
+describe("parseJavaScript(input, options)", () => {
+ runTests("test/input", "test/output");
+ runTests("test/input/imports", "test/output/imports", (name) => name.endsWith("-import.js"));
+});
diff --git a/test/javascript/source-test.ts b/test/javascript/source-test.ts
new file mode 100644
index 000000000..16812789b
--- /dev/null
+++ b/test/javascript/source-test.ts
@@ -0,0 +1,63 @@
+import assert from "node:assert";
+import type {Expression, Node} from "acorn";
+import {Parser} from "acorn";
+import {getStringLiteralValue} from "../../src/javascript/source.js";
+import {isLiteral, isStringLiteral, isTemplateLiteral} from "../../src/javascript/source.js";
+
+describe("isLiteral(node)", () => {
+ it("returns true for literals", () => {
+ assert.strictEqual(isLiteral(parseExpression("42")), true);
+ assert.strictEqual(isLiteral(parseExpression("true")), true);
+ assert.strictEqual(isLiteral(parseExpression("false")), true);
+ assert.strictEqual(isLiteral(parseExpression("null")), true);
+ assert.strictEqual(isLiteral(parseExpression("'foo'")), true);
+ assert.strictEqual(isLiteral(parseExpression('"foo"')), true);
+ assert.strictEqual(isLiteral(parseExpression("0n")), true);
+ });
+ it("returns false for other nodes", () => {
+ assert.strictEqual(isLiteral(parseExpression("undefined")), false); // Identifier
+ assert.strictEqual(isLiteral(parseExpression("1 + 2")), false);
+ assert.strictEqual(isLiteral(parseExpression("foo")), false);
+ assert.strictEqual(isLiteral(parseExpression("foo()")), false);
+ });
+});
+
+describe("isTemplateLiteral(node)", () => {
+ it("returns true for template literals", () => {
+ assert.strictEqual(isTemplateLiteral(parseExpression("`foo`")), true);
+ assert.strictEqual(isTemplateLiteral(parseExpression("`${42}`")), true);
+ });
+ it("returns false for other nodes", () => {
+ assert.strictEqual(isTemplateLiteral(parseExpression("'42'")), false);
+ assert.strictEqual(isTemplateLiteral(parseExpression("foo")), false);
+ });
+});
+
+describe("isStringLiteral(node)", () => {
+ it("returns true for static string literals", () => {
+ assert.strictEqual(isStringLiteral(parseExpression('"foo"')), true);
+ assert.strictEqual(isStringLiteral(parseExpression("'foo'")), true);
+ assert.strictEqual(isStringLiteral(parseExpression("`foo`")), true);
+ });
+ it("returns false for other nodes", () => {
+ assert.strictEqual(isStringLiteral(parseExpression("42")), false);
+ assert.strictEqual(isStringLiteral(parseExpression("'1' + '2'")), false);
+ assert.strictEqual(isStringLiteral(parseExpression("`${42}`")), false);
+ });
+});
+
+describe("getStringLiteralValue(node)", () => {
+ it("returns the static string literal value", () => {
+ assert.strictEqual(maybeStringLiteralValue(parseExpression('"foo"')), "foo");
+ assert.strictEqual(maybeStringLiteralValue(parseExpression("'foo'")), "foo");
+ assert.strictEqual(maybeStringLiteralValue(parseExpression("`foo`")), "foo");
+ });
+});
+
+function maybeStringLiteralValue(node: Node): string | undefined {
+ return isStringLiteral(node) ? getStringLiteralValue(node) : undefined;
+}
+
+function parseExpression(input: string): Expression {
+ return Parser.parseExpressionAt(input, 0, {ecmaVersion: 13, sourceType: "module"});
+}
diff --git a/test/javascript/transpile-test.ts b/test/javascript/transpile-test.ts
new file mode 100644
index 000000000..740c8f0bb
--- /dev/null
+++ b/test/javascript/transpile-test.ts
@@ -0,0 +1,190 @@
+import assert from "node:assert";
+import {readdirSync, statSync} from "node:fs";
+import {mkdir, readFile, unlink, writeFile} from "node:fs/promises";
+import {basename, join, resolve} from "node:path";
+import {isEnoent} from "../../src/error.js";
+import {parseJavaScript} from "../../src/javascript/parse.js";
+import type {TranspileModuleOptions} from "../../src/javascript/transpile.js";
+import {transpileJavaScript, transpileModule} from "../../src/javascript/transpile.js";
+
+function isJsFile(inputRoot: string, fileName: string) {
+ if (!fileName.endsWith(".js")) return false;
+ const path = join(inputRoot, fileName);
+ return statSync(path).isFile();
+}
+
+function runTests(inputRoot: string, outputRoot: string, filter: (name: string) => boolean = () => true) {
+ for (const name of readdirSync(inputRoot)) {
+ if (!isJsFile(inputRoot, name) || !filter(name)) continue;
+ const only = name.startsWith("only.");
+ const skip = name.startsWith("skip.");
+ const outname = only || skip ? name.slice(5) : name;
+ const path = join(inputRoot, name);
+ (only ? it.only : skip ? it.skip : it)(path, async () => {
+ const outfile = resolve(outputRoot, `${basename(outname, ".js")}.js`);
+ const diffile = resolve(outputRoot, `${basename(outname, ".js")}-changed.js`);
+ const input = await readFile(path, "utf8");
+ let actual: string;
+ let expected: string;
+
+ try {
+ const node = parseJavaScript(input, {path: name});
+ actual = transpileJavaScript(node, {id: "0"});
+ } catch (error) {
+ if (!(error instanceof SyntaxError)) throw error;
+ actual = `define({id: "0", body: () => { throw new SyntaxError(${JSON.stringify(error.message)}); }});\n`;
+ }
+
+ try {
+ expected = await readFile(outfile, "utf8");
+ } catch (error) {
+ if (!isEnoent(error) || process.env.CI === "true") throw error;
+ console.warn(`! generating ${outfile}`);
+ await mkdir(outputRoot, {recursive: true});
+ await writeFile(outfile, actual, "utf8");
+ return;
+ }
+
+ const equal = expected === actual;
+
+ if (equal) {
+ if (process.env.CI !== "true") {
+ try {
+ await unlink(diffile);
+ console.warn(`! deleted ${diffile}`);
+ } catch (error) {
+ if (!isEnoent(error)) throw error;
+ }
+ }
+ } else {
+ console.warn(`! generating ${diffile}`);
+ await writeFile(diffile, actual, "utf8");
+ }
+
+ assert.ok(equal, `${name} must match snapshot`);
+ });
+ }
+}
+
+describe("transpileJavaScript(input, options)", () => {
+ runTests("test/input", "test/output");
+ runTests("test/input/imports", "test/output/imports", (name) => name.endsWith("-import.js"));
+ it("trims leading and trailing newlines", async () => {
+ const node = parseJavaScript("\ntest\n", {path: "index.js"});
+ const body = transpileJavaScript(node, {id: "0"});
+ assert.strictEqual(body, 'define({id: "0", inputs: ["test","display"], body: async (test,display) => {\ndisplay(await(\ntest\n))\n}});\n'); // prettier-ignore
+ });
+});
+
+async function testFile(target: string, path: string): Promise {
+ const input = `import {FileAttachment} from "npm:@observablehq/stdlib";\nFileAttachment(${JSON.stringify(target)})`;
+ const output = await transpileModule(input, {root: "docs", path});
+ return output.split("\n").pop()!;
+}
+
+describe("transpileModule(input, root, path, sourcePath)", () => {
+ it("rewrites relative files with import.meta.resolve", async () => {
+ assert.strictEqual(await testFile("./test.txt", "test.js"), 'FileAttachment("../test.txt", import.meta.url)'); // prettier-ignore
+ assert.strictEqual(await testFile("./sub/test.txt", "test.js"), 'FileAttachment("../sub/test.txt", import.meta.url)'); // prettier-ignore
+ assert.strictEqual(await testFile("./test.txt", "sub/test.js"), 'FileAttachment("../../sub/test.txt", import.meta.url)'); // prettier-ignore
+ assert.strictEqual(await testFile("../test.txt", "sub/test.js"), 'FileAttachment("../../test.txt", import.meta.url)'); // prettier-ignore
+ });
+ it("does not require paths to start with ./, ../, or /", async () => {
+ assert.strictEqual(await testFile("test.txt", "test.js"), 'FileAttachment("../test.txt", import.meta.url)'); // prettier-ignore
+ assert.strictEqual(await testFile("sub/test.txt", "test.js"), 'FileAttachment("../sub/test.txt", import.meta.url)'); // prettier-ignore
+ assert.strictEqual(await testFile("test.txt", "sub/test.js"), 'FileAttachment("../../sub/test.txt", import.meta.url)'); // prettier-ignore
+ });
+ it("rewrites absolute files with meta", async () => {
+ assert.strictEqual(await testFile("/test.txt", "test.js"), 'FileAttachment("../test.txt", import.meta.url)'); // prettier-ignore
+ assert.strictEqual(await testFile("/sub/test.txt", "test.js"), 'FileAttachment("../sub/test.txt", import.meta.url)'); // prettier-ignore
+ assert.strictEqual(await testFile("/test.txt", "sub/test.js"), 'FileAttachment("../../test.txt", import.meta.url)'); // prettier-ignore
+ });
+});
+
+describe("transpileModule(input, root, path)", () => {
+ const options: TranspileModuleOptions = {root: "docs", path: "test.js"};
+ it("rewrites relative files with import.meta.resolve", async () => {
+ assert.strictEqual(await testFile("./test.txt", "test.js"), 'FileAttachment("../test.txt", import.meta.url)'); // prettier-ignore
+ assert.strictEqual(await testFile("./sub/test.txt", "test.js"), 'FileAttachment("../sub/test.txt", import.meta.url)'); // prettier-ignore
+ assert.strictEqual(await testFile("./test.txt", "sub/test.js"), 'FileAttachment("../../sub/test.txt", import.meta.url)'); // prettier-ignore
+ assert.strictEqual(await testFile("../test.txt", "sub/test.js"), 'FileAttachment("../../test.txt", import.meta.url)'); // prettier-ignore
+ });
+ it("does not require paths to start with ./, ../, or /", async () => {
+ assert.strictEqual(await testFile("test.txt", "test.js"), 'FileAttachment("../test.txt", import.meta.url)'); // prettier-ignore
+ assert.strictEqual(await testFile("sub/test.txt", "test.js"), 'FileAttachment("../sub/test.txt", import.meta.url)'); // prettier-ignore
+ assert.strictEqual(await testFile("test.txt", "sub/test.js"), 'FileAttachment("../../sub/test.txt", import.meta.url)'); // prettier-ignore
+ });
+ it("rewrites absolute files with meta", async () => {
+ assert.strictEqual(await testFile("/test.txt", "test.js"), 'FileAttachment("../test.txt", import.meta.url)'); // prettier-ignore
+ assert.strictEqual(await testFile("/sub/test.txt", "test.js"), 'FileAttachment("../sub/test.txt", import.meta.url)'); // prettier-ignore
+ assert.strictEqual(await testFile("/test.txt", "sub/test.js"), 'FileAttachment("../../test.txt", import.meta.url)'); // prettier-ignore
+ });
+ it("ignores FileAttachment if masked by a reference", async () => {
+ const input = 'import {FileAttachment} from "npm:@observablehq/stdlib";\n((FileAttachment) => FileAttachment("./test.txt"))(eval)'; // prettier-ignore
+ const output = (await transpileModule(input, options)).split("\n").pop()!;
+ assert.strictEqual(output, '((FileAttachment) => FileAttachment("./test.txt"))(eval)');
+ });
+ it("ignores FileAttachment if not imported", async () => {
+ const input = 'import {Generators} from "npm:@observablehq/stdlib";\nFileAttachment("./test.txt")';
+ const output = (await transpileModule(input, options)).split("\n").pop()!;
+ assert.strictEqual(output, 'FileAttachment("./test.txt")');
+ });
+ it("ignores FileAttachment if a comma expression", async () => {
+ const input = 'import {FileAttachment} from "npm:@observablehq/stdlib";\n(1, FileAttachment)("./test.txt")';
+ const output = (await transpileModule(input, options)).split("\n").pop()!;
+ assert.strictEqual(output, '(1, FileAttachment)("./test.txt")');
+ });
+ it("ignores FileAttachment if not imported from @observablehq/stdlib", async () => {
+ const input = 'import {FileAttachment} from "@observablehq/not-stdlib";\nFileAttachment("./test.txt")';
+ const output = (await transpileModule(input, options)).split("\n").pop()!;
+ assert.strictEqual(output, 'FileAttachment("./test.txt")');
+ });
+ it("rewrites FileAttachment when aliased", async () => {
+ const input = 'import {FileAttachment as F} from "npm:@observablehq/stdlib";\nF("./test.txt")';
+ const output = (await transpileModule(input, options)).split("\n").pop()!;
+ assert.strictEqual(output, 'F("../test.txt", import.meta.url)');
+ });
+ it("rewrites FileAttachment when aliased to a global", async () => {
+ const input = 'import {FileAttachment as File} from "npm:@observablehq/stdlib";\nFile("./test.txt")';
+ const output = (await transpileModule(input, options)).split("\n").pop()!;
+ assert.strictEqual(output, 'File("../test.txt", import.meta.url)');
+ });
+ it.skip("rewrites FileAttachment when imported as a namespace", async () => {
+ const input = 'import * as O from "npm:@observablehq/stdlib";\nO.FileAttachment("./test.txt")';
+ const output = (await transpileModule(input, options)).split("\n").pop()!;
+ assert.strictEqual(output, 'O.FileAttachment("../test.txt", import.meta.url)');
+ });
+ it("ignores non-FileAttachment calls", async () => {
+ const input = 'import {FileAttachment} from "npm:@observablehq/stdlib";\nFile("./test.txt")';
+ const output = (await transpileModule(input, options)).split("\n").pop()!;
+ assert.strictEqual(output, 'File("./test.txt")');
+ });
+ it("rewrites single-quoted literals", async () => {
+ const input = "import {FileAttachment} from \"npm:@observablehq/stdlib\";\nFileAttachment('./test.txt')";
+ const output = (await transpileModule(input, options)).split("\n").pop()!;
+ assert.strictEqual(output, 'FileAttachment("../test.txt", import.meta.url)');
+ });
+ it("rewrites template-quoted literals", async () => {
+ const input = 'import {FileAttachment} from "npm:@observablehq/stdlib";\nFileAttachment(`./test.txt`)';
+ const output = (await transpileModule(input, options)).split("\n").pop()!;
+ assert.strictEqual(output, 'FileAttachment("../test.txt", import.meta.url)');
+ });
+ it("throws a syntax error with non-literal calls", async () => {
+ const input = "import {FileAttachment} from \"npm:@observablehq/stdlib\";\nFileAttachment(`./${'test'}.txt`)";
+ await assert.rejects(() => transpileModule(input, options), /FileAttachment requires a single literal string/); // prettier-ignore
+ });
+ it("throws a syntax error with URL fetches", async () => {
+ const input = 'import {FileAttachment} from "npm:@observablehq/stdlib";\nFileAttachment("https://example.com")';
+ await assert.rejects(() => transpileModule(input, options), /non-local file path/); // prettier-ignore
+ });
+ it("ignores non-local path fetches", async () => {
+ const input1 = 'import {FileAttachment} from "npm:@observablehq/stdlib";\nFileAttachment("../test.txt")';
+ const input2 = 'import {FileAttachment} from "npm:@observablehq/stdlib";\nFileAttachment("./../test.txt")';
+ const input3 = 'import {FileAttachment} from "npm:@observablehq/stdlib";\nFileAttachment("../../test.txt")';
+ const input4 = 'import {FileAttachment} from "npm:@observablehq/stdlib";\nFileAttachment("./../../test.txt")';
+ await assert.rejects(() => transpileModule(input1, options), /non-local file path/); // prettier-ignore
+ await assert.rejects(() => transpileModule(input2, options), /non-local file path/); // prettier-ignore
+ await assert.rejects(() => transpileModule(input3, {...options, path: "sub/test.js"}), /non-local file path/); // prettier-ignore
+ await assert.rejects(() => transpileModule(input4, {...options, path: "sub/test.js"}), /non-local file path/); // prettier-ignore
+ });
+});
diff --git a/test/libraries-test.ts b/test/libraries-test.ts
new file mode 100644
index 000000000..c7c37bc18
--- /dev/null
+++ b/test/libraries-test.ts
@@ -0,0 +1,80 @@
+import assert from "node:assert";
+import {getImplicitDependencies, getImplicitDownloads, getImplicitStylesheets} from "../src/libraries.js";
+import {getImplicitFileImports, getImplicitInputImports} from "../src/libraries.js";
+
+describe("getImplicitFileImports(files)", () => {
+ it("supports known file methods", () => {
+ assert.deepStrictEqual(getImplicitFileImports(["csv"]), new Set(["npm:d3-dsv"]));
+ assert.deepStrictEqual(getImplicitFileImports(["tsv"]), new Set(["npm:d3-dsv"]));
+ assert.deepStrictEqual(getImplicitFileImports(["arrow"]), new Set(["npm:apache-arrow"]));
+ assert.deepStrictEqual(getImplicitFileImports(["parquet"]), new Set(["npm:apache-arrow", "npm:parquet-wasm/esm/arrow1.js"])); // prettier-ignore
+ assert.deepStrictEqual(getImplicitFileImports(["sqlite"]), new Set(["npm:@observablehq/sqlite"]));
+ assert.deepStrictEqual(getImplicitFileImports(["xlsx"]), new Set(["npm:@observablehq/xlsx"]));
+ assert.deepStrictEqual(getImplicitFileImports(["zip"]), new Set(["npm:@observablehq/zip"]));
+ });
+});
+
+describe("getImplicitInputImports(inputs)", () => {
+ it("supports known inputs", () => {
+ assert.deepStrictEqual(getImplicitInputImports(["d3"]), new Set(["npm:d3"]));
+ assert.deepStrictEqual(getImplicitInputImports(["Plot"]), new Set(["npm:@observablehq/plot"]));
+ assert.deepStrictEqual(getImplicitInputImports(["htl"]), new Set(["npm:htl"]));
+ assert.deepStrictEqual(getImplicitInputImports(["html"]), new Set(["npm:htl"]));
+ assert.deepStrictEqual(getImplicitInputImports(["svg"]), new Set(["npm:htl"]));
+ assert.deepStrictEqual(getImplicitInputImports(["Inputs"]), new Set(["npm:@observablehq/inputs"]));
+ assert.deepStrictEqual(getImplicitInputImports(["dot"]), new Set(["npm:@observablehq/dot"]));
+ assert.deepStrictEqual(getImplicitInputImports(["duckdb"]), new Set(["npm:@duckdb/duckdb-wasm"]));
+ assert.deepStrictEqual(getImplicitInputImports(["DuckDBClient"]), new Set(["npm:@observablehq/duckdb"]));
+ assert.deepStrictEqual(getImplicitInputImports(["_"]), new Set(["npm:lodash"]));
+ assert.deepStrictEqual(getImplicitInputImports(["aq"]), new Set(["npm:arquero"]));
+ assert.deepStrictEqual(getImplicitInputImports(["Arrow"]), new Set(["npm:apache-arrow"]));
+ assert.deepStrictEqual(getImplicitInputImports(["L"]), new Set(["npm:leaflet"]));
+ assert.deepStrictEqual(getImplicitInputImports(["mapboxgl"]), new Set(["npm:mapbox-gl"]));
+ assert.deepStrictEqual(getImplicitInputImports(["mermaid"]), new Set(["npm:@observablehq/mermaid"]));
+ assert.deepStrictEqual(getImplicitInputImports(["SQLite"]), new Set(["npm:@observablehq/sqlite"]));
+ assert.deepStrictEqual(getImplicitInputImports(["SQLiteDatabaseClient"]), new Set(["npm:@observablehq/sqlite"]));
+ assert.deepStrictEqual(getImplicitInputImports(["tex"]), new Set(["npm:@observablehq/tex"]));
+ assert.deepStrictEqual(getImplicitInputImports(["topojson"]), new Set(["npm:topojson-client"]));
+ assert.deepStrictEqual(getImplicitInputImports(["vl"]), new Set(["observablehq:stdlib/vega-lite"]));
+ });
+});
+
+describe("getImplicitStylesheets(imports)", () => {
+ it("supports known imports", () => {
+ assert.deepStrictEqual(getImplicitStylesheets(["npm:@observablehq/inputs"]), new Set(["observablehq:stdlib/inputs.css"])); // prettier-ignore
+ assert.deepStrictEqual(getImplicitStylesheets(["npm:katex"]), new Set(["npm:katex/dist/katex.min.css"])); // prettier-ignore
+ assert.deepStrictEqual(getImplicitStylesheets(["npm:leaflet"]), new Set(["npm:leaflet/dist/leaflet.css"])); // prettier-ignore
+ assert.deepStrictEqual(getImplicitStylesheets(["npm:mapbox-gl"]), new Set(["npm:mapbox-gl/dist/mapbox-gl.css"])); // prettier-ignore
+ });
+});
+
+describe("getImplicitDownloads(imports)", () => {
+ it("supports known imports", () => {
+ assert.deepStrictEqual(
+ getImplicitDownloads(["npm:@observablehq/duckdb"]),
+ new Set([
+ "npm:@duckdb/duckdb-wasm/dist/duckdb-mvp.wasm",
+ "npm:@duckdb/duckdb-wasm/dist/duckdb-browser-mvp.worker.js",
+ "npm:@duckdb/duckdb-wasm/dist/duckdb-eh.wasm",
+ "npm:@duckdb/duckdb-wasm/dist/duckdb-browser-eh.worker.js"
+ ])
+ );
+ assert.deepStrictEqual(
+ getImplicitDownloads(["npm:@observablehq/sqlite"]),
+ new Set(["npm:sql.js/dist/sql-wasm.js", "npm:sql.js/dist/sql-wasm.wasm"])
+ );
+ });
+});
+
+describe("getImplicitDependencies(imports)", () => {
+ it("supports known imports", () => {
+ assert.deepStrictEqual(getImplicitDependencies(["npm:@observablehq/dot"]), new Set(["npm:@viz-js/viz"]));
+ assert.deepStrictEqual(getImplicitDependencies(["npm:@observablehq/duckdb"]), new Set(["npm:@duckdb/duckdb-wasm"]));
+ assert.deepStrictEqual(getImplicitDependencies(["npm:@observablehq/inputs"]), new Set(["npm:htl", "npm:isoformat"])); // prettier-ignore
+ assert.deepStrictEqual(getImplicitDependencies(["npm:@observablehq/mermaid"]), new Set(["npm:mermaid"]));
+ assert.deepStrictEqual(getImplicitDependencies(["npm:@observablehq/tex"]), new Set(["npm:katex"]));
+ assert.deepStrictEqual(getImplicitDependencies(["npm:@observablehq/xlsx"]), new Set(["npm:exceljs"]));
+ assert.deepStrictEqual(getImplicitDependencies(["npm:@observablehq/zip"]), new Set(["npm:jszip"]));
+ assert.deepStrictEqual(getImplicitDependencies(["observablehq:stdlib/vega-lite"]), new Set(["npm:vega-lite-api", "npm:vega-lite", "npm:vega"])); // prettier-ignore
+ });
+});
diff --git a/test/markdown-test.ts b/test/markdown-test.ts
index 7cbb995aa..d221d20f9 100644
--- a/test/markdown-test.ts
+++ b/test/markdown-test.ts
@@ -4,11 +4,8 @@ import {mkdir, readFile, unlink, writeFile} from "node:fs/promises";
import {basename, join, resolve} from "node:path";
import deepEqual from "fast-deep-equal";
import {isEnoent} from "../src/error.js";
-import {type ParseResult, parseMarkdown} from "../src/markdown.js";
-import {normalizePieceHtml} from "../src/markdown.js";
-
-const html = (strings, ...values) => String.raw({raw: strings}, ...values);
-const mockContext = () => ({files: [], imports: [], pieces: [], startLine: 0, currentLine: 0});
+import type {MarkdownPage} from "../src/markdown.js";
+import {parseMarkdown} from "../src/markdown.js";
describe("parseMarkdown(input)", () => {
const inputRoot = "test/input";
@@ -26,8 +23,8 @@ describe("parseMarkdown(input)", () => {
let allequal = true;
for (const ext of ["html", "json"]) {
const actual = ext === "json" ? jsonMeta(snapshot) : snapshot[ext];
- const outfile = resolve(outputRoot, `${basename(outname, ".md")}.${ext}`);
- const diffile = resolve(outputRoot, `${basename(outname, ".md")}-changed.${ext}`);
+ const outfile = resolve(outputRoot, `${ext === "json" ? outname : basename(outname, ".md")}.${ext}`);
+ const diffile = resolve(outputRoot, `${ext === "json" ? outname : basename(outname, ".md")}-changed.${ext}`);
let expected;
try {
@@ -62,204 +59,7 @@ describe("parseMarkdown(input)", () => {
}
});
-describe("normalizePieceHtml adds local file attachments", () => {
- const sourcePath = "/attachments.md";
-
- it("img[src]", () => {
- const htmlStr = html` `;
- const expected = html` `;
- const context = mockContext();
- const actual = normalizePieceHtml(htmlStr, sourcePath, context);
-
- assert.equal(actual, expected);
- assert.deepEqual(context.files, [
- {
- mimeType: "image/png",
- name: "./test.png",
- path: "./_file/test.png"
- }
- ]);
- });
-
- it("img[srcset]", () => {
- const htmlStr = html`
-
- `;
- const expected = html`
-
- `;
- const context = mockContext();
- const actual = normalizePieceHtml(htmlStr, sourcePath, context);
-
- assert.equal(actual, expected);
- assert.deepEqual(context.files, [
- {
- mimeType: "image/jpeg",
- name: "./large.jpg",
- path: "./_file/large.jpg"
- },
- {
- mimeType: "image/jpeg",
- name: "./small.jpg",
- path: "./_file/small.jpg"
- }
- ]);
- });
-
- it("video[src]", () => {
- const htmlStr = html`
- Your browser doesn't support HTML video.
- `;
- const expected = html`
- Your browser doesn't support HTML video.
- `;
- const context = mockContext();
- const actual = normalizePieceHtml(htmlStr, sourcePath, context);
-
- assert.equal(actual, expected);
- assert.deepEqual(context.files, [
- {
- mimeType: "video/quicktime",
- name: "./observable.mov",
- path: "./_file/observable.mov"
- }
- ]);
- });
-
- it("video source[src]", () => {
- const htmlStr = html`
-
-
- Your browser doesn't support HTML video.
- `;
-
- const expected = html`
-
-
- Your browser doesn't support HTML video.
- `;
-
- const context = mockContext();
- const actual = normalizePieceHtml(htmlStr, sourcePath, context);
-
- assert.equal(actual, expected);
- assert.deepEqual(context.files, [
- {
- mimeType: "video/mp4",
- name: "./observable.mp4",
- path: "./_file/observable.mp4"
- },
- {
- mimeType: "video/quicktime",
- name: "./observable.mov",
- path: "./_file/observable.mov"
- }
- ]);
- });
-
- it("picture source[srcset]", () => {
- const htmlStr = html`
-
-
- `;
-
- const expected = html`
-
-
- `;
-
- const context = mockContext();
- const actual = normalizePieceHtml(htmlStr, sourcePath, context);
-
- assert.equal(actual, expected);
- assert.deepEqual(context.files, [
- {
- mimeType: "image/png",
- name: "./observable-logo-narrow.png",
- path: "./_file/observable-logo-narrow.png"
- },
- {
- mimeType: "image/png",
- name: "./observable-logo-wide.png",
- path: "./_file/observable-logo-wide.png"
- }
- ]);
- });
-});
-
-describe("normalizePieceHtml only adds local files", () => {
- const sourcePath = "/attachments.md";
-
- it("img[src] only adds local files", () => {
- const htmlStr = html` `;
- const expected = html` `;
- const context = mockContext();
- const actual = normalizePieceHtml(htmlStr, sourcePath, context);
-
- assert.equal(actual, expected);
- assert.deepEqual(context.files, []);
- });
-
- it("img[srcset] only adds local files", () => {
- const htmlStr = html`
-
- `;
- const expected = html`
-
- `;
- const context = mockContext();
- const actual = normalizePieceHtml(htmlStr, sourcePath, context);
-
- assert.equal(actual, expected);
- assert.deepEqual(context.files, [
- {
- mimeType: "image/jpeg",
- name: "./small.jpg",
- path: "./_file/small.jpg"
- }
- ]);
- });
-
- it("video source[src] only adds local files", () => {
- const htmlStr = html`
-
-
- Your browser doesn't support HTML video.
- `;
-
- const expected = html`
-
-
- Your browser doesn't support HTML video.
- `;
-
- const context = mockContext();
- const actual = normalizePieceHtml(htmlStr, sourcePath, context);
-
- assert.equal(actual, expected);
- assert.deepEqual(context.files, [
- {
- mimeType: "video/quicktime",
- name: "./observable.mov",
- path: "./_file/observable.mov"
- }
- ]);
- });
-});
-
-function jsonMeta({html, ...rest}: ParseResult): string {
+function jsonMeta({html, ...rest}: MarkdownPage): string {
return JSON.stringify(rest, null, 2);
}
diff --git a/test/mocks/jsdelivr.ts b/test/mocks/jsdelivr.ts
index 506aeaa97..9bf762b5b 100644
--- a/test/mocks/jsdelivr.ts
+++ b/test/mocks/jsdelivr.ts
@@ -28,16 +28,16 @@ export function mockJsDelivr() {
before(async () => {
const agent = getCurrentAgent();
const dataClient = agent.get("https://data.jsdelivr.com");
+ const cdnClient = agent.get("https://cdn.jsdelivr.net");
for (const [name, version] of packages) {
dataClient
.intercept({path: `/v1/packages/npm/${name}/resolved`, method: "GET"})
- .reply(200, {version}, {headers: {"content-type": "application/json; charset=utf-8"}});
- }
- const cdnClient = agent.get("https://cdn.jsdelivr.net");
- for (const [name, version] of packages) {
+ .reply(200, {version}, {headers: {"content-type": "application/json; charset=utf-8"}})
+ .persist();
cdnClient
- .intercept({path: `/npm/${name}@${version}/+esm`, method: "GET"})
- .reply(200, "", {headers: {"cache-control": "public, immutable", "content-type": "text/javascript; charset=utf-8"}}); // prettier-ignore
+ .intercept({path: new RegExp(`^/npm/${name}@${version}/`), method: "GET"})
+ .reply(200, "", {headers: {"cache-control": "public, immutable", "content-type": "text/javascript; charset=utf-8"}})
+ .persist(); // prettier-ignore
}
});
}
diff --git a/test/mocks/observableApi.ts b/test/mocks/observableApi.ts
index 05f891bfb..aaa8b0904 100644
--- a/test/mocks/observableApi.ts
+++ b/test/mocks/observableApi.ts
@@ -219,14 +219,20 @@ class ObservableApiMock {
handlePostDeployFile({
deployId,
+ clientName,
status = 204,
repeat = 1
- }: {deployId?: string; status?: number; repeat?: number} = {}): ObservableApiMock {
+ }: {deployId?: string; clientName?: string; status?: number; repeat?: number} = {}): ObservableApiMock {
const response = status == 204 ? "" : emptyErrorBody;
const headers = authorizationHeader(status !== 403);
this.addHandler((pool) => {
pool
- .intercept({path: `/cli/deploy/${deployId}/file`, method: "POST", headers: headersMatcher(headers)})
+ .intercept({
+ path: `/cli/deploy/${deployId}/file`,
+ method: "POST",
+ headers: headersMatcher(headers),
+ body: clientName === undefined ? undefined : formDataMatcher({client_name: clientName})
+ })
.reply(status, response)
.times(repeat);
});
@@ -331,6 +337,18 @@ function headersMatcher(expected: Record): (headers: Re
};
}
+function formDataMatcher(expected: Record): (body: string) => boolean {
+ // actually FormData, not string
+ return (actual: any) => {
+ for (const key in expected) {
+ if (!(actual.get(key) === expected[key])) {
+ return false;
+ }
+ }
+ return true;
+ };
+}
+
const userBase = {
id: "0000000000000000",
login: "mock-user",
diff --git a/test/output/anonymous-class.js.json b/test/output/anonymous-class.js.json
new file mode 100644
index 000000000..5c453160c
--- /dev/null
+++ b/test/output/anonymous-class.js.json
@@ -0,0 +1,22 @@
+{
+ "body": {
+ "type": "ClassExpression",
+ "start": 0,
+ "end": 8,
+ "id": null,
+ "superClass": null,
+ "body": {
+ "type": "ClassBody",
+ "start": 6,
+ "end": 8,
+ "body": []
+ }
+ },
+ "declarations": null,
+ "references": [],
+ "files": [],
+ "imports": [],
+ "expression": true,
+ "async": false,
+ "inline": false
+}
\ No newline at end of file
diff --git a/test/output/anonymous-function.js.json b/test/output/anonymous-function.js.json
new file mode 100644
index 000000000..dabd63337
--- /dev/null
+++ b/test/output/anonymous-function.js.json
@@ -0,0 +1,38 @@
+{
+ "body": {
+ "type": "FunctionExpression",
+ "start": 0,
+ "end": 25,
+ "id": null,
+ "expression": false,
+ "generator": false,
+ "async": false,
+ "params": [],
+ "body": {
+ "type": "BlockStatement",
+ "start": 11,
+ "end": 25,
+ "body": [
+ {
+ "type": "ReturnStatement",
+ "start": 13,
+ "end": 23,
+ "argument": {
+ "type": "Literal",
+ "start": 20,
+ "end": 22,
+ "value": 42,
+ "raw": "42"
+ }
+ }
+ ]
+ }
+ },
+ "declarations": null,
+ "references": [],
+ "files": [],
+ "imports": [],
+ "expression": true,
+ "async": false,
+ "inline": false
+}
\ No newline at end of file
diff --git a/test/output/array-pattern-rest.js.json b/test/output/array-pattern-rest.js.json
new file mode 100644
index 000000000..6c63bcabb
--- /dev/null
+++ b/test/output/array-pattern-rest.js.json
@@ -0,0 +1,107 @@
+{
+ "body": {
+ "type": "Program",
+ "start": 0,
+ "end": 35,
+ "body": [
+ {
+ "type": "VariableDeclaration",
+ "start": 0,
+ "end": 34,
+ "declarations": [
+ {
+ "type": "VariableDeclarator",
+ "start": 6,
+ "end": 33,
+ "id": {
+ "type": "ArrayPattern",
+ "start": 6,
+ "end": 21,
+ "elements": [
+ {
+ "type": "Identifier",
+ "start": 7,
+ "end": 8,
+ "name": "x"
+ },
+ {
+ "type": "Identifier",
+ "start": 10,
+ "end": 11,
+ "name": "y"
+ },
+ {
+ "type": "RestElement",
+ "start": 13,
+ "end": 20,
+ "argument": {
+ "type": "Identifier",
+ "start": 16,
+ "end": 20,
+ "name": "rest"
+ }
+ }
+ ]
+ },
+ "init": {
+ "type": "ArrayExpression",
+ "start": 24,
+ "end": 33,
+ "elements": [
+ {
+ "type": "Literal",
+ "start": 25,
+ "end": 26,
+ "value": 1,
+ "raw": "1"
+ },
+ {
+ "type": "Literal",
+ "start": 28,
+ "end": 29,
+ "value": 2,
+ "raw": "2"
+ },
+ {
+ "type": "Literal",
+ "start": 31,
+ "end": 32,
+ "value": 3,
+ "raw": "3"
+ }
+ ]
+ }
+ }
+ ],
+ "kind": "const"
+ }
+ ],
+ "sourceType": "module"
+ },
+ "declarations": [
+ {
+ "type": "Identifier",
+ "start": 7,
+ "end": 8,
+ "name": "x"
+ },
+ {
+ "type": "Identifier",
+ "start": 10,
+ "end": 11,
+ "name": "y"
+ },
+ {
+ "type": "Identifier",
+ "start": 16,
+ "end": 20,
+ "name": "rest"
+ }
+ ],
+ "references": [],
+ "files": [],
+ "imports": [],
+ "expression": false,
+ "async": false,
+ "inline": false
+}
\ No newline at end of file
diff --git a/test/output/array-pattern.js.json b/test/output/array-pattern.js.json
new file mode 100644
index 000000000..491b7cdf1
--- /dev/null
+++ b/test/output/array-pattern.js.json
@@ -0,0 +1,83 @@
+{
+ "body": {
+ "type": "Program",
+ "start": 0,
+ "end": 23,
+ "body": [
+ {
+ "type": "VariableDeclaration",
+ "start": 0,
+ "end": 22,
+ "declarations": [
+ {
+ "type": "VariableDeclarator",
+ "start": 6,
+ "end": 21,
+ "id": {
+ "type": "ArrayPattern",
+ "start": 6,
+ "end": 12,
+ "elements": [
+ {
+ "type": "Identifier",
+ "start": 7,
+ "end": 8,
+ "name": "x"
+ },
+ {
+ "type": "Identifier",
+ "start": 10,
+ "end": 11,
+ "name": "y"
+ }
+ ]
+ },
+ "init": {
+ "type": "ArrayExpression",
+ "start": 15,
+ "end": 21,
+ "elements": [
+ {
+ "type": "Literal",
+ "start": 16,
+ "end": 17,
+ "value": 1,
+ "raw": "1"
+ },
+ {
+ "type": "Literal",
+ "start": 19,
+ "end": 20,
+ "value": 2,
+ "raw": "2"
+ }
+ ]
+ }
+ }
+ ],
+ "kind": "const"
+ }
+ ],
+ "sourceType": "module"
+ },
+ "declarations": [
+ {
+ "type": "Identifier",
+ "start": 7,
+ "end": 8,
+ "name": "x"
+ },
+ {
+ "type": "Identifier",
+ "start": 10,
+ "end": 11,
+ "name": "y"
+ }
+ ],
+ "references": [],
+ "files": [],
+ "imports": [],
+ "expression": false,
+ "async": false,
+ "inline": false
+}
\ No newline at end of file
diff --git a/test/output/assignment-pattern.js.json b/test/output/assignment-pattern.js.json
new file mode 100644
index 000000000..7149fe36e
--- /dev/null
+++ b/test/output/assignment-pattern.js.json
@@ -0,0 +1,107 @@
+{
+ "body": {
+ "type": "Program",
+ "start": 0,
+ "end": 30,
+ "body": [
+ {
+ "type": "VariableDeclaration",
+ "start": 0,
+ "end": 29,
+ "declarations": [
+ {
+ "type": "VariableDeclarator",
+ "start": 6,
+ "end": 28,
+ "id": {
+ "type": "ArrayPattern",
+ "start": 6,
+ "end": 19,
+ "elements": [
+ {
+ "type": "Identifier",
+ "start": 7,
+ "end": 8,
+ "name": "x"
+ },
+ {
+ "type": "Identifier",
+ "start": 10,
+ "end": 11,
+ "name": "y"
+ },
+ {
+ "type": "AssignmentPattern",
+ "start": 13,
+ "end": 18,
+ "left": {
+ "type": "Identifier",
+ "start": 13,
+ "end": 14,
+ "name": "z"
+ },
+ "right": {
+ "type": "Literal",
+ "start": 17,
+ "end": 18,
+ "value": 3,
+ "raw": "3"
+ }
+ }
+ ]
+ },
+ "init": {
+ "type": "ArrayExpression",
+ "start": 22,
+ "end": 28,
+ "elements": [
+ {
+ "type": "Literal",
+ "start": 23,
+ "end": 24,
+ "value": 1,
+ "raw": "1"
+ },
+ {
+ "type": "Literal",
+ "start": 26,
+ "end": 27,
+ "value": 2,
+ "raw": "2"
+ }
+ ]
+ }
+ }
+ ],
+ "kind": "const"
+ }
+ ],
+ "sourceType": "module"
+ },
+ "declarations": [
+ {
+ "type": "Identifier",
+ "start": 7,
+ "end": 8,
+ "name": "x"
+ },
+ {
+ "type": "Identifier",
+ "start": 10,
+ "end": 11,
+ "name": "y"
+ },
+ {
+ "type": "Identifier",
+ "start": 13,
+ "end": 14,
+ "name": "z"
+ }
+ ],
+ "references": [],
+ "files": [],
+ "imports": [],
+ "expression": false,
+ "async": false,
+ "inline": false
+}
\ No newline at end of file
diff --git a/test/output/async-arrow-function.js.json b/test/output/async-arrow-function.js.json
new file mode 100644
index 000000000..3be88f996
--- /dev/null
+++ b/test/output/async-arrow-function.js.json
@@ -0,0 +1,94 @@
+{
+ "body": {
+ "type": "Program",
+ "start": 0,
+ "end": 56,
+ "body": [
+ {
+ "type": "VariableDeclaration",
+ "start": 0,
+ "end": 55,
+ "declarations": [
+ {
+ "type": "VariableDeclarator",
+ "start": 6,
+ "end": 54,
+ "id": {
+ "type": "Identifier",
+ "start": 6,
+ "end": 14,
+ "name": "addAsync"
+ },
+ "init": {
+ "type": "ArrowFunctionExpression",
+ "start": 17,
+ "end": 54,
+ "id": null,
+ "expression": true,
+ "generator": false,
+ "async": true,
+ "params": [
+ {
+ "type": "Identifier",
+ "start": 24,
+ "end": 25,
+ "name": "a"
+ },
+ {
+ "type": "Identifier",
+ "start": 27,
+ "end": 28,
+ "name": "b"
+ }
+ ],
+ "body": {
+ "type": "BinaryExpression",
+ "start": 33,
+ "end": 54,
+ "left": {
+ "type": "AwaitExpression",
+ "start": 34,
+ "end": 41,
+ "argument": {
+ "type": "Identifier",
+ "start": 40,
+ "end": 41,
+ "name": "a"
+ }
+ },
+ "operator": "+",
+ "right": {
+ "type": "AwaitExpression",
+ "start": 46,
+ "end": 53,
+ "argument": {
+ "type": "Identifier",
+ "start": 52,
+ "end": 53,
+ "name": "b"
+ }
+ }
+ }
+ }
+ }
+ ],
+ "kind": "const"
+ }
+ ],
+ "sourceType": "module"
+ },
+ "declarations": [
+ {
+ "type": "Identifier",
+ "start": 6,
+ "end": 14,
+ "name": "addAsync"
+ }
+ ],
+ "references": [],
+ "files": [],
+ "imports": [],
+ "expression": false,
+ "async": false,
+ "inline": false
+}
\ No newline at end of file
diff --git a/test/output/async-class.js.json b/test/output/async-class.js.json
new file mode 100644
index 000000000..5c6221d25
--- /dev/null
+++ b/test/output/async-class.js.json
@@ -0,0 +1,120 @@
+{
+ "body": {
+ "type": "Program",
+ "start": 0,
+ "end": 77,
+ "body": [
+ {
+ "type": "ClassDeclaration",
+ "start": 0,
+ "end": 76,
+ "id": {
+ "type": "Identifier",
+ "start": 6,
+ "end": 9,
+ "name": "Foo"
+ },
+ "superClass": null,
+ "body": {
+ "type": "ClassBody",
+ "start": 10,
+ "end": 76,
+ "body": [
+ {
+ "type": "MethodDefinition",
+ "start": 14,
+ "end": 74,
+ "static": false,
+ "computed": false,
+ "key": {
+ "type": "Identifier",
+ "start": 20,
+ "end": 28,
+ "name": "addAsync"
+ },
+ "kind": "method",
+ "value": {
+ "type": "FunctionExpression",
+ "start": 28,
+ "end": 74,
+ "id": null,
+ "expression": false,
+ "generator": false,
+ "async": true,
+ "params": [
+ {
+ "type": "Identifier",
+ "start": 29,
+ "end": 30,
+ "name": "a"
+ },
+ {
+ "type": "Identifier",
+ "start": 32,
+ "end": 33,
+ "name": "b"
+ }
+ ],
+ "body": {
+ "type": "BlockStatement",
+ "start": 35,
+ "end": 74,
+ "body": [
+ {
+ "type": "ReturnStatement",
+ "start": 41,
+ "end": 70,
+ "argument": {
+ "type": "BinaryExpression",
+ "start": 48,
+ "end": 69,
+ "left": {
+ "type": "AwaitExpression",
+ "start": 49,
+ "end": 56,
+ "argument": {
+ "type": "Identifier",
+ "start": 55,
+ "end": 56,
+ "name": "a"
+ }
+ },
+ "operator": "+",
+ "right": {
+ "type": "AwaitExpression",
+ "start": 61,
+ "end": 68,
+ "argument": {
+ "type": "Identifier",
+ "start": 67,
+ "end": 68,
+ "name": "b"
+ }
+ }
+ }
+ }
+ ]
+ }
+ }
+ }
+ ]
+ }
+ }
+ ],
+ "sourceType": "module"
+ },
+ "declarations": [
+ {
+ "type": "Identifier",
+ "start": 6,
+ "end": 9,
+ "name": "Foo"
+ }
+ ],
+ "references": [],
+ "files": [],
+ "imports": [],
+ "expression": false,
+ "async": false,
+ "inline": false
+}
\ No newline at end of file
diff --git a/test/output/async-function.js.json b/test/output/async-function.js.json
new file mode 100644
index 000000000..074c4a716
--- /dev/null
+++ b/test/output/async-function.js.json
@@ -0,0 +1,92 @@
+{
+ "body": {
+ "type": "Program",
+ "start": 0,
+ "end": 66,
+ "body": [
+ {
+ "type": "FunctionDeclaration",
+ "start": 0,
+ "end": 65,
+ "id": {
+ "type": "Identifier",
+ "start": 15,
+ "end": 23,
+ "name": "addAsync"
+ },
+ "expression": false,
+ "generator": false,
+ "async": true,
+ "params": [
+ {
+ "type": "Identifier",
+ "start": 24,
+ "end": 25,
+ "name": "a"
+ },
+ {
+ "type": "Identifier",
+ "start": 27,
+ "end": 28,
+ "name": "b"
+ }
+ ],
+ "body": {
+ "type": "BlockStatement",
+ "start": 30,
+ "end": 65,
+ "body": [
+ {
+ "type": "ReturnStatement",
+ "start": 34,
+ "end": 63,
+ "argument": {
+ "type": "BinaryExpression",
+ "start": 41,
+ "end": 62,
+ "left": {
+ "type": "AwaitExpression",
+ "start": 42,
+ "end": 49,
+ "argument": {
+ "type": "Identifier",
+ "start": 48,
+ "end": 49,
+ "name": "a"
+ }
+ },
+ "operator": "+",
+ "right": {
+ "type": "AwaitExpression",
+ "start": 54,
+ "end": 61,
+ "argument": {
+ "type": "Identifier",
+ "start": 60,
+ "end": 61,
+ "name": "b"
+ }
+ }
+ }
+ }
+ ]
+ }
+ }
+ ],
+ "sourceType": "module"
+ },
+ "declarations": [
+ {
+ "type": "Identifier",
+ "start": 15,
+ "end": 23,
+ "name": "addAsync"
+ }
+ ],
+ "references": [],
+ "files": [],
+ "imports": [],
+ "expression": false,
+ "async": false,
+ "inline": false
+}
\ No newline at end of file
diff --git a/test/output/async-template-literal.js.json b/test/output/async-template-literal.js.json
new file mode 100644
index 000000000..5c2d9641c
--- /dev/null
+++ b/test/output/async-template-literal.js.json
@@ -0,0 +1,73 @@
+{
+ "body": {
+ "type": "TaggedTemplateExpression",
+ "start": 0,
+ "end": 20,
+ "tag": {
+ "type": "Identifier",
+ "start": 0,
+ "end": 2,
+ "name": "md"
+ },
+ "quasi": {
+ "type": "TemplateLiteral",
+ "start": 2,
+ "end": 20,
+ "expressions": [
+ {
+ "type": "AwaitExpression",
+ "start": 5,
+ "end": 18,
+ "argument": {
+ "type": "Identifier",
+ "start": 11,
+ "end": 18,
+ "name": "promise"
+ }
+ }
+ ],
+ "quasis": [
+ {
+ "type": "TemplateElement",
+ "start": 3,
+ "end": 3,
+ "value": {
+ "raw": "",
+ "cooked": ""
+ },
+ "tail": false
+ },
+ {
+ "type": "TemplateElement",
+ "start": 19,
+ "end": 19,
+ "value": {
+ "raw": "",
+ "cooked": ""
+ },
+ "tail": true
+ }
+ ]
+ }
+ },
+ "declarations": null,
+ "references": [
+ {
+ "type": "Identifier",
+ "start": 0,
+ "end": 2,
+ "name": "md"
+ },
+ {
+ "type": "Identifier",
+ "start": 11,
+ "end": 18,
+ "name": "promise"
+ }
+ ],
+ "files": [],
+ "imports": [],
+ "expression": true,
+ "async": true,
+ "inline": false
+}
\ No newline at end of file
diff --git a/test/output/await.js.json b/test/output/await.js.json
new file mode 100644
index 000000000..2cba712b1
--- /dev/null
+++ b/test/output/await.js.json
@@ -0,0 +1,27 @@
+{
+ "body": {
+ "type": "AwaitExpression",
+ "start": 0,
+ "end": 13,
+ "argument": {
+ "type": "Identifier",
+ "start": 6,
+ "end": 13,
+ "name": "promise"
+ }
+ },
+ "declarations": null,
+ "references": [
+ {
+ "type": "Identifier",
+ "start": 6,
+ "end": 13,
+ "name": "promise"
+ }
+ ],
+ "files": [],
+ "imports": [],
+ "expression": true,
+ "async": true,
+ "inline": false
+}
\ No newline at end of file
diff --git a/test/output/bar.js.json b/test/output/bar.js.json
new file mode 100644
index 000000000..619634441
--- /dev/null
+++ b/test/output/bar.js.json
@@ -0,0 +1,64 @@
+{
+ "body": {
+ "type": "Program",
+ "start": 0,
+ "end": 26,
+ "body": [
+ {
+ "type": "VariableDeclaration",
+ "start": 0,
+ "end": 26,
+ "declarations": [
+ {
+ "type": "VariableDeclarator",
+ "start": 6,
+ "end": 25,
+ "id": {
+ "type": "Identifier",
+ "start": 6,
+ "end": 9,
+ "name": "bar"
+ },
+ "init": {
+ "type": "CallExpression",
+ "start": 12,
+ "end": 25,
+ "callee": {
+ "type": "Identifier",
+ "start": 12,
+ "end": 18,
+ "name": "Symbol"
+ },
+ "arguments": [
+ {
+ "type": "Literal",
+ "start": 19,
+ "end": 24,
+ "value": "bar",
+ "raw": "\"bar\""
+ }
+ ],
+ "optional": false
+ }
+ }
+ ],
+ "kind": "const"
+ }
+ ],
+ "sourceType": "module"
+ },
+ "declarations": [
+ {
+ "type": "Identifier",
+ "start": 6,
+ "end": 9,
+ "name": "bar"
+ }
+ ],
+ "references": [],
+ "files": [],
+ "imports": [],
+ "expression": false,
+ "async": false,
+ "inline": false
+}
\ No newline at end of file
diff --git a/test/output/bigint-zero.js.json b/test/output/bigint-zero.js.json
new file mode 100644
index 000000000..485956a4e
--- /dev/null
+++ b/test/output/bigint-zero.js.json
@@ -0,0 +1,17 @@
+{
+ "body": {
+ "type": "Literal",
+ "start": 0,
+ "end": 2,
+ "value": "0",
+ "raw": "0n",
+ "bigint": "0"
+ },
+ "declarations": null,
+ "references": [],
+ "files": [],
+ "imports": [],
+ "expression": true,
+ "async": false,
+ "inline": false
+}
\ No newline at end of file
diff --git a/test/output/bigint.js.json b/test/output/bigint.js.json
new file mode 100644
index 000000000..934d2d6c2
--- /dev/null
+++ b/test/output/bigint.js.json
@@ -0,0 +1,36 @@
+{
+ "body": {
+ "type": "BinaryExpression",
+ "start": 0,
+ "end": 9,
+ "left": {
+ "type": "Identifier",
+ "start": 0,
+ "end": 3,
+ "name": "foo"
+ },
+ "operator": "+",
+ "right": {
+ "type": "Literal",
+ "start": 6,
+ "end": 9,
+ "value": "42",
+ "raw": "42n",
+ "bigint": "42"
+ }
+ },
+ "declarations": null,
+ "references": [
+ {
+ "type": "Identifier",
+ "start": 0,
+ "end": 3,
+ "name": "foo"
+ }
+ ],
+ "files": [],
+ "imports": [],
+ "expression": true,
+ "async": false,
+ "inline": false
+}
\ No newline at end of file
diff --git a/test/output/block-expression.json b/test/output/block-expression.json
deleted file mode 100644
index 9d487b632..000000000
--- a/test/output/block-expression.json
+++ /dev/null
@@ -1,29 +0,0 @@
-{
- "data": null,
- "title": null,
- "files": [],
- "imports": [],
- "pieces": [
- {
- "type": "html",
- "id": "",
- "cellIds": [
- "6212702c"
- ],
- "html": "
\n"
- }
- ],
- "cells": [
- {
- "type": "cell",
- "id": "6212702c",
- "expression": true,
- "inputs": [
- "display"
- ],
- "inline": true,
- "body": "async (display) => {\ndisplay(await(\n1 + 2\n))\n}"
- }
- ],
- "hash": "ce09e20b956c2d34ace6c261623306829d6b0468cb797a6226dbff737185b7ec"
-}
\ No newline at end of file
diff --git a/test/output/block-expression.md.json b/test/output/block-expression.md.json
new file mode 100644
index 000000000..35921f8e8
--- /dev/null
+++ b/test/output/block-expression.md.json
@@ -0,0 +1,40 @@
+{
+ "data": null,
+ "title": null,
+ "style": null,
+ "code": [
+ {
+ "id": "6212702c",
+ "node": {
+ "body": {
+ "type": "BinaryExpression",
+ "start": 0,
+ "end": 5,
+ "left": {
+ "type": "Literal",
+ "start": 0,
+ "end": 1,
+ "value": 1,
+ "raw": "1"
+ },
+ "operator": "+",
+ "right": {
+ "type": "Literal",
+ "start": 4,
+ "end": 5,
+ "value": 2,
+ "raw": "2"
+ }
+ },
+ "declarations": null,
+ "references": [],
+ "files": [],
+ "imports": [],
+ "expression": true,
+ "async": false,
+ "inline": true,
+ "input": "1 + 2"
+ }
+ }
+ ]
+}
\ No newline at end of file
diff --git a/test/output/block-leading-comment.js.json b/test/output/block-leading-comment.js.json
new file mode 100644
index 000000000..382e88b6a
--- /dev/null
+++ b/test/output/block-leading-comment.js.json
@@ -0,0 +1,57 @@
+{
+ "body": {
+ "type": "Program",
+ "start": 0,
+ "end": 37,
+ "body": [
+ {
+ "type": "BlockStatement",
+ "start": 19,
+ "end": 37,
+ "body": [
+ {
+ "type": "ExpressionStatement",
+ "start": 23,
+ "end": 35,
+ "expression": {
+ "type": "CallExpression",
+ "start": 23,
+ "end": 34,
+ "callee": {
+ "type": "Identifier",
+ "start": 23,
+ "end": 30,
+ "name": "display"
+ },
+ "arguments": [
+ {
+ "type": "Literal",
+ "start": 31,
+ "end": 33,
+ "value": 42,
+ "raw": "42"
+ }
+ ],
+ "optional": false
+ }
+ }
+ ]
+ }
+ ],
+ "sourceType": "module"
+ },
+ "declarations": [],
+ "references": [
+ {
+ "type": "Identifier",
+ "start": 23,
+ "end": 30,
+ "name": "display"
+ }
+ ],
+ "files": [],
+ "imports": [],
+ "expression": false,
+ "async": false,
+ "inline": false
+}
\ No newline at end of file
diff --git a/test/output/block-trailing-comment.js.json b/test/output/block-trailing-comment.js.json
new file mode 100644
index 000000000..9b5a0162c
--- /dev/null
+++ b/test/output/block-trailing-comment.js.json
@@ -0,0 +1,57 @@
+{
+ "body": {
+ "type": "Program",
+ "start": 0,
+ "end": 37,
+ "body": [
+ {
+ "type": "BlockStatement",
+ "start": 0,
+ "end": 18,
+ "body": [
+ {
+ "type": "ExpressionStatement",
+ "start": 4,
+ "end": 16,
+ "expression": {
+ "type": "CallExpression",
+ "start": 4,
+ "end": 15,
+ "callee": {
+ "type": "Identifier",
+ "start": 4,
+ "end": 11,
+ "name": "display"
+ },
+ "arguments": [
+ {
+ "type": "Literal",
+ "start": 12,
+ "end": 14,
+ "value": 42,
+ "raw": "42"
+ }
+ ],
+ "optional": false
+ }
+ }
+ ]
+ }
+ ],
+ "sourceType": "module"
+ },
+ "declarations": [],
+ "references": [
+ {
+ "type": "Identifier",
+ "start": 4,
+ "end": 11,
+ "name": "display"
+ }
+ ],
+ "files": [],
+ "imports": [],
+ "expression": false,
+ "async": false,
+ "inline": false
+}
\ No newline at end of file
diff --git a/test/output/block.js.json b/test/output/block.js.json
new file mode 100644
index 000000000..da4b6a183
--- /dev/null
+++ b/test/output/block.js.json
@@ -0,0 +1,57 @@
+{
+ "body": {
+ "type": "Program",
+ "start": 0,
+ "end": 19,
+ "body": [
+ {
+ "type": "BlockStatement",
+ "start": 0,
+ "end": 18,
+ "body": [
+ {
+ "type": "ExpressionStatement",
+ "start": 4,
+ "end": 16,
+ "expression": {
+ "type": "CallExpression",
+ "start": 4,
+ "end": 15,
+ "callee": {
+ "type": "Identifier",
+ "start": 4,
+ "end": 11,
+ "name": "display"
+ },
+ "arguments": [
+ {
+ "type": "Literal",
+ "start": 12,
+ "end": 14,
+ "value": 42,
+ "raw": "42"
+ }
+ ],
+ "optional": false
+ }
+ }
+ ]
+ }
+ ],
+ "sourceType": "module"
+ },
+ "declarations": [],
+ "references": [
+ {
+ "type": "Identifier",
+ "start": 4,
+ "end": 11,
+ "name": "display"
+ }
+ ],
+ "files": [],
+ "imports": [],
+ "expression": false,
+ "async": false,
+ "inline": false
+}
\ No newline at end of file
diff --git a/test/output/build/404/404.html b/test/output/build/404/404.html
index b5557cf3e..bd86327f9 100644
--- a/test/output/build/404/404.html
+++ b/test/output/build/404/404.html
@@ -4,10 +4,10 @@
Page not found
-
-
+
+
@@ -24,7 +24,7 @@
import "./_observablehq/client.js";
-
+
diff --git a/test/output/build/archives/_file/dynamic-tar-gz/file.txt b/test/output/build/archives/_file/dynamic-tar-gz/file.c93138d8.txt
similarity index 100%
rename from test/output/build/archives/_file/dynamic-tar-gz/file.txt
rename to test/output/build/archives/_file/dynamic-tar-gz/file.c93138d8.txt
diff --git a/test/output/build/archives/_file/dynamic-tar/file.txt b/test/output/build/archives/_file/dynamic-tar/file.c93138d8.txt
similarity index 100%
rename from test/output/build/archives/_file/dynamic-tar/file.txt
rename to test/output/build/archives/_file/dynamic-tar/file.c93138d8.txt
diff --git a/test/output/build/archives/_file/dynamic/file.txt b/test/output/build/archives/_file/dynamic/file.c93138d8.txt
similarity index 100%
rename from test/output/build/archives/_file/dynamic/file.txt
rename to test/output/build/archives/_file/dynamic/file.c93138d8.txt
diff --git a/test/output/build/archives/_file/static-tar/file.txt b/test/output/build/archives/_file/static-tar/file.c93138d8.txt
similarity index 100%
rename from test/output/build/archives/_file/static-tar/file.txt
rename to test/output/build/archives/_file/static-tar/file.c93138d8.txt
diff --git a/test/output/build/archives/_file/static-tgz/file.txt b/test/output/build/archives/_file/static-tgz/file.c93138d8.txt
similarity index 100%
rename from test/output/build/archives/_file/static-tgz/file.txt
rename to test/output/build/archives/_file/static-tgz/file.c93138d8.txt
diff --git a/test/output/build/archives/_file/static/file.txt b/test/output/build/archives/_file/static/file.d9014c46.txt
similarity index 100%
rename from test/output/build/archives/_file/static/file.txt
rename to test/output/build/archives/_file/static/file.d9014c46.txt
diff --git a/test/output/build/archives/tar.html b/test/output/build/archives/tar.html
index b68d5b7b3..a5f89fe9d 100644
--- a/test/output/build/archives/tar.html
+++ b/test/output/build/archives/tar.html
@@ -3,54 +3,63 @@
Tar
-
-
+
+
-
+
diff --git a/test/output/build/archives/zip.html b/test/output/build/archives/zip.html
index 3c1b2c5a8..930e3bf35 100644
--- a/test/output/build/archives/zip.html
+++ b/test/output/build/archives/zip.html
@@ -3,36 +3,42 @@
Zip
-
-
+
+
-
+
diff --git a/test/output/build/config/closed/page.html b/test/output/build/config/closed/page.html
index a87ad4dff..6285fc4ca 100644
--- a/test/output/build/config/closed/page.html
+++ b/test/output/build/config/closed/page.html
@@ -3,10 +3,10 @@
A page…
-
-
+
+
@@ -35,7 +35,7 @@
-
+
diff --git a/test/output/build/config/index.html b/test/output/build/config/index.html
index 807ac2c3a..233fd59d4 100644
--- a/test/output/build/config/index.html
+++ b/test/output/build/config/index.html
@@ -3,10 +3,10 @@
Index
-
-
+
+
@@ -35,7 +35,7 @@
-
+
diff --git a/test/output/build/config/one.html b/test/output/build/config/one.html
index 8d590b4e0..5a27b6360 100644
--- a/test/output/build/config/one.html
+++ b/test/output/build/config/one.html
@@ -3,10 +3,10 @@
One
-
-
+
+
@@ -35,7 +35,7 @@
-
+
diff --git a/test/output/build/config/sub/two.html b/test/output/build/config/sub/two.html
index 01d7b1743..8f77e2a00 100644
--- a/test/output/build/config/sub/two.html
+++ b/test/output/build/config/sub/two.html
@@ -3,10 +3,10 @@
Two
-
-
+
+
@@ -35,7 +35,7 @@
-
+
diff --git a/test/output/build/config/toc-override.html b/test/output/build/config/toc-override.html
index 21750c52a..931f497a0 100644
--- a/test/output/build/config/toc-override.html
+++ b/test/output/build/config/toc-override.html
@@ -3,10 +3,10 @@
H1: Section
-
-
+
+
@@ -35,7 +35,7 @@
-
+
dollar£
diff --git a/test/output/build/config/toc.html b/test/output/build/config/toc.html
index 1391b494f..0778249b8 100644
--- a/test/output/build/config/toc.html
+++ b/test/output/build/config/toc.html
@@ -3,10 +3,10 @@
H1: Section
-
-
+
+
@@ -35,7 +35,7 @@
-
+
On this page
diff --git a/test/output/build/fetches/_file/foo/foo-data.csv b/test/output/build/fetches/_file/foo/foo-data.24ef4634.csv
similarity index 100%
rename from test/output/build/fetches/_file/foo/foo-data.csv
rename to test/output/build/fetches/_file/foo/foo-data.24ef4634.csv
diff --git a/test/output/build/fetches/_file/foo/foo-data.json b/test/output/build/fetches/_file/foo/foo-data.67358ed8.json
similarity index 100%
rename from test/output/build/fetches/_file/foo/foo-data.json
rename to test/output/build/fetches/_file/foo/foo-data.67358ed8.json
diff --git a/test/output/build/fetches/_file/top-data.csv b/test/output/build/fetches/_file/top-data.24ef4634.csv
similarity index 100%
rename from test/output/build/fetches/_file/top-data.csv
rename to test/output/build/fetches/_file/top-data.24ef4634.csv
diff --git a/test/output/build/fetches/_file/top-data.json b/test/output/build/fetches/_file/top-data.67358ed8.json
similarity index 100%
rename from test/output/build/fetches/_file/top-data.json
rename to test/output/build/fetches/_file/top-data.67358ed8.json
diff --git a/test/output/build/fetches/_import/foo/foo.js b/test/output/build/fetches/_import/foo/foo.6fd063d5.js
similarity index 100%
rename from test/output/build/fetches/_import/foo/foo.js
rename to test/output/build/fetches/_import/foo/foo.6fd063d5.js
diff --git a/test/output/build/fetches/_import/top.js b/test/output/build/fetches/_import/top.d8f5cc36.js
similarity index 66%
rename from test/output/build/fetches/_import/top.js
rename to test/output/build/fetches/_import/top.d8f5cc36.js
index 94d5dba46..a94611de3 100644
--- a/test/output/build/fetches/_import/top.js
+++ b/test/output/build/fetches/_import/top.d8f5cc36.js
@@ -1,4 +1,4 @@
import {FileAttachment} from "../_observablehq/stdlib.js";
-export {fooCsvData, fooJsonData} from "./foo/foo.js?sha=ddc538dfc10d83a59458d5893c89191ef3b2c9b1c02ef6da055423f37388ecf4";
+export {fooCsvData, fooJsonData} from "./foo/foo.6fd063d5.js";
export const topJsonData = await FileAttachment("../top-data.json", import.meta.url).json();
export const topCsvData = await FileAttachment("../top-data.csv", import.meta.url).text();
diff --git a/test/output/build/fetches/foo.html b/test/output/build/fetches/foo.html
index d7fea4b8d..380a14dcc 100644
--- a/test/output/build/fetches/foo.html
+++ b/test/output/build/fetches/foo.html
@@ -3,20 +3,24 @@
Top
-
-
+
-
+
+
-
+
diff --git a/test/output/build/fetches/top.html b/test/output/build/fetches/top.html
index 7b0baf937..762624aa0 100644
--- a/test/output/build/fetches/top.html
+++ b/test/output/build/fetches/top.html
@@ -3,21 +3,27 @@
Top
-
-
+
-
-
+
+
+
-
+
diff --git a/test/output/build/files/_file/custom-styles.css b/test/output/build/files/_file/custom-styles.b072c9c8.css
similarity index 100%
rename from test/output/build/files/_file/custom-styles.css
rename to test/output/build/files/_file/custom-styles.b072c9c8.css
diff --git a/test/output/build/files/_file/file-top.csv b/test/output/build/files/_file/file-top.01a7ce0a.csv
similarity index 100%
rename from test/output/build/files/_file/file-top.csv
rename to test/output/build/files/_file/file-top.01a7ce0a.csv
diff --git a/test/output/build/files/_file/observable logo small.png b/test/output/build/files/_file/observable logo small.8a915536.png
similarity index 100%
rename from test/output/build/files/_file/observable logo small.png
rename to test/output/build/files/_file/observable logo small.8a915536.png
diff --git a/test/output/build/files/_file/observable logo.png b/test/output/build/files/_file/observable logo.b620bd08.png
similarity index 100%
rename from test/output/build/files/_file/observable logo.png
rename to test/output/build/files/_file/observable logo.b620bd08.png
diff --git a/test/output/build/files/_file/subsection/additional-styles.css b/test/output/build/files/_file/subsection/additional-styles.3a854b3a.css
similarity index 100%
rename from test/output/build/files/_file/subsection/additional-styles.css
rename to test/output/build/files/_file/subsection/additional-styles.3a854b3a.css
diff --git a/test/output/build/files/_file/subsection/file-sub.csv b/test/output/build/files/_file/subsection/file-sub.72c2c61c.csv
similarity index 100%
rename from test/output/build/files/_file/subsection/file-sub.csv
rename to test/output/build/files/_file/subsection/file-sub.72c2c61c.csv
diff --git a/test/output/build/search-public/_observablehq/stdlib/dot.js b/test/output/build/files/_npm/d3-dsv@3.0.1/+esm.js
similarity index 100%
rename from test/output/build/search-public/_observablehq/stdlib/dot.js
rename to test/output/build/files/_npm/d3-dsv@3.0.1/+esm.js
diff --git a/test/output/build/files/files.html b/test/output/build/files/files.html
index fe62adf77..1e97b85d4 100644
--- a/test/output/build/files/files.html
+++ b/test/output/build/files/files.html
@@ -2,30 +2,36 @@
-
-
+
+
+
-
+
-
-
+
+
-
+
-
+