diff --git a/src/axes.js b/src/axes.js
index 21266f300d..5e390e5d9e 100644
--- a/src/axes.js
+++ b/src/axes.js
@@ -8,15 +8,15 @@ export function Axes(
let {axis: yAxis = true} = y;
let {axis: fxAxis = true} = fx;
let {axis: fyAxis = true} = fy;
- if (xAxis === true) xAxis = "bottom";
- if (yAxis === true) yAxis = "left";
- if (fxAxis === true) fxAxis = xAxis === "bottom" ? "top" : "bottom";
- if (fyAxis === true) fyAxis = yAxis === "left" ? "right" : "left";
+ if (!xScale) xAxis = null; else if (xAxis === true) xAxis = "bottom";
+ if (!yScale) yAxis = null; else if (yAxis === true) yAxis = "left";
+ if (!fxScale) fxAxis = null; else if (fxAxis === true) fxAxis = xAxis === "bottom" ? "top" : "bottom";
+ if (!fyScale) fyAxis = null; else if (fyAxis === true) fyAxis = yAxis === "left" ? "right" : "left";
return {
- ...xScale && xAxis && {x: new AxisX({grid, ...x, axis: xAxis})},
- ...yScale && yAxis && {y: new AxisY({grid, ...y, axis: yAxis})},
- ...fxScale && fxAxis && {fx: new AxisX({name: "fx", grid: facetGrid, ...fx, axis: fxAxis})},
- ...fyScale && fyAxis && {fy: new AxisY({name: "fy", grid: facetGrid, ...fy, axis: fyAxis})}
+ ...xAxis && {x: new AxisX({grid, ...x, axis: xAxis})},
+ ...yAxis && {y: new AxisY({grid, ...y, axis: yAxis})},
+ ...fxAxis && {fx: new AxisX({name: "fx", grid: facetGrid, ...fx, axis: fxAxis})},
+ ...fyAxis && {fy: new AxisY({name: "fy", grid: facetGrid, ...fy, axis: fyAxis})}
};
}
@@ -102,10 +102,11 @@ function inferLabel(channels = [], scale, axis, key) {
else if (candidate !== label) return;
}
if (candidate !== undefined) {
- const {invert} = scale;
+ const {percent, invert} = scale;
// Ignore the implicit label for temporal scales if it’s simply “date”.
if (scale.type === "temporal" && /^(date|time|year)$/i.test(candidate)) return;
if (scale.type !== "ordinal" && (key === "x" || key === "y")) {
+ if (percent) candidate = `${candidate} (%)`;
if (axis.labelAnchor === "center") {
candidate = `${candidate} →`;
} else if (key === "x") {
diff --git a/src/facet.js b/src/facet.js
index 91ab781d49..52f60163f7 100644
--- a/src/facet.js
+++ b/src/facet.js
@@ -60,7 +60,7 @@ class Facet extends Mark {
}
const named = Object.create(null);
for (const [name, channel] of channels) {
- if (name !== undefined) named[name] = channel.value;
+ if (name !== undefined) Object.defineProperty(named, name, {get: () => channel.value}); // scale transform
subchannels.push([undefined, channel]);
}
marksChannels.push(named);
diff --git a/src/index.js b/src/index.js
index 5f646f3628..0b7294f1f0 100644
--- a/src/index.js
+++ b/src/index.js
@@ -11,11 +11,10 @@ export {Rect, rect, rectX, rectY} from "./marks/rect.js";
export {RuleX, RuleY, ruleX, ruleY} from "./marks/rule.js";
export {Text, text, textX, textY} from "./marks/text.js";
export {TickX, TickY, tickX, tickY} from "./marks/tick.js";
-export {bin, binX, binY, binXMid, binYMid, binR} from "./transforms/bin.js";
-export {group, groupX, groupY, groupR, groupZ, groupZX, groupZY, groupZR} from "./transforms/group.js";
+export {bin, binMid, binX, binY, binXMid, binYMid} from "./transforms/bin.js";
+export {group, groupX, groupY, groupZ} from "./transforms/group.js";
export {normalizeX, normalizeY} from "./transforms/normalize.js";
export {map, mapX, mapY} from "./transforms/map.js";
-export {reduce, reduceX, reduceY} from "./transforms/reduce.js";
export {windowX, windowY} from "./transforms/window.js";
export {selectFirst, selectLast, selectMaxX, selectMaxY, selectMinX, selectMinY} from "./transforms/select.js";
export {stackX, stackX1, stackX2, stackXMid, stackY, stackY1, stackY2, stackYMid} from "./transforms/stack.js";
diff --git a/src/plot.js b/src/plot.js
index cc6f2bce26..e32214a123 100644
--- a/src/plot.js
+++ b/src/plot.js
@@ -33,10 +33,8 @@ export function plot(options = {}) {
const {scale} = channel;
if (scale !== undefined) {
const scaled = scaleChannels.get(scale);
- const {transform} = options[scale] || {};
- if (transform !== undefined) {
- channel.value = Array.from(channel.value, transform);
- }
+ const {percent, transform = percent ? x => x * 100 : undefined} = options[scale] || {};
+ if (transform !== undefined) channel.value = Array.from(channel.value, transform);
if (scaled) scaled.push(channel);
else scaleChannels.set(scale, [channel]);
}
diff --git a/src/scales/quantitative.js b/src/scales/quantitative.js
index 1bdde21862..d376c09229 100644
--- a/src/scales/quantitative.js
+++ b/src/scales/quantitative.js
@@ -137,6 +137,7 @@ export function ScaleQ(key, scale, channels, {
clamp,
zero,
domain = (registry.get(key) === radius ? inferRadialDomain : inferDomain)(channels),
+ percent,
round,
range = registry.get(key) === radius ? inferRadialRange(channels, domain) : undefined,
scheme,
@@ -167,7 +168,7 @@ export function ScaleQ(key, scale, channels, {
if (range !== undefined) scale.range(range);
if (clamp) scale.clamp(clamp);
- return {type: "quantitative", invert, domain, range, scale, inset};
+ return {type: "quantitative", invert, domain, range, scale, inset, percent};
}
export function ScaleLinear(key, channels, options) {
diff --git a/src/transforms/bin.js b/src/transforms/bin.js
index 0c1a11b070..bd4c00c770 100644
--- a/src/transforms/bin.js
+++ b/src/transforms/bin.js
@@ -1,233 +1,216 @@
-import {bin as binner, cross, group, sum} from "d3";
-import {firstof} from "../defined.js";
-import {valueof, first, second, range, identity, lazyChannel, maybeLazyChannel, maybeTransform, maybeColor, maybeValue, mid, take, labelof} from "../mark.js";
+import {bin as binner} from "d3";
+import {valueof, range, identity, maybeLazyChannel, maybeTransform, maybeTuple, maybeColor, maybeValue, mid, labelof} from "../mark.js";
import {offset} from "../style.js";
-import {groups} from "./group.js";
+import {maybeGroup, maybeOutputs, maybeReduce, maybeSubgroup, reduceIdentity} from "./group.js";
-// Group on y, z, fill, or stroke, if any, then bin on x.
-export function binX({x, y, out = y == null ? "y" : "fill", inset, insetLeft, insetRight, ...options} = {}) {
+// Group on {z, fill, stroke}, then optionally on y, then bin x.
+export function binX(outputs, {inset, insetLeft, insetRight, ...options} = {}) {
+ let {x, y} = options;
+ x = maybeBinValue(x, options, identity);
([insetLeft, insetRight] = maybeInset(inset, insetLeft, insetRight));
- const [transform, x1, x2, l] = bin1(x, "y", {y, ...options});
- return {x1, x2, ...transform, inset, insetLeft, insetRight, [out]: l};
+ return binn(x, null, null, y, outputs, {inset, insetLeft, insetRight, ...options});
}
-// Group on y, z, fill, or stroke, if any, then bin on x.
-export function binXMid({x, out = "r", ...options} = {}) {
- const [transform, x1, x2, l] = bin1(x, "y", options);
- return {x: mid(x1, x2), ...transform, [out]: l};
+// Group on {z, fill, stroke}, then optionally on y, then bin x.
+export function binXMid(outputs, options = {}) {
+ let {x, y} = options;
+ x = maybeBinValue(x, options, identity);
+ const {x1, x2, ...transform} = binn(x, null, null, y, outputs, options);
+ return {...transform, x: mid(x1, x2)};
}
-// Group on x, z, fill, or stroke, if any, then bin on y.
-export function binY({y, x, out = x == null ? "x" : "fill", inset, insetTop, insetBottom, ...options} = {}) {
+// Group on {z, fill, stroke}, then optionally on x, then bin y.
+export function binY(outputs, {inset, insetTop, insetBottom, ...options} = {}) {
+ let {x, y} = options;
+ y = maybeBinValue(y, options, identity);
([insetTop, insetBottom] = maybeInset(inset, insetTop, insetBottom));
- const [transform, y1, y2, l] = bin1(y, "x", {x, ...options});
- return {y1, y2, ...transform, inset, insetTop, insetBottom, [out]: l};
+ return binn(null, y, x, null, outputs, {inset, insetTop, insetBottom, ...options});
}
-// Group on y, z, fill, or stroke, if any, then bin on x.
-export function binYMid({y, out = "r", ...options} = {}) {
- const [transform, y1, y2, l] = bin1(y, "x", options);
- return {y: mid(y1, y2), ...transform, [out]: l};
+// Group on {z, fill, stroke}, then optionally on x, then bin y.
+export function binYMid(outputs, options = {}) {
+ let {x, y} = options;
+ y = maybeBinValue(y, options, identity);
+ const {y1, y2, ...transform} = binn(null, y, x, null, outputs, options);
+ return {...transform, y: mid(y1, y2)};
}
-// Group on z, fill, or stroke, if any, then bin on x and y.
-export function binR({x, y, ...options} = {}) {
- const [transform, x1, x2, y1, y2, r] = bin2(x, y, options);
- return {x: mid(x1, x2), y: mid(y1, y2), r, ...transform};
-}
-
-// Group on z, fill, or stroke, if any, then bin on x and y.
-export function bin({x, y, out = "fill", inset, insetTop, insetRight, insetBottom, insetLeft, ...options} = {}) {
+// Group on {z, fill, stroke}, then bin on x and y.
+export function bin(outputs, {inset, insetTop, insetRight, insetBottom, insetLeft, ...options} = {}) {
+ const {x, y} = maybeBinValueTuple(options);
([insetTop, insetBottom] = maybeInset(inset, insetTop, insetBottom));
([insetLeft, insetRight] = maybeInset(inset, insetLeft, insetRight));
- const [transform, x1, x2, y1, y2, l] = bin2(x, y, options);
- return {x1, x2, y1, y2, ...transform, inset, insetTop, insetRight, insetBottom, insetLeft, [out]: l};
+ return binn(x, y, null, null, outputs, {inset, insetTop, insetRight, insetBottom, insetLeft, ...options});
}
-function bin1(x, key, {[key]: k, z, fill, stroke, weight, domain, thresholds, normalize, cumulative, ...options} = {}) {
- const m = normalize === true || normalize === "z" ? 100 : +normalize;
- const bin = binof(identity, {value: x, domain, thresholds});
- const [X1, setX1] = lazyChannel(x);
- const [X2, setX2] = lazyChannel(x);
- const [L, setL] = lazyChannel(`${labelof(weight, "Frequency")}${m === 100 ? " (%)" : ""}`);
- const [vfill] = maybeColor(fill);
- const [vstroke] = maybeColor(stroke);
- const [BK, setBK] = maybeLazyChannel(k);
- const [BZ, setBZ] = maybeLazyChannel(z);
- const [BF = fill, setBF] = maybeLazyChannel(vfill);
- const [BS = stroke, setBS] = maybeLazyChannel(vstroke);
- return [
- {
- ...key && {[key]: BK},
- z: BZ,
- fill: BF,
- stroke: BS,
- ...options,
- transform: maybeTransform(options, (data, facets) => {
- const B = bin(data);
- const K = valueof(data, k);
- const Z = valueof(data, z);
- const F = valueof(data, vfill);
- const S = valueof(data, vstroke);
- const W = valueof(data, weight);
- const binFacets = [];
- const binData = [];
- const X1 = setX1([]);
- const X2 = setX2([]);
- const L = setL([]);
- const G = firstof(K, Z, F, S);
- const BK = K && setBK([]);
- const BZ = Z && setBZ([]);
- const BF = F && setBF([]);
- const BS = S && setBS([]);
- let n = W ? sum(W) : data.length;
- let i = 0;
- if (cumulative < 0) B.reverse();
- for (const facet of facets) {
- const binFacet = [];
- for (const I of G ? group(facet, i => G[i]).values() : [facet]) {
- if (normalize === "z") n = W ? sum(I, i => W[i]) : I.length;
- const set = new Set(I);
- let f;
- for (const b of B) {
- const s = b.filter(i => set.has(i));
- f = cumulative && f !== undefined ? f.concat(s) : s;
- const l = W ? sum(f, i => W[i]) : f.length;
- if (l > 0) {
- binFacet.push(i++);
- binData.push(take(data, f));
- X1.push(b.x0);
- X2.push(b.x1);
- L.push(m ? l * m / n : l);
- if (K) BK.push(K[f[0]]);
- if (Z) BZ.push(Z[f[0]]);
- if (F) BF.push(F[f[0]]);
- if (S) BS.push(S[f[0]]);
- }
- }
- }
- binFacets.push(binFacet);
- }
- return {data: binData, facets: binFacets};
- })
- },
- X1,
- X2,
- L
- ];
+// Group on {z, fill, stroke}, then bin on x and y.
+export function binMid(outputs, options) {
+ const {x, y} = maybeBinValueTuple(options);
+ const {x1, x2, y1, y2, ...transform} = binn(x, y, null, null, outputs, options);
+ return {...transform, x: mid(x1, x2), y: mid(y1, y2)};
}
-// Here x and y may each either be a standalone value (e.g., a string
-// representing a field name, a function, an array), or the value and some
-// additional per-dimension binning options as an objects of the form {value,
-// domain?, thresholds?}.
-function bin2(x, y, {weight, domain, thresholds, normalize, z, fill, stroke, ...options} = {}) {
- const m = normalize === true || normalize === "z" ? 100 : +normalize;
- const binX = binof(first, {domain, thresholds, ...maybeValue(x)});
- const binY = binof(second, {domain, thresholds, ...maybeValue(y)});
- const bin = data => cross(binX(data).filter(nonempty), binY(data).filter(nonempty).map(binset2), (x, y) => y(x));
- const [X1, setX1] = lazyChannel(x);
- const [X2, setX2] = lazyChannel(x);
- const [Y1, setY1] = lazyChannel(y);
- const [Y2, setY2] = lazyChannel(y);
- const [L, setL] = lazyChannel(`${labelof(weight, "Frequency")}${m === 100 ? " (%)" : ""}`);
+function binn(
+ bx, // optionally bin on x (exclusive with gx)
+ by, // optionally bin on y (exclusive with gy)
+ gx, // optionally group on x (exclusive with bx and gy)
+ gy, // optionally group on y (exclusive with by and gx)
+ {data: reduceData = reduceIdentity, ...outputs} = {}, // output channel definitions
+ inputs = {} // input channels and options
+) {
+ bx = maybeBin(bx);
+ by = maybeBin(by);
+ reduceData = maybeReduce(reduceData, identity);
+ outputs = maybeOutputs(outputs, inputs);
+
+ // Produce x1, x2, y1, and y2 output channels as appropriate (when binning).
+ const [BX1, setBX1] = maybeLazyChannel(bx);
+ const [BX2, setBX2] = maybeLazyChannel(bx);
+ const [BY1, setBY1] = maybeLazyChannel(by);
+ const [BY2, setBY2] = maybeLazyChannel(by);
+
+ // Produce x or y output channels as appropriate (when grouping).
+ const [k, gk] = gx != null ? [gx, "x"] : gy != null ? [gy, "y"] : [];
+ const [GK, setGK] = maybeLazyChannel(k);
+
+ // Greedily materialize the z, fill, and stroke channels (if channels and not
+ // constants) so that we can reference them for subdividing groups without
+ // computing them more than once.
+ const {x, y, z, fill, stroke, ...options} = inputs;
+ const [GZ, setGZ] = maybeLazyChannel(z);
const [vfill] = maybeColor(fill);
const [vstroke] = maybeColor(stroke);
- const [BZ, setBZ] = maybeLazyChannel(z);
- const [BF = fill, setBF] = maybeLazyChannel(vfill);
- const [BS = stroke, setBS] = maybeLazyChannel(vstroke);
- return [
- {
- z: BZ,
- fill: BF,
- stroke: BS,
- ...options,
- transform: maybeTransform(options, (data, facets) => {
- const B = bin(data);
- const Z = valueof(data, z);
- const F = valueof(data, vfill);
- const S = valueof(data, vstroke);
- const W = valueof(data, weight);
- const binFacets = [];
- const binData = [];
- const X1 = setX1([]);
- const X2 = setX2([]);
- const Y1 = setY1([]);
- const Y2 = setY2([]);
- const L = setL([]);
- const G = firstof(Z, F, S);
- const BZ = Z && setBZ([]);
- const BF = F && setBF([]);
- const BS = S && setBS([]);
- let n = W ? sum(W) : data.length;
- let i = 0;
- for (const facet of facets) {
- const binFacet = [];
- for (const [, I] of groups(facet, G)) {
- if (normalize === "z") n = W ? sum(I, i => W[i]) : I.length;
- const set = new Set(I);
- for (const b of B) {
- const f = b.filter(i => set.has(i));
- const l = W ? sum(f, i => W[i]) : f.length;
- if (l > 0) {
- binFacet.push(i++);
- binData.push(take(data, f));
- X1.push(b.x0);
- X2.push(b.x1);
- Y1.push(b.y0);
- Y2.push(b.y1);
- L.push(m ? l * m / n : l);
- if (Z) BZ.push(Z[f[0]]);
- if (F) BF.push(F[f[0]]);
- if (S) BS.push(S[f[0]]);
+ const [GF = fill, setGF] = maybeLazyChannel(vfill);
+ const [GS = stroke, setGS] = maybeLazyChannel(vstroke);
+
+ return {
+ z: GZ,
+ fill: GF,
+ stroke: GS,
+ ...options,
+ ...BX1 ? {x1: BX1, x2: BX2} : {x},
+ ...BY1 ? {y1: BY1, y2: BY2} : {y},
+ ...GK && {[gk]: GK},
+ ...Object.fromEntries(outputs.map(({name, output}) => [name, output])),
+ transform: maybeTransform(options, (data, facets) => {
+ const K = valueof(data, k);
+ const Z = valueof(data, z);
+ const F = valueof(data, vfill);
+ const S = valueof(data, vstroke);
+ const G = maybeSubgroup(outputs, Z, F, S);
+ const groupFacets = [];
+ const groupData = [];
+ const GK = K && setGK([]);
+ const GZ = Z && setGZ([]);
+ const GF = F && setGF([]);
+ const GS = S && setGS([]);
+ const BX = bx ? bx(data) : [[,, I => I]];
+ const BY = by ? by(data) : [[,, I => I]];
+ const BX1 = bx && setBX1([]);
+ const BX2 = bx && setBX2([]);
+ const BY1 = by && setBY1([]);
+ const BY2 = by && setBY2([]);
+ let i = 0;
+ for (const o of outputs) o.initialize(data);
+ for (const facet of facets) {
+ const groupFacet = [];
+ for (const o of outputs) o.scope("facet", facet);
+ for (const [, I] of maybeGroup(facet, G)) {
+ for (const [k, g] of maybeGroup(I, K)) {
+ for (const [x1, x2, fx] of BX) {
+ const bb = fx(g);
+ if (bb.length === 0) continue;
+ for (const [y1, y2, fy] of BY) {
+ const b = fy(bb);
+ if (b.length === 0) continue;
+ groupFacet.push(i++);
+ groupData.push(reduceData.reduce(b, data));
+ if (K) GK.push(k);
+ if (Z) GZ.push(Z[b[0]]);
+ if (F) GF.push(F[b[0]]);
+ if (S) GS.push(S[b[0]]);
+ if (BX1) BX1.push(x1), BX2.push(x2);
+ if (BY1) BY1.push(y1), BY2.push(y2);
+ for (const o of outputs) o.reduce(b);
}
}
}
- binFacets.push(binFacet);
}
- return {data: binData, facets: binFacets};
- })
- },
- X1,
- X2,
- Y1,
- Y2,
- L
- ];
+ groupFacets.push(groupFacet);
+ }
+ return {data: groupData, facets: groupFacets};
+ })
+ };
+}
+
+function maybeBinValue(value, {cumulative, domain, thresholds} = {}, defaultValue) {
+ value = {...maybeValue(value)};
+ if (value.domain === undefined) value.domain = domain;
+ if (value.cumulative === undefined) value.cumulative = cumulative;
+ if (value.thresholds === undefined) value.thresholds = thresholds;
+ if (value.value === undefined) value.value = defaultValue;
+ return value;
+}
+
+function maybeBinValueTuple(options = {}) {
+ let {x, y} = options;
+ x = maybeBinValue(x, options);
+ y = maybeBinValue(y, options);
+ ([x.value, y.value] = maybeTuple(x.value, y.value));
+ return {x, y};
}
-function binof(defaultValue, {value = defaultValue, domain, thresholds}) {
- return data => {
- const values = valueof(data, value);
- const bin = binner().value(i => values[i]);
+function maybeBin(options) {
+ if (options == null) return;
+ const {value, cumulative, domain, thresholds} = options;
+ const bin = data => {
+ const V = valueof(data, value);
+ const bin = binner().value(i => V[i]);
if (domain !== undefined) bin.domain(domain);
if (thresholds !== undefined) bin.thresholds(thresholds);
- return bin(range(data));
+ let bins = bin(range(data)).map(binset);
+ if (cumulative) bins = (cumulative < 0 ? bins.reverse() : bins).map(bincumset);
+ return bins.filter(nonempty2).map(binfilter);
};
+ bin.label = labelof(value);
+ return bin;
}
-function binset2(biny) {
- const y = new Set(biny);
- const {x0: y0, x1: y1} = biny;
- return binx => {
- const subbin = binx.filter(i => y.has(i));
- subbin.x0 = binx.x0;
- subbin.x1 = binx.x1;
- subbin.y0 = y0;
- subbin.y1 = y1;
- return subbin;
- };
+function binset(bin) {
+ return [bin, new Set(bin)];
}
-function nonempty({length}) {
- return length > 0;
+function bincumset([bin], j, bins) {
+ return [
+ bin,
+ {
+ get size() {
+ for (let k = 0; k <= j; ++k) {
+ if (bins[k][1].size) {
+ return 1; // a non-empty value
+ }
+ }
+ return 0;
+ },
+ has(i) {
+ for (let k = 0; k <= j; ++k) {
+ if (bins[k][1].has(i)) {
+ return true;
+ }
+ }
+ return false;
+ }
+ }
+ ];
}
-function length1({length}) {
- return length;
+function binfilter([{x0, x1}, set]) {
+ return [x0, x1, I => I.filter(set.has, set)]; // TODO optimize
}
-length1.label = "Frequency";
+function nonempty2([, {size}]) {
+ return size > 0;
+}
function maybeInset(inset, inset1, inset2) {
return inset === undefined && inset1 === undefined && inset2 === undefined
diff --git a/src/transforms/group.js b/src/transforms/group.js
index 5f89f41ff0..b715406bd3 100644
--- a/src/transforms/group.js
+++ b/src/transforms/group.js
@@ -1,138 +1,211 @@
-import {group as grouper, sort, sum, InternSet} from "d3";
-import {defined, firstof} from "../defined.js";
-import {valueof, maybeColor, maybeTransform, maybeValue, maybeLazyChannel, lazyChannel, first, identity, take, maybeTuple, labelof} from "../mark.js";
+import {group as grouper, sort, sum, deviation, min, max, mean, median, variance} from "d3";
+import {firstof} from "../defined.js";
+import {valueof, maybeColor, maybeInput, maybeTransform, maybeTuple, maybeLazyChannel, lazyChannel, first, identity, take, labelof, range} from "../mark.js";
// Group on {z, fill, stroke}.
-export function groupZ({out = "fill", ...options} = {}) {
- const [transform, L] = group2(null, null, options);
- return {...transform, [out]: L};
+export function groupZ(outputs, options) {
+ return groupn(null, null, outputs, options);
}
-export function groupZX(options) {
- return groupZ({...options, out: "x"});
+// Group on {z, fill, stroke}, then on x.
+export function groupX(outputs, options = {}) {
+ const {x = identity} = options;
+ if (x == null) throw new Error("missing channel: x");
+ return groupn(x, null, outputs, options);
}
-export function groupZY(options) {
- return groupZ({...options, out: "y"});
+// Group on {z, fill, stroke}, then on y.
+export function groupY(outputs, options = {}) {
+ const {y = identity} = options;
+ if (y == null) throw new Error("missing channel: y");
+ return groupn(null, y, outputs, options);
}
-export function groupZR(options) {
- return groupZ({...options, out: "r"});
-}
-
-// Group on {z, fill, stroke}, then on x (optionally).
-export function groupX({x = identity, out = "y", ...options} = {}) {
- const [transform, L, X] = group2(x, null, options);
- return {...transform, x: X, [out]: L};
-}
-
-// Group on {z, fill, stroke}, then on y (optionally).
-export function groupY({y = identity, out = "x", ...options} = {}) {
- const [transform, L,, Y] = group2(null, y, options);
- return {...transform, y: Y, [out]: L};
-}
-
-// Group on {z, fill, stroke}, then on x and y (optionally).
-export function group({x, y, out = "fill", ...options} = {}) {
+// Group on {z, fill, stroke}, then on x and y.
+export function group(outputs, options = {}) {
+ let {x, y} = options;
([x, y] = maybeTuple(x, y));
- const [transform, L, X, Y] = group2(x, y, options);
- return {...transform, x: X, y: Y, [out]: L};
+ if (x == null) throw new Error("missing channel: x");
+ if (y == null) throw new Error("missing channel: y");
+ return groupn(x, y, outputs, options);
}
-export function groupR(options) {
- return group({...options, out: "r"});
-}
+function groupn(
+ x, // optionally group on x
+ y, // optionally group on y
+ {data: reduceData = reduceIdentity, ...outputs} = {}, // output channel definitions
+ inputs = {} // input channels and options
+) {
+ reduceData = maybeReduce(reduceData, identity);
+ outputs = maybeOutputs(outputs, inputs);
-function group2(xv, yv, {z, fill, stroke, weight, domain, normalize, ...options} = {}) {
- let {value: x, domain: xdomain} = {domain, ...maybeValue(xv)};
- let {value: y, domain: ydomain} = {domain, ...maybeValue(yv)};
- ([x, y] = maybeTuple(x, y));
- const m = maybeNormalize(normalize);
- const [BL, setBL] = lazyChannel(`${labelof(weight, "Frequency")}${m === 100 ? " (%)" : ""}`);
- const [BX, setBX] = maybeLazyChannel(x);
- const [BY, setBY] = maybeLazyChannel(y);
- const [BZ, setBZ] = maybeLazyChannel(z);
+ // Produce x and y output channels as appropriate.
+ const [GX, setGX] = maybeLazyChannel(x);
+ const [GY, setGY] = maybeLazyChannel(y);
+
+ // Greedily materialize the z, fill, and stroke channels (if channels and not
+ // constants) so that we can reference them for subdividing groups without
+ // computing them more than once.
+ const {z, fill, stroke, ...options} = inputs;
+ const [GZ, setGZ] = maybeLazyChannel(z);
const [vfill] = maybeColor(fill);
const [vstroke] = maybeColor(stroke);
- const [BF = fill, setBF] = maybeLazyChannel(vfill);
- const [BS = stroke, setBS] = maybeLazyChannel(vstroke);
- const xdefined = BX && maybeDomain(xdomain);
- const ydefined = BY && maybeDomain(ydomain);
- return [
- {
- z: BZ,
- fill: BF,
- stroke: BS,
- ...options,
- transform: maybeTransform(options, (data, facets) => {
- const X = valueof(data, x);
- const Y = valueof(data, y);
- const Z = valueof(data, z);
- const F = valueof(data, vfill);
- const S = valueof(data, vstroke);
- const W = valueof(data, weight);
- const groupFacets = [];
- const groupData = [];
- const G = firstof(Z, F, S);
- const BL = setBL([]);
- const BX = X && setBX([]);
- const BY = Y && setBY([]);
- const BZ = Z && setBZ([]);
- const BF = F && setBF([]);
- const BS = S && setBS([]);
- let n = W ? sum(W) : data.length;
- let i = 0;
- for (const facet of facets) {
- const groupFacet = [];
- if (normalize === "facet") n = W ? sum(facet, i => W[i]) : facet.length;
- for (const [, I] of groups(facet, G, defined1)) {
- if (normalize === "z") n = W ? sum(I, i => W[i]) : I.length;
- for (const [y, fy] of groups(I, Y, ydefined)) {
- for (const [x, f] of groups(fy, X, xdefined)) {
- const l = W ? sum(f, i => W[i]) : f.length;
- groupFacet.push(i++);
- groupData.push(take(data, f));
- BL.push(m ? l * m / n : l);
- if (X) BX.push(x);
- if (Y) BY.push(y);
- if (Z) BZ.push(Z[f[0]]);
- if (F) BF.push(F[f[0]]);
- if (S) BS.push(S[f[0]]);
- }
+ const [GF = fill, setGF] = maybeLazyChannel(vfill);
+ const [GS = stroke, setGS] = maybeLazyChannel(vstroke);
+
+ return {
+ z: GZ,
+ fill: GF,
+ stroke: GS,
+ ...options,
+ ...GX && {x: GX},
+ ...GY && {y: GY},
+ ...Object.fromEntries(outputs.map(({name, output}) => [name, output])),
+ transform: maybeTransform(options, (data, facets) => {
+ const X = valueof(data, x);
+ const Y = valueof(data, y);
+ const Z = valueof(data, z);
+ const F = valueof(data, vfill);
+ const S = valueof(data, vstroke);
+ const G = maybeSubgroup(outputs, Z, F, S);
+ const groupFacets = [];
+ const groupData = [];
+ const GX = X && setGX([]);
+ const GY = Y && setGY([]);
+ const GZ = Z && setGZ([]);
+ const GF = F && setGF([]);
+ const GS = S && setGS([]);
+ let i = 0;
+ for (const o of outputs) o.initialize(data);
+ for (const facet of facets) {
+ const groupFacet = [];
+ for (const o of outputs) o.scope("facet", facet);
+ for (const [, I] of maybeGroup(facet, G)) {
+ for (const [y, gg] of maybeGroup(I, Y)) {
+ for (const [x, g] of maybeGroup(gg, X)) {
+ groupFacet.push(i++);
+ groupData.push(reduceData.reduce(g, data));
+ if (X) GX.push(x);
+ if (Y) GY.push(y);
+ if (Z) GZ.push(Z[g[0]]);
+ if (F) GF.push(F[g[0]]);
+ if (S) GS.push(S[g[0]]);
+ for (const o of outputs) o.reduce(g);
}
}
- groupFacets.push(groupFacet);
}
- return {data: groupData, facets: groupFacets};
- })
- },
- BL,
- BX,
- BY
- ];
+ groupFacets.push(groupFacet);
+ }
+ return {data: groupData, facets: groupFacets};
+ })
+ };
+}
+
+export function maybeOutputs(outputs, inputs) {
+ return Object.entries(outputs).map(([name, reduce]) => {
+ const value = maybeInput(name, inputs);
+ const reducer = maybeReduce(reduce, value);
+ const [output, setOutput] = lazyChannel(labelof(value, reducer.label));
+ let V, O, context;
+ return {
+ name,
+ output,
+ initialize(data) {
+ V = valueof(data, value);
+ O = setOutput([]);
+ if (reducer.scope === "data") {
+ context = reducer.reduce(range(data), V);
+ }
+ },
+ scope(scope, I) {
+ if (reducer.scope === scope) {
+ context = reducer.reduce(I, V);
+ }
+ },
+ reduce(I) {
+ O.push(reducer.reduce(I, V, context));
+ }
+ };
+ });
}
-function maybeDomain(domain) {
- if (domain === undefined) return defined1;
- if (domain === null) return () => false;
- domain = new InternSet(domain);
- return ([key]) => domain.has(key);
+export function maybeGroup(I, X) {
+ return X ? sort(grouper(I, i => X[i]), first) : [[, I]];
}
-function maybeNormalize(normalize) {
- if (!normalize) return;
- if (normalize === true) return 100;
- if (typeof normalize === "number") return normalize;
- switch ((normalize + "").toLowerCase()) {
- case "facet": case "z": return 100;
+export function maybeReduce(reduce, value) {
+ if (reduce && typeof reduce.reduce === "function") return reduce;
+ if (typeof reduce === "function") return reduceFunction(reduce);
+ switch ((reduce + "").toLowerCase()) {
+ case "first": return reduceFirst;
+ case "last": return reduceLast;
+ case "count": return reduceCount;
+ case "sum": return value == null ? reduceCount : reduceSum;
+ case "proportion": return reduceProportion(value, "data");
+ case "proportion-facet": return reduceProportion(value, "facet");
+ case "deviation": return reduceAccessor(deviation);
+ case "min": return reduceAccessor(min);
+ case "max": return reduceAccessor(max);
+ case "mean": return reduceAccessor(mean);
+ case "median": return reduceAccessor(median);
+ case "variance": return reduceAccessor(variance);
}
- throw new Error("invalid normalize");
+ throw new Error("invalid reduce");
}
-function defined1([key]) {
- return defined(key);
+export function maybeSubgroup(outputs, Z, F, S) {
+ return firstof(
+ outputs.some(o => o.name === "z") ? undefined : Z,
+ outputs.some(o => o.name === "fill") ? undefined : F,
+ outputs.some(o => o.name === "stroke") ? undefined : S
+ );
}
-export function groups(I, X, defined = defined1) {
- return X ? sort(grouper(I, i => X[i]), first).filter(defined) : [[, I]];
+function reduceFunction(f) {
+ return {
+ reduce(I, X) {
+ return f(take(X, I));
+ }
+ };
+}
+
+function reduceAccessor(f) {
+ return {
+ reduce(I, X) {
+ return f(I, i => X[i]);
+ }
+ };
+}
+
+export const reduceIdentity = {
+ reduce(I, X) {
+ return take(X, I);
+ }
+};
+
+const reduceFirst = {
+ reduce(I, X) {
+ return X[I[0]];
+ }
+};
+
+const reduceLast = {
+ reduce(I, X) {
+ return X[I[I.length - 1]];
+ }
+};
+
+const reduceCount = {
+ label: "Frequency",
+ reduce(I) {
+ return I.length;
+ }
+};
+
+const reduceSum = reduceAccessor(sum);
+
+function reduceProportion(value, scope) {
+ return value == null
+ ? {scope, label: "Frequency", reduce: (I, V, basis = 1) => I.length / basis}
+ : {scope, reduce: (I, V, basis = 1) => sum(I, i => V[i]) / basis};
}
diff --git a/src/transforms/reduce.js b/src/transforms/reduce.js
deleted file mode 100644
index 043e88ba2d..0000000000
--- a/src/transforms/reduce.js
+++ /dev/null
@@ -1,122 +0,0 @@
-import {deviation, group, min, max, mean, median, sum, variance} from "d3";
-import {firstof} from "../defined.js";
-import {lazyChannel, maybeColor, maybeLazyChannel, maybeInput, maybeTransform, take, valueof} from "../mark.js";
-
-// Group on y, z, fill, or stroke, if any, then reduce.
-export function reduceX(outputs, options) {
- return reducen("y", outputs, options);
-}
-
-// Group on x, z, fill, or stroke, if any, then reduce.
-export function reduceY(outputs, options) {
- return reducen("x", outputs, options);
-}
-
-// Group on z, fill, or stroke, if any, then reduce.
-export function reduce(outputs, options) {
- return reducen(undefined, outputs, options);
-}
-
-function reducen(
- key, // an optional additional group channel (x or y, typically)
- {data: reduceData = reduceIdentity, ...outputs} = {}, // channels to reduce
- options = {} // channels to group, and options
-) {
- const {[key]: k, z, fill, stroke, ...rest} = options;
- reduceData = maybeReduce(reduceData);
-
- // All channels that are candidates for grouping are aggregated by picking the
- // first value from the corresponding input value, even if they are not
- // actually used for grouping.
- const [zfill] = maybeColor(fill);
- const [zstroke] = maybeColor(stroke);
- const [RK, setRK] = maybeLazyChannel(k);
- const [RZ, setRZ] = maybeLazyChannel(z);
- const [RF = fill, setRF] = maybeLazyChannel(zfill);
- const [RS = stroke, setRS] = maybeLazyChannel(zstroke);
-
- // All output channels are aggregated by applying the corresponding specified
- // reducer on the associated input values for each group.
- const channels = Object.entries(outputs).map(([key, reduce]) => {
- const input = maybeInput(key, options);
- if (input == null) throw new Error(`missing channel: ${key}`);
- const [output, setOutput] = lazyChannel(input);
- return {key, input, output, setOutput, reduce: maybeReduce(reduce)};
- });
-
- return {
- ...key && {[key]: RK},
- z: RZ,
- fill: RF,
- stroke: RS,
- ...rest,
- ...Object.fromEntries(channels.map(({key, output}) => [key, output])),
- transform: maybeTransform(options, (data, facets) => {
- const outFacets = [];
- const outData = [];
- const X = channels.map(({input}) => valueof(data, input));
- const RX = channels.map(({setOutput}) => setOutput([]));
- const K = valueof(data, k);
- const Z = valueof(data, z);
- const F = valueof(data, zfill);
- const S = valueof(data, zstroke);
- const G = firstof(K, Z, F, S);
- const RK = K && setRK([]);
- const RZ = Z && setRZ([]);
- const RF = F && setRF([]);
- const RS = S && setRS([]);
- let i = 0;
- for (const facet of facets) {
- const outFacet = [];
- for (const I of G ? group(facet, i => G[i]).values() : [facet]) {
- outFacet.push(i++);
- outData.push(reduceData.reduce(I, data));
- channels.forEach(({reduce}, i) => RX[i].push(reduce.reduce(I, X[i])));
- if (K) RK.push(K[I[0]]);
- if (Z) RZ.push(Z[I[0]]);
- if (F) RF.push(F[I[0]]);
- if (S) RS.push(S[I[0]]);
- }
- outFacets.push(outFacet);
- }
- return {data: outData, facets: outFacets};
- })
- };
-}
-
-function maybeReduce(reduce) {
- if (reduce && typeof reduce.reduce === "function") return reduce;
- if (typeof reduce === "function") return reduceFunction(reduce);
- switch ((reduce + "").toLowerCase()) {
- case "deviation": return reduceFunction2(deviation);
- case "min": return reduceFunction2(min);
- case "max": return reduceFunction2(max);
- case "mean": return reduceFunction2(mean);
- case "median": return reduceFunction2(median);
- case "sum": return reduceFunction2(sum);
- case "variance": return reduceFunction2(variance);
- }
- throw new Error("invalid reduce");
-}
-
-function reduceFunction(f) {
- return {
- reduce(I, X) {
- return f(take(X, I));
- }
- };
-}
-
-function reduceFunction2(f) {
- return {
- reduce(I, X) {
- return f(I, i => X[i]);
- }
- };
-}
-
-const reduceIdentity = {
- reduce(I, X) {
- return take(X, I);
- }
-};
diff --git a/test/output/aaplVolume.svg b/test/output/aaplVolume.svg
index 577d34d89d..2c62b17b55 100644
--- a/test/output/aaplVolume.svg
+++ b/test/output/aaplVolume.svg
@@ -73,7 +73,7 @@
-
+
diff --git a/test/output/athletesHeightWeight.svg b/test/output/athletesHeightWeight.svg
new file mode 100644
index 0000000000..e555656714
--- /dev/null
+++ b/test/output/athletesHeightWeight.svg
@@ -0,0 +1,10974 @@
+
\ No newline at end of file
diff --git a/test/output/athletesHeightWeightBin.svg b/test/output/athletesHeightWeightBin.svg
new file mode 100644
index 0000000000..af6b696d38
--- /dev/null
+++ b/test/output/athletesHeightWeightBin.svg
@@ -0,0 +1,931 @@
+
\ No newline at end of file
diff --git a/test/output/athletesNationality.svg b/test/output/athletesNationality.svg
new file mode 100644
index 0000000000..a7ad7ae35e
--- /dev/null
+++ b/test/output/athletesNationality.svg
@@ -0,0 +1,112 @@
+
\ No newline at end of file
diff --git a/test/output/athletesSexWeight.svg b/test/output/athletesSexWeight.svg
new file mode 100644
index 0000000000..09b3d81c5a
--- /dev/null
+++ b/test/output/athletesSexWeight.svg
@@ -0,0 +1,127 @@
+
\ No newline at end of file
diff --git a/test/output/athletesSportWeight.svg b/test/output/athletesSportWeight.svg
new file mode 100644
index 0000000000..7cfc77ed86
--- /dev/null
+++ b/test/output/athletesSportWeight.svg
@@ -0,0 +1,1064 @@
+
\ No newline at end of file
diff --git a/test/output/athletesWeight.svg b/test/output/athletesWeight.svg
index 7a04a6d9b6..7d0e98057e 100644
--- a/test/output/athletesWeight.svg
+++ b/test/output/athletesWeight.svg
@@ -1,950 +1,92 @@
-
\ No newline at end of file
diff --git a/test/output/mobyDickLetterRelativeFrequency.svg b/test/output/mobyDickLetterRelativeFrequency.svg
new file mode 100644
index 0000000000..98e728ad76
--- /dev/null
+++ b/test/output/mobyDickLetterRelativeFrequency.svg
@@ -0,0 +1,167 @@
+
+
+
+
+ 0
+
+
+
+ 1
+
+
+
+ 2
+
+
+
+ 3
+
+
+
+ 4
+
+
+
+ 5
+
+
+
+ 6
+
+
+
+ 7
+
+
+
+ 8
+
+
+
+ 9
+
+
+
+ 10
+
+
+
+ 11
+
+
+
+ 12
+ ↑ Frequency (%)
+
+
+
+ A
+
+
+ B
+
+
+ C
+
+
+ D
+
+
+ E
+
+
+ F
+
+
+ G
+
+
+ H
+
+
+ I
+
+
+ J
+
+
+ K
+
+
+ L
+
+
+ M
+
+
+ N
+
+
+ O
+
+
+ P
+
+
+ Q
+
+
+ R
+
+
+ S
+
+
+ T
+
+
+ U
+
+
+ V
+
+
+ W
+
+
+ X
+
+
+ Y
+
+
+ Z
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/test/output/penguinSpeciesGroup.svg b/test/output/penguinSpeciesGroup.svg
index 3f1492bbb6..304c1ad761 100644
--- a/test/output/penguinSpeciesGroup.svg
+++ b/test/output/penguinSpeciesGroup.svg
@@ -1,45 +1,45 @@
- 0
+ 0.0
- 10
+ 0.1
- 20
+ 0.2
- 30
+ 0.3
- 40
+ 0.4
- 50
+ 0.5
- 60
+ 0.6
- 70
+ 0.7
- 80
+ 0.8
- 90
+ 0.9
- 100
- Frequency (%) →
+ 1.0
+ Frequency →
-
-
+
+
- AdelieChinstrapGentoo
+ AdelieChinstrapGentoo
diff --git a/test/output/penguinSpeciesIslandRelative.svg b/test/output/penguinSpeciesIslandRelative.svg
new file mode 100644
index 0000000000..3a468ed694
--- /dev/null
+++ b/test/output/penguinSpeciesIslandRelative.svg
@@ -0,0 +1,76 @@
+
+
+
+ 0
+
+
+ 10
+
+
+ 20
+
+
+ 30
+
+
+ 40
+
+
+ 50
+
+
+ 60
+
+
+ 70
+
+
+ 80
+
+
+ 90
+
+
+ 100
+ ↑ Frequency (%)
+
+
+
+ Adelie
+
+
+ Chinstrap
+
+
+ Gentoo
+ species
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/test/output/uniformRandomDifference.svg b/test/output/uniformRandomDifference.svg
index c5d402dda3..d91cc21ba6 100644
--- a/test/output/uniformRandomDifference.svg
+++ b/test/output/uniformRandomDifference.svg
@@ -83,20 +83,20 @@
-
+
-
-
+
+
-
+
-
+
diff --git a/test/output/wordLengthMobyDick.svg b/test/output/wordLengthMobyDick.svg
index 55db63244d..67cf1aca18 100644
--- a/test/output/wordLengthMobyDick.svg
+++ b/test/output/wordLengthMobyDick.svg
@@ -91,11 +91,11 @@
-
+
-
-
-
+
+
+
diff --git a/test/plots/aapl-volume.js b/test/plots/aapl-volume.js
index e3c48ef90f..94930d6ba3 100644
--- a/test/plots/aapl-volume.js
+++ b/test/plots/aapl-volume.js
@@ -9,10 +9,11 @@ export default async function() {
label: "Trade volume (log₁₀) →"
},
y: {
- grid: true
+ grid: true,
+ percent: true
},
marks: [
- Plot.rectY(data, Plot.binX({x: d => Math.log10(d.Volume), normalize: true})),
+ Plot.rectY(data, Plot.binX({y: "proportion"}, {x: d => Math.log10(d.Volume)})),
Plot.ruleY([0])
]
});
diff --git a/test/plots/athletes-height-weight-bin.js b/test/plots/athletes-height-weight-bin.js
new file mode 100644
index 0000000000..ef8e90ca1a
--- /dev/null
+++ b/test/plots/athletes-height-weight-bin.js
@@ -0,0 +1,20 @@
+import * as Plot from "@observablehq/plot";
+import * as d3 from "d3";
+
+export default async function() {
+ const athletes = await d3.csv("data/athletes.csv", d3.autoType);
+ return Plot.plot({
+ round: true,
+ grid: true,
+ height: 640,
+ y: {
+ ticks: 10
+ },
+ color: {
+ scheme: "YlGnBu"
+ },
+ marks: [
+ Plot.rect(athletes, Plot.bin({fill: "count"}, {x: "weight", y: "height", thresholds: 50}))
+ ]
+ });
+}
diff --git a/test/plots/athletes-height-weight.js b/test/plots/athletes-height-weight.js
new file mode 100644
index 0000000000..f9387dfe41
--- /dev/null
+++ b/test/plots/athletes-height-weight.js
@@ -0,0 +1,13 @@
+import * as Plot from "@observablehq/plot";
+import * as d3 from "d3";
+
+export default async function() {
+ const athletes = await d3.csv("data/athletes.csv", d3.autoType);
+ return Plot.plot({
+ grid: true,
+ height: 640,
+ marks: [
+ Plot.dot(athletes, {x: "weight", y: "height"})
+ ]
+ });
+}
diff --git a/test/plots/athletes-nationality.js b/test/plots/athletes-nationality.js
new file mode 100644
index 0000000000..d4d0fd6602
--- /dev/null
+++ b/test/plots/athletes-nationality.js
@@ -0,0 +1,19 @@
+import * as Plot from "@observablehq/plot";
+import * as d3 from "d3";
+
+export default async function() {
+ const athletes = await d3.csv("data/athletes.csv", d3.autoType);
+ const top = new Set(d3.groupSort(athletes, g => -g.length, d => d.nationality).slice(0, 20));
+ return Plot.plot({
+ x: {
+ grid: true
+ },
+ y: {
+ domain: top,
+ label: null
+ },
+ marks: [
+ Plot.barX(athletes, Plot.groupY({x: "count"}, {filter: d => top.has(d.nationality), y: "nationality"})) // TODO remove filter
+ ]
+ });
+}
diff --git a/test/plots/athletes-sex-weight.js b/test/plots/athletes-sex-weight.js
new file mode 100644
index 0000000000..64a0db316b
--- /dev/null
+++ b/test/plots/athletes-sex-weight.js
@@ -0,0 +1,15 @@
+import * as Plot from "@observablehq/plot";
+import * as d3 from "d3";
+
+export default async function() {
+ const athletes = await d3.csv("data/athletes.csv", d3.autoType);
+ return Plot.plot({
+ y: {
+ grid: true
+ },
+ marks: [
+ Plot.rectY(athletes, Plot.binX({y: "count"}, {x: "weight", fill: "sex", mixBlendMode: "multiply", thresholds: 30})),
+ Plot.ruleY([0])
+ ]
+ });
+}
diff --git a/test/plots/athletes-sport-weight.js b/test/plots/athletes-sport-weight.js
new file mode 100644
index 0000000000..00da2f9f20
--- /dev/null
+++ b/test/plots/athletes-sport-weight.js
@@ -0,0 +1,24 @@
+import * as Plot from "@observablehq/plot";
+import * as d3 from "d3";
+
+export default async function() {
+ const athletes = await d3.csv("data/athletes.csv", d3.autoType);
+ return Plot.plot({
+ height: 640,
+ x: {
+ grid: true
+ },
+ color: {
+ scheme: "YlGnBu",
+ zero: true
+ },
+ facet: {
+ data: athletes,
+ marginLeft: 100,
+ y: "sport"
+ },
+ marks: [
+ Plot.barX(athletes, Plot.binX({fill: "proportion-facet"}, {x: "weight", thresholds: 60}))
+ ]
+ });
+}
diff --git a/test/plots/athletes-weight-cumulative.js b/test/plots/athletes-weight-cumulative.js
new file mode 100644
index 0000000000..17612c9b7a
--- /dev/null
+++ b/test/plots/athletes-weight-cumulative.js
@@ -0,0 +1,11 @@
+import * as Plot from "@observablehq/plot";
+import * as d3 from "d3";
+
+export default async function() {
+ const athletes = await d3.csv("data/athletes.csv", d3.autoType);
+ return Plot.plot({
+ marks: [
+ Plot.rectY(athletes, Plot.binX({y: "count"}, {x: "weight", cumulative: true}))
+ ]
+ });
+}
diff --git a/test/plots/athletes-weight.js b/test/plots/athletes-weight.js
index 176c9b81c5..4af9cc255d 100644
--- a/test/plots/athletes-weight.js
+++ b/test/plots/athletes-weight.js
@@ -4,17 +4,8 @@ import * as d3 from "d3";
export default async function() {
const athletes = await d3.csv("data/athletes.csv", d3.autoType);
return Plot.plot({
- marginLeft: 100,
- height: 640,
- x: {
- grid: true
- },
- color: {
- scheme: "YlGnBu",
- zero: true
- },
marks: [
- Plot.barX(athletes, Plot.binX({x: "weight", y: "sport", thresholds: 60, normalize: "z", out: "fill"}))
+ Plot.rectY(athletes, Plot.binX({y: "count"}, {x: "weight"}))
]
});
}
diff --git a/test/plots/diamonds-carat-price-dots.js b/test/plots/diamonds-carat-price-dots.js
index 4571e52718..b7430ce3c5 100644
--- a/test/plots/diamonds-carat-price-dots.js
+++ b/test/plots/diamonds-carat-price-dots.js
@@ -17,7 +17,7 @@ export default async function() {
range: [0, 3]
},
marks: [
- Plot.dot(data, Plot.binR({x: "carat", y: "price", thresholds: 100}))
+ Plot.dot(data, Plot.binMid({r: "count"}, {x: "carat", y: "price", thresholds: 100}))
]
});
}
diff --git a/test/plots/diamonds-carat-price.js b/test/plots/diamonds-carat-price.js
index 6abba626a0..3a86705ab5 100644
--- a/test/plots/diamonds-carat-price.js
+++ b/test/plots/diamonds-carat-price.js
@@ -10,7 +10,7 @@ export default async function() {
type: "symlog"
},
marks: [
- Plot.rect(data, Plot.bin({x: "carat", y: "price", thresholds: 100}))
+ Plot.rect(data, Plot.bin({fill: "count"}, {x: "carat", y: "price", thresholds: 100}))
]
});
}
diff --git a/test/plots/fruit-sales.js b/test/plots/fruit-sales.js
index 56139f3efa..c26feed074 100644
--- a/test/plots/fruit-sales.js
+++ b/test/plots/fruit-sales.js
@@ -9,7 +9,7 @@ export default async function() {
label: null
},
marks: [
- Plot.barX(sales, Plot.groupY({y: "fruit", weight: "units"})),
+ Plot.barX(sales, Plot.groupY({x: "sum"}, {x: "units", y: "fruit"})),
Plot.ruleX([0])
]
});
diff --git a/test/plots/index.js b/test/plots/index.js
index 130cf515bc..53962fa05e 100644
--- a/test/plots/index.js
+++ b/test/plots/index.js
@@ -3,7 +3,13 @@ export {default as aaplChangeVolume} from "./aapl-change-volume.js";
export {default as aaplClose} from "./aapl-close.js";
export {default as aaplVolume} from "./aapl-volume.js";
export {default as anscombeQuartet} from "./anscombe-quartet.js";
+export {default as athletesHeightWeight} from "./athletes-height-weight.js";
+export {default as athletesHeightWeightBin} from "./athletes-height-weight-bin.js";
+export {default as athletesNationality} from "./athletes-nationality.js";
+export {default as athletesSexWeight} from "./athletes-sex-weight.js";
+export {default as athletesSportWeight} from "./athletes-sport-weight.js";
export {default as athletesWeight} from "./athletes-weight.js";
+export {default as athletesWeightCumulative} from "./athletes-weight-cumulative.js";
export {default as ballotStatusRace} from "./ballot-status-race.js";
export {default as beckerBarley} from "./becker-barley.js";
export {default as caltrain} from "./caltrain.js";
@@ -40,6 +46,7 @@ export {default as metroUnemploymentRidgeline} from "./metro-unemployment-ridgel
export {default as mobyDickFaceted} from "./moby-dick-faceted.js";
export {default as mobyDickLetterFrequency} from "./moby-dick-letter-frequency.js";
export {default as mobyDickLetterPosition} from "./moby-dick-letter-position.js";
+export {default as mobyDickLetterRelativeFrequency} from "./moby-dick-letter-relative-frequency.js";
export {default as morleyBoxplot} from "./morley-boxplot.js";
export {default as moviesProfitByGenre} from "./movies-profit-by-genre.js";
export {default as musicRevenue} from "./music-revenue.js";
@@ -53,6 +60,7 @@ export {default as penguinMassSpecies} from "./penguin-mass-species.js";
export {default as penguinSexMassCulmenSpecies} from "./penguin-sex-mass-culmen-species.js";
export {default as penguinSpeciesGroup} from "./penguin-species-group.js";
export {default as penguinSpeciesIsland} from "./penguin-species-island.js";
+export {default as penguinSpeciesIslandRelative} from "./penguin-species-island-relative.js";
export {default as policeDeaths} from "./police-deaths.js";
export {default as policeDeathsBar} from "./police-deaths-bar.js";
export {default as randomWalk} from "./random-walk.js";
diff --git a/test/plots/moby-dick-faceted.js b/test/plots/moby-dick-faceted.js
index 489790ff44..e405e8f654 100644
--- a/test/plots/moby-dick-faceted.js
+++ b/test/plots/moby-dick-faceted.js
@@ -17,7 +17,7 @@ export default async function() {
y: cases
},
marks: [
- Plot.barY(letters, Plot.groupX({x: uppers})),
+ Plot.barY(letters, Plot.groupX({y: "count"}, {x: uppers})),
Plot.ruleY([0])
]
});
diff --git a/test/plots/moby-dick-letter-frequency.js b/test/plots/moby-dick-letter-frequency.js
index a531036f2b..4228e01824 100644
--- a/test/plots/moby-dick-letter-frequency.js
+++ b/test/plots/moby-dick-letter-frequency.js
@@ -9,7 +9,7 @@ export default async function() {
grid: true
},
marks: [
- Plot.barY(letters, Plot.groupX()),
+ Plot.barY(letters, Plot.groupX({y: "count"})),
Plot.ruleY([0])
]
});
diff --git a/test/plots/moby-dick-letter-position.js b/test/plots/moby-dick-letter-position.js
index 4352b868d0..58829d26c9 100644
--- a/test/plots/moby-dick-letter-position.js
+++ b/test/plots/moby-dick-letter-position.js
@@ -30,7 +30,7 @@ export default async function() {
scheme: "blues"
},
marks: [
- Plot.cell(positions, Plot.group({insetTop: 1, insetLeft: 1}))
+ Plot.cell(positions, Plot.group({fill: "count"}, {inset: 0.5}))
]
});
}
diff --git a/test/plots/moby-dick-letter-relative-frequency.js b/test/plots/moby-dick-letter-relative-frequency.js
new file mode 100644
index 0000000000..72e3a3da88
--- /dev/null
+++ b/test/plots/moby-dick-letter-relative-frequency.js
@@ -0,0 +1,17 @@
+import * as Plot from "@observablehq/plot";
+import * as d3 from "d3";
+
+export default async function() {
+ const mobydick = await d3.text("data/moby-dick-chapter-1.txt");
+ const letters = [...mobydick].filter(c => /[a-z]/i.test(c)).map(c => c.toUpperCase());
+ return Plot.plot({
+ y: {
+ grid: true,
+ percent: true
+ },
+ marks: [
+ Plot.barY(letters, Plot.groupX({y: "proportion"})),
+ Plot.ruleY([0])
+ ]
+ });
+}
diff --git a/test/plots/morley-boxplot.js b/test/plots/morley-boxplot.js
index 9b2f2f5351..fdd5819dc4 100644
--- a/test/plots/morley-boxplot.js
+++ b/test/plots/morley-boxplot.js
@@ -24,9 +24,9 @@ function boxX(data, {
...options
} = {}) {
return [
- Plot.ruleY(data, Plot.reduceX({x1: iqr1, x2: iqr2}, {x, y, stroke, ...options})),
- Plot.barX(data, Plot.reduceX({x1: quartile1, x2: quartile3}, {x, y, fill, ...options})),
- Plot.tickX(data, Plot.reduceX({x: median}, {x, y, stroke, strokeWidth: 2, ...options})),
+ Plot.ruleY(data, Plot.groupY({x1: iqr1, x2: iqr2}, {x, y, stroke, ...options})),
+ Plot.barX(data, Plot.groupY({x1: quartile1, x2: quartile3}, {x, y, fill, ...options})),
+ Plot.tickX(data, Plot.groupY({x: median}, {x, y, stroke, strokeWidth: 2, ...options})),
Plot.dot(data, Plot.map({x: outliers}, {x, y, z: y, stroke, ...options}))
];
}
diff --git a/test/plots/penguin-mass-sex-species.js b/test/plots/penguin-mass-sex-species.js
index 8627b3c789..3e03390d4f 100644
--- a/test/plots/penguin-mass-sex-species.js
+++ b/test/plots/penguin-mass-sex-species.js
@@ -15,7 +15,7 @@ export default async function() {
marginRight: 70
},
marks: [
- Plot.rectY(data, Plot.binX({x: "body_mass_g"})),
+ Plot.rectY(data, Plot.binX({y: "count"}, {x: "body_mass_g"})),
Plot.ruleY([0])
]
});
diff --git a/test/plots/penguin-mass-sex.js b/test/plots/penguin-mass-sex.js
index ac225aa47b..01d8289990 100644
--- a/test/plots/penguin-mass-sex.js
+++ b/test/plots/penguin-mass-sex.js
@@ -14,7 +14,7 @@ export default async function() {
marginRight: 70
},
marks: [
- Plot.rectY(data, Plot.binX({x: "body_mass_g"})),
+ Plot.rectY(data, Plot.binX({y: "count"}, {x: "body_mass_g"})),
Plot.ruleY([0])
]
});
diff --git a/test/plots/penguin-mass-species.js b/test/plots/penguin-mass-species.js
index 8d822f8103..83fc13c6c4 100644
--- a/test/plots/penguin-mass-species.js
+++ b/test/plots/penguin-mass-species.js
@@ -12,7 +12,7 @@ export default async function() {
grid: true
},
marks: [
- Plot.rectY(data, Plot.stackY(Plot.binX({x: "body_mass_g", fill: "species"}))),
+ Plot.rectY(data, Plot.stackY(Plot.binX({y: "count"}, {x: "body_mass_g", fill: "species"}))),
Plot.ruleY([0])
]
});
diff --git a/test/plots/penguin-mass.js b/test/plots/penguin-mass.js
index 831d9749a7..68fdc36ce5 100644
--- a/test/plots/penguin-mass.js
+++ b/test/plots/penguin-mass.js
@@ -12,7 +12,7 @@ export default async function() {
grid: true
},
marks: [
- Plot.rectY(data, Plot.binX({x: "body_mass_g"})),
+ Plot.rectY(data, Plot.binX({y: "count"}, {x: "body_mass_g"})),
Plot.ruleY([0])
]
});
diff --git a/test/plots/penguin-sex-mass-culmen-species.js b/test/plots/penguin-sex-mass-culmen-species.js
index a68b984ef1..be87ebd188 100644
--- a/test/plots/penguin-sex-mass-culmen-species.js
+++ b/test/plots/penguin-sex-mass-culmen-species.js
@@ -20,7 +20,7 @@ export default async function() {
},
marks: [
Plot.frame(),
- Plot.dot(data, Plot.binR({
+ Plot.dot(data, Plot.binMid({r: "count"}, {
x: "body_mass_g",
y: "culmen_length_mm",
stroke: "species",
diff --git a/test/plots/penguin-species-group.js b/test/plots/penguin-species-group.js
index f915e9b7d9..846e92b9c1 100644
--- a/test/plots/penguin-species-group.js
+++ b/test/plots/penguin-species-group.js
@@ -5,9 +5,9 @@ export default async function() {
const penguins = await d3.csv("data/penguins.csv", d3.autoType);
return Plot.plot({
marks: [
- Plot.barX(penguins, Plot.stackX(Plot.groupZX({fill: "species", normalize: true}))),
- Plot.text(penguins, Plot.stackXMid(Plot.groupZX({z: "species", normalize: true, text: ([d]) => d.species}))),
- Plot.ruleX([0, 100])
+ Plot.barX(penguins, Plot.stackX(Plot.groupZ({x: "proportion"}, {fill: "species"}))),
+ Plot.text(penguins, Plot.stackXMid(Plot.groupZ({x: "proportion", text: "first"}, {z: "species", text: "species"}))),
+ Plot.ruleX([0, 1])
]
});
}
diff --git a/test/plots/penguin-species-island-relative.js b/test/plots/penguin-species-island-relative.js
new file mode 100644
index 0000000000..eaad4dd503
--- /dev/null
+++ b/test/plots/penguin-species-island-relative.js
@@ -0,0 +1,22 @@
+import * as Plot from "@observablehq/plot";
+import * as d3 from "d3";
+
+export default async function() {
+ const penguins = await d3.csv("data/penguins.csv", d3.autoType);
+ return Plot.plot({
+ y: {
+ percent: true
+ },
+ fx: {
+ tickSize: 6
+ },
+ facet: {
+ data: penguins,
+ x: "species"
+ },
+ marks: [
+ Plot.barY(penguins, Plot.stackY(Plot.groupZ({y: "proportion-facet"}, {fill: "island"}))),
+ Plot.ruleY([0])
+ ]
+ });
+}
diff --git a/test/plots/penguin-species-island.js b/test/plots/penguin-species-island.js
index d06d3bb097..6669a1e8cd 100644
--- a/test/plots/penguin-species-island.js
+++ b/test/plots/penguin-species-island.js
@@ -8,7 +8,7 @@ export default async function() {
grid: true
},
marks: [
- Plot.barY(data, Plot.stackY(Plot.groupX({x: "species", fill: "island"}))),
+ Plot.barY(data, Plot.stackY(Plot.groupX({y: "count"}, {x: "species", fill: "island"}))),
Plot.ruleY([0])
]
});
diff --git a/test/plots/seattle-temperature-cell.js b/test/plots/seattle-temperature-cell.js
index 43568ae5fd..4e0d00e98c 100644
--- a/test/plots/seattle-temperature-cell.js
+++ b/test/plots/seattle-temperature-cell.js
@@ -10,14 +10,12 @@ export default async function() {
tickFormat: i => "JFMAMJJASOND"[i]
},
marks: [
- Plot.cell(seattle, {
- ...Plot.group({
- x: d => d.date.getUTCDate(),
- y: d => d.date.getUTCMonth()
- }),
- fill: d => d3.max(d, d => d.temp_max),
+ Plot.cell(seattle, Plot.group({fill: "max"}, {
+ x: d => d.date.getUTCDate(),
+ y: d => d.date.getUTCMonth(),
+ fill: "temp_max",
inset: 0.5
- })
+ }))
]
});
}
diff --git a/test/plots/simpsons-ratings-dots.js b/test/plots/simpsons-ratings-dots.js
index 947f0505ef..a2eef52bb7 100644
--- a/test/plots/simpsons-ratings-dots.js
+++ b/test/plots/simpsons-ratings-dots.js
@@ -13,8 +13,8 @@ export default async function() {
label: "↑ IMDb rating"
},
marks: [
- Plot.ruleX(simpsons, Plot.reduceY({y1: "min", y2: "max"}, {x: "season", y: "imdb_rating"})),
- Plot.line(simpsons, Plot.reduceY({y: "median"}, {x: "season", y: "imdb_rating", stroke: "red"})),
+ Plot.ruleX(simpsons, Plot.groupX({y1: "min", y2: "max"}, {x: "season", y: "imdb_rating"})),
+ Plot.line(simpsons, Plot.groupX({y: "median"}, {x: "season", y: "imdb_rating", stroke: "red"})),
Plot.dot(simpsons, {x: "season", y: "imdb_rating"})
]
});
diff --git a/test/plots/uniform-random-difference.js b/test/plots/uniform-random-difference.js
index fb719734d9..242b9fe367 100644
--- a/test/plots/uniform-random-difference.js
+++ b/test/plots/uniform-random-difference.js
@@ -10,10 +10,11 @@ export default async function() {
labelAnchor: "center"
},
y: {
- grid: true
+ grid: true,
+ percent: true
},
marks: [
- Plot.rectY({length: 10000}, Plot.binX({x: () => random() - random(), normalize: true})),
+ Plot.rectY({length: 10000}, Plot.binX({y: "proportion"}, {x: () => random() - random()})),
Plot.ruleY([0])
]
});
diff --git a/test/plots/us-population-state-age-dots.js b/test/plots/us-population-state-age-dots.js
index ca61438c01..af3e894370 100644
--- a/test/plots/us-population-state-age-dots.js
+++ b/test/plots/us-population-state-age-dots.js
@@ -24,7 +24,7 @@ export default async function() {
},
marks: [
Plot.ruleX([0]),
- Plot.ruleY(stateage, Plot.reduceX({x1: "min", x2: "max"}, position)),
+ Plot.ruleY(stateage, Plot.groupY({x1: "min", x2: "max"}, position)),
Plot.dot(stateage, {...position, fill: "age"}),
Plot.text(stateage, Plot.selectMinX({...position, textAnchor: "end", dx: -6, text: "state"}))
]
diff --git a/test/plots/word-length-moby-dick.js b/test/plots/word-length-moby-dick.js
index 9eaf23e7ae..15319707cc 100644
--- a/test/plots/word-length-moby-dick.js
+++ b/test/plots/word-length-moby-dick.js
@@ -17,10 +17,11 @@ export default async function() {
labelAnchor: "right"
},
y: {
- grid: true
+ grid: true,
+ percent: true
},
marks: [
- Plot.barY(words, Plot.groupX({x: d => d.length, normalize: true}))
+ Plot.barY(words, Plot.groupX({y: "proportion"}, {x: "length"}))
]
});
}
diff --git a/test/transforms/reduce-test.js b/test/transforms/reduce-test.js
index e0caa798ea..a00d6eb36f 100644
--- a/test/transforms/reduce-test.js
+++ b/test/transforms/reduce-test.js
@@ -19,7 +19,7 @@ tape("function reducers reduce as expected", test => {
});
function testReducer(test, data, x, r) {
- const mark = Plot.dot(data, Plot.reduceX({x}, {x: d => d}));
+ const mark = Plot.dot(data, Plot.groupZ({x}, {x: d => d}));
const c = new Map(mark.initialize().channels);
test.deepEqual(c.get("x").value, [r]);
}