diff --git a/.github/workflows/make-and-test.yml b/.github/workflows/make-and-test.yml index a59eb608d2..a403d8a706 100644 --- a/.github/workflows/make-and-test.yml +++ b/.github/workflows/make-and-test.yml @@ -65,7 +65,8 @@ jobs: matrix: # See supported Node.js release schedule at https://nodejs.org/en/about/releases/ node-version: - - "18.17.1" + - "18" + - "20" pg-version: - "13.12" - "16" @@ -93,5 +94,4 @@ jobs: - run: cd src && npm install -g pnpm - run: cd src && pnpm run make - - run: cd src && pnpm nats-server-daemon - run: cd src && pnpm run test diff --git a/src/compute/compute/lib/filesystem.ts b/src/compute/compute/lib/filesystem.ts index 53b37c71bc..e5a37c5524 100644 --- a/src/compute/compute/lib/filesystem.ts +++ b/src/compute/compute/lib/filesystem.ts @@ -22,7 +22,7 @@ import { apiCall } from "@cocalc/api-client"; import sendFiles from "./send-files"; import getFiles from "./get-files"; // ensure that the nats client is initialized so that syncfs can connect to nats properly. -import "@cocalc/project/nats"; +import "@cocalc/project/conat"; const logger = getLogger("compute:filesystem"); diff --git a/src/compute/compute/lib/manager.ts b/src/compute/compute/lib/manager.ts index b34680032e..3a6ff4ad45 100644 --- a/src/compute/compute/lib/manager.ts +++ b/src/compute/compute/lib/manager.ts @@ -7,7 +7,7 @@ The manager does the following: */ import debug from "debug"; -import startProjectServers from "@cocalc/project/nats"; +import startProjectServers from "@cocalc/project/conat"; import { pingProjectUntilSuccess, waitUntilFilesystemIsOfType } from "./util"; import { apiCall, project } from "@cocalc/api-client"; diff --git a/src/compute/compute/package.json b/src/compute/compute/package.json index 3cfb89b62d..6239329d12 100644 --- a/src/compute/compute/package.json +++ b/src/compute/compute/package.json @@ -32,7 +32,7 @@ "@cocalc/backend": "workspace:*", "@cocalc/compute": "link:", "@cocalc/jupyter": "workspace:*", - "@cocalc/nats": "workspace:*", + "@cocalc/conat": "workspace:*", "@cocalc/project": "workspace:*", "@cocalc/sync": "workspace:*", "@cocalc/sync-client": "workspace:*", diff --git a/src/compute/conat b/src/compute/conat new file mode 120000 index 0000000000..877aba1148 --- /dev/null +++ b/src/compute/conat @@ -0,0 +1 @@ +../packages/conat \ No newline at end of file diff --git a/src/compute/nats b/src/compute/nats deleted file mode 120000 index 702beda7a3..0000000000 --- a/src/compute/nats +++ /dev/null @@ -1 +0,0 @@ -../packages/nats \ No newline at end of file diff --git a/src/compute/pnpm-lock.yaml b/src/compute/pnpm-lock.yaml index 66e2285c09..a751af3e1d 100644 --- a/src/compute/pnpm-lock.yaml +++ b/src/compute/pnpm-lock.yaml @@ -17,12 +17,12 @@ importers: '@cocalc/compute': specifier: 'link:' version: 'link:' + '@cocalc/conat': + specifier: workspace:* + version: link:../conat '@cocalc/jupyter': specifier: workspace:* version: link:../jupyter - '@cocalc/nats': - specifier: workspace:* - version: link:../nats '@cocalc/project': specifier: workspace:* version: link:../project diff --git a/src/compute/pnpm-workspace.yaml b/src/compute/pnpm-workspace.yaml index 62eea2e0eb..cf9179ec47 100644 --- a/src/compute/pnpm-workspace.yaml +++ b/src/compute/pnpm-workspace.yaml @@ -3,7 +3,7 @@ packages: - backend - comm - jupyter - - nats + - conat - project - sync - sync-fs diff --git a/src/package.json b/src/package.json index 1454fef3fe..55f21a1ed7 100644 --- a/src/package.json +++ b/src/package.json @@ -19,15 +19,8 @@ "test-parallel": "unset DEBUG && pnpm run version-check && cd packages && pnpm run -r --parallel test", "test": "unset DEBUG && pnpm run version-check && cd packages && pnpm run -r test", "prettier-all": "cd packages/", - "nats-server": "cd ${COCALC_ROOT:=$INIT_CWD}/packages/backend && node -e \"require('@cocalc/backend/nats/install').main()\" && node -e \"require('@cocalc/backend/nats/conf').main()\" && node -e \"require('@cocalc/backend/nats/server').main()\"", - "build-nats": "cd ${COCALC_ROOT:=$INIT_CWD}/packages/util && pnpm install && pnpm build && cd ${COCALC_ROOT:=$INIT_CWD}/packages/nats && pnpm install && pnpm build && cd ${COCALC_ROOT:=$INIT_CWD}/packages/backend && pnpm install && pnpm build", - "nats-server-ci": "pnpm run build-nats && cd ${COCALC_ROOT:=$INIT_CWD}/packages/backend && node -e \"require('@cocalc/backend/nats/install').main()\" && node -e \"require('@cocalc/backend/nats/conf').main()\" && node -e \"require('@cocalc/backend/nats/server').main()\"", - "nats-server-daemon": "cd ${COCALC_ROOT:=$INIT_CWD}/packages/backend && node -e \"require('@cocalc/backend/nats/install').main()\" && node -e \"require('@cocalc/backend/nats/conf').main()\" && node -e \"require('@cocalc/backend/nats/server').main({daemon:true})\"", - "nats-server-verbose": "cd ${COCALC_ROOT:=$INIT_CWD}/packages/backend && node -e \"require('@cocalc/backend/nats/install').main()\" && node -e \"require('@cocalc/backend/nats/conf').main()\" && node -e \"require('@cocalc/backend/nats/server').main({verbose:true})\"", - "nats-cli": "cd ${COCALC_ROOT:=$INIT_CWD}/packages/backend && node -e \"require('@cocalc/backend/nats/cli').main()\"", - "nats-sys": "cd ${COCALC_ROOT:=$INIT_CWD}/packages/backend && node -e \"require('@cocalc/backend/nats/cli').main({user:'sys'})\"", - "nats-tiered-storage": "cd ${COCALC_ROOT:=$INIT_CWD}/packages/server && DEBUG=cocalc:* DEBUG_CONSOLE=yes node -e \"require('@cocalc/server/nats/tiered-storage').init()\"", - "local-ci": "./scripts/ci.sh" + "local-ci": "./scripts/ci.sh", + "conat-server": "cd packages/server && pnpm conat-server" }, "repository": { "type": "git", diff --git a/src/packages/backend/bin/conat-watch.cjs b/src/packages/backend/bin/conat-watch.cjs new file mode 100755 index 0000000000..b3c8f168bb --- /dev/null +++ b/src/packages/backend/bin/conat-watch.cjs @@ -0,0 +1,10 @@ +const { getEnv } = require('@cocalc/backend/conat') + +async function main() { + const subject = process.argv[2] ?? '>'; + console.log("watching ", {subject}) + const {cn} = await getEnv() + cn.watch(subject) +} + +main(); diff --git a/src/packages/backend/conat/conat.ts b/src/packages/backend/conat/conat.ts new file mode 100644 index 0000000000..3a1afe73d4 --- /dev/null +++ b/src/packages/backend/conat/conat.ts @@ -0,0 +1,15 @@ +import { conatPath, conatServer } from "@cocalc/backend/data"; +import { + connect as connect0, + Client, + type ConnectOptions, +} from "@cocalc/conat/core/client"; + +export type { Client }; + +export function connect(address?, options?: ConnectOptions): Client { + return connect0(address ? address : conatServer, { + path: conatPath, + ...options, + }); +} diff --git a/src/packages/backend/conat/env.ts b/src/packages/backend/conat/env.ts new file mode 100644 index 0000000000..27c1b1a74a --- /dev/null +++ b/src/packages/backend/conat/env.ts @@ -0,0 +1,9 @@ +import { sha1 } from "@cocalc/backend/sha1"; +import { connect as getConatClient } from "./conat"; + +export async function getEnv(options?) { + const jc = null as any; + const nc = null as any; + const cn = getConatClient(options); + return { nc, jc, cn, sha1 }; +} diff --git a/src/packages/backend/nats/index.ts b/src/packages/backend/conat/index.ts similarity index 82% rename from src/packages/backend/nats/index.ts rename to src/packages/backend/conat/index.ts index cb2d80ea41..c9a972d158 100644 --- a/src/packages/backend/nats/index.ts +++ b/src/packages/backend/conat/index.ts @@ -9,17 +9,17 @@ import { readFile } from "node:fs/promises"; import getLogger from "@cocalc/backend/logger"; import { getEnv } from "./env"; export { getEnv }; -import { inboxPrefix } from "@cocalc/nats/names"; -import { setNatsClient } from "@cocalc/nats/client"; +import { inboxPrefix } from "@cocalc/conat/names"; +import { setConatClient } from "@cocalc/conat/client"; import getConnection, { setConnectionOptions, -} from "@cocalc/backend/nats/persistent-connection"; +} from "@cocalc/backend/conat/persistent-connection"; import { hostname } from "os"; export { getConnection }; export function init() { - setNatsClient({ getNatsEnv: getEnv, getLogger }); + setConatClient({ getNatsEnv: getEnv, getLogger }); } init(); diff --git a/src/packages/backend/conat/persist.ts b/src/packages/backend/conat/persist.ts new file mode 100644 index 0000000000..dc435c828a --- /dev/null +++ b/src/packages/backend/conat/persist.ts @@ -0,0 +1,21 @@ +import "./index"; +import betterSqlite3 from "better-sqlite3"; +import { initContext } from "@cocalc/conat/persist/context"; +import { compress, decompress } from "zstd-napi"; +import { syncFiles } from "@cocalc/backend/data"; +import ensureContainingDirectoryExists from "@cocalc/backend/misc/ensure-containing-directory-exists"; + +initContext({ + betterSqlite3, + compress, + decompress, + syncFiles, + ensureContainingDirectoryExists, +}); + +export { pstream } from "@cocalc/conat/persist/storage"; +export { + init as initServer, + terminate as terminateServer, +} from "@cocalc/conat/persist/server"; +export { getAll, set, get } from "@cocalc/conat/persist/client"; diff --git a/src/packages/backend/conat/persistent-connection.ts b/src/packages/backend/conat/persistent-connection.ts new file mode 100644 index 0000000000..56f3c13669 --- /dev/null +++ b/src/packages/backend/conat/persistent-connection.ts @@ -0,0 +1,42 @@ +/* +Create a nats connection that doesn't break. + +The NATS docs + +https://github.com/nats-io/nats.js/blob/main/core/README.md#connecting-to-a-nats-server + +ensure us that "the client will always attempt to reconnect if the connection is +disrupted for a reason other than calling close()" but THAT IS NOT TRUE. +(I think the upstream code in disconnected in nats.js/core/src/protocol.ts is a lazy +and I disagree with it. It tries to connect but if anything goes slightly wrong, +just gives up forever.) + +There are definitely situations where the connection gets permanently closed +and the close() function was not called, at least not by any of our code. +I've given up on getting them to fix or understand their bugs in general: + +https://github.com/williamstein/nats-bugs/issues/8 + +We thus monitor the connection, and if it closed, we *swap out the protocol +object*, which is an evil hack to reconnect. This seems to work fine with all +our other code. + +All that said, it's excellent that the NATS library separates the protocol from +the connection object itself, so it's possible to do this at all! :-) +*/ + +import { reuseInFlight } from "@cocalc/util/reuse-in-flight"; +import type { NatsConnection } from "@cocalc/conat/types"; + +export function setConnectionOptions(_: any) {} + +// gets the singleton connection +const getConnection = reuseInFlight(async (): Promise => { + return null as any; +}); + +export default getConnection; + +export async function getNewConnection(): Promise { + return null as any; +} diff --git a/src/packages/backend/conat/sync.ts b/src/packages/backend/conat/sync.ts new file mode 100644 index 0000000000..8d5cd7d2b2 --- /dev/null +++ b/src/packages/backend/conat/sync.ts @@ -0,0 +1,32 @@ +import { + dstream as createDstream, + type DStream, +} from "@cocalc/conat/sync/dstream"; +import { dkv as createDKV, type DKV } from "@cocalc/conat/sync/dkv"; +import { dko as createDKO, type DKO } from "@cocalc/conat/sync/dko"; +import { akv as createAKV, type AKV } from "@cocalc/conat/sync/akv"; +import { createOpenFiles, type OpenFiles } from "@cocalc/conat/sync/open-files"; +export { inventory } from "@cocalc/conat/sync/inventory"; +import "./index"; + +export type { DStream, DKV, DKO, AKV }; + +export async function dstream(opts): Promise> { + return await createDstream(opts); +} + +export async function dkv(opts): Promise> { + return await createDKV(opts); +} + +export function akv(opts): AKV { + return createAKV(opts); +} + +export async function dko(opts): Promise> { + return await createDKO(opts); +} + +export async function openFiles(project_id: string, opts?): Promise { + return await createOpenFiles({ project_id, ...opts }); +} diff --git a/src/packages/backend/conat/test/core/basic.test.ts b/src/packages/backend/conat/test/core/basic.test.ts new file mode 100644 index 0000000000..d0305df815 --- /dev/null +++ b/src/packages/backend/conat/test/core/basic.test.ts @@ -0,0 +1,260 @@ +/* +Very basic test of conats core client and server. +*/ + +import { connect, before, after } from "@cocalc/backend/conat/test/setup"; +import { wait } from "@cocalc/backend/conat/test/util"; + +beforeAll(before); + +describe("connect to the server from a client", () => { + it("creates a client and confirm it connects", async () => { + const cn = connect(); + expect(cn.conn.connected).toBe(false); + await cn.waitUntilConnected(); + expect(cn.conn.connected).toBe(true); + cn.close(); + expect(cn.conn.connected).toBe(false); + }); + + it("creates a client and waits for the info field to get set", async () => { + const cn = connect(); + await wait({ until: () => cn.info != null }); + expect(cn.info?.max_payload).toBeGreaterThan(10000); + }); +}); + +describe("basic test of publish and subscribe", () => { + let sub; + + let subject = "conat"; + let cn, cn2; + it("creates a subscription to 'conat'", async () => { + cn = connect(); + sub = await cn.subscribe(subject); + }); + + it("publishes to 'conat' and verifies that the subscription receives the message", async () => { + const data = "cocalc"; + await cn.publish(subject, data); + const { value, done } = await sub.next(); + expect(value.data).toEqual(data); + expect(done).toBe(false); + }); + + it("publishes using a second client", async () => { + const data = null; + cn2 = connect(); + expect(cn === cn2).toEqual(false); + await cn2.publish(subject, data); + const { value } = await sub.next(); + expect(value.data).toEqual(data); + }); + + const count = 15; + + it(`publish ${count} messages and confirm receipt via sub.next`, async () => { + for (let i = 0; i < count; i++) { + cn.publish(subject, i); + } + for (let i = 0; i < count; i++) { + const { value } = await sub.next(); + expect(value.data).toBe(i); + } + }); + + it(`publish ${count} messages and confirm receipt via async iteration`, async () => { + const w: number[] = []; + for (let i = 0; i < count; i++) { + cn.publish(subject, i); + w.push(i); + } + const v: number[] = []; + for await (const x of sub) { + v.push(x.data); + if (v.length == w.length) { + break; + } + } + expect(w).toEqual(v); + }); + + it("confirm existing the async iterator above ended the subscription", async () => { + // this is how async iterators work... + const { done } = await sub.next(); + expect(done).toBe(true); + }); + + it("make a new subscription, then stop subscription and confirm it ends", async () => { + const sub2 = await cn.subscribe(subject); + sub2.stop(); + const { value, done } = await sub.next(); + expect(value).toBe(undefined); + expect(done).toBe(true); + }); + + // I'm unsure whether or not this is a good constraint. It does make code simpler, + // and massively protects against leaks. + it("verify that you can't subscribe twice to the same subject with a single client", async () => { + const sub1 = await cn.subscribe(subject); + await expect(async () => { + await cn.subscribe(subject); + }).rejects.toThrowError("already subscribed"); + + sub1.stop(); + // now this works + const sub2 = await cn.subscribe(subject); + sub2.stop(); + }); + + const subject2 = "foo.*.bar.>"; + it(`tests using the subject '${subject2}' with a wildcard and >`, async () => { + const sub = await cn.subscribe(subject2); + // this is ignored + cn.publish("foo.x", "abc"); + // this is received + cn.publish("foo.a.bar.b", "xxx", { headers: { a: "b" } }); + const { value: mesg } = await sub.next(); + expect(mesg.data).toBe("xxx"); + expect(mesg.headers).toEqual({ a: "b" }); + expect(mesg.subject).toBe("foo.a.bar.b"); + }); + + it("queue groups -- same queue groups, so exactly one gets the message", async () => { + const sub1 = await cn.subscribe("pub", { queue: "1" }); + const sub2 = await cn2.subscribe("pub", { queue: "1" }); + const { count } = await cn.publish("pub", "hello"); + expect(count).toBe(1); + let count1 = 0; + let count2 = 0; + (async () => { + await sub1.next(); + count1 += 1; + })(); + (async () => { + await sub2.next(); + count2 += 1; + })(); + await wait({ until: () => count1 + count2 > 0 }); + expect(count1 + count2).toBe(1); + sub1.stop(); + sub2.stop(); + }); + + it("queue groups -- distinct queue groups ALL get the message", async () => { + const sub1 = await cn.subscribe("pub3", { queue: "1" }); + const sub2 = await cn2.subscribe("pub3", { queue: "2" }); + const { count } = await cn.publish("pub3", "hello"); + expect(count).toBe(2); + const { value: mesg1 } = await sub1.next(); + const { value: mesg2 } = await sub2.next(); + expect(mesg1.data).toBe("hello"); + expect(mesg2.data).toBe("hello"); + }); +}); + +describe("basic tests of request/respond", () => { + let c1, c2; + + it("create two clients", () => { + c1 = connect(); + c2 = connect(); + }); + + let sub; + it("make one client be an eval server", async () => { + sub = await c2.subscribe("eval"); + (async () => { + for await (const mesg of sub) { + mesg.respond(eval(mesg.data)); + } + })(); + }); + + it("send a request and gets a response", async () => { + const resp = await c1.request("eval", "1+2+3+4+5+6+7+8+9+10"); + expect(resp.data).toBe(55); + }); + + it("'server' can also send a request and gets a response", async () => { + const resp = await c2.request("eval", "1+2+3+4+5"); + expect(resp.data).toBe(15); + }); + + it("send a request to a server that doesn't exist and get 503 error", async () => { + try { + await c2.request("does-not-exist", "1+2+3+4+5"); + } catch (err) { + expect(err.code == 503); + } + }); + + it("stop our server above (close subscription) and confirm get 503 error", async () => { + sub.close(); + try { + await c1.request("eval", "1+2+3+4+5"); + } catch (err) { + expect(err.code == 503); + } + }); + + let callIter; + it("create a requestMany server that iterates over what you send it", async () => { + // This example illustrates how to define a requestMany server + // and includes error handling. Note the technique of using + // the *headers* for control signalling (e.g., when we're done, or if + // there is an error) and using the message payload for the actual data. + // In Conat headers are very well supported, encouraged, and easy to use + // (and arbitrary JSON), unlike NATS.js. + + sub = await c2.subscribe("iter"); + (async () => { + for await (const mesg of sub) { + try { + for (const x of mesg.data) { + mesg.respond(x, { headers: { done: false } }); + } + mesg.respond(null, { headers: { done: true } }); + } catch (err) { + mesg.respond(null, { headers: { done: true, error: `${err}` } }); + return; + } + } + })(); + + // also function to do request + callIter = async (client, x) => { + const iter = await client.requestMany("iter", x); + const v: any[] = []; + for await (const resp of iter) { + if (resp.headers?.error) { + throw Error(resp.headers?.error); + } + if (resp.headers.done) { + return v; + } + v.push(resp.data); + } + return v; + }; + }); + + it("call the iter server -- a simple test", async () => { + const w = [3, 8, 9]; + const v = await callIter(c1, w); + expect(v).toEqual(w); + expect(v).not.toBe(w); + + // also from other client + const v2 = await callIter(c2, w); + expect(v2).toEqual(w); + }); + + it("call the iter server -- test that throws an error", async () => { + await expect(async () => { + await callIter(c1, null); + }).rejects.toThrowError("is not iterable"); + }); +}); + +afterAll(after); diff --git a/src/packages/backend/conat/test/core/connect.test.ts b/src/packages/backend/conat/test/core/connect.test.ts new file mode 100644 index 0000000000..f49453dbcd --- /dev/null +++ b/src/packages/backend/conat/test/core/connect.test.ts @@ -0,0 +1,137 @@ +/* + +pnpm test ./connect.test.ts + +*/ + +import { getPort } from "@cocalc/backend/conat/test/util"; +import { initConatServer } from "@cocalc/backend/conat/test/setup"; +import { connect } from "@cocalc/backend/conat/conat"; +import { delay } from "awaiting"; +import { wait } from "@cocalc/backend/conat/test/util"; + +const path = "/conat"; +let port; +beforeAll(async () => { + port = await getPort(); +}); + +describe("create server *after* client and ensure connects properly", () => { + let cn; + it("starts a client connecting to that port, despite there being no server yet", async () => { + cn = connect(`http://localhost:${port}`, { + path, + reconnectionDelay: 25, // fast for tests + randomizationFactor: 0, + }); + await delay(20); + expect(cn.conn.connected).toBe(false); + }); + + let server; + it("create a server", async () => { + server = await initConatServer({ port, path }); + }); + + it("now client should connect", async () => { + await cn.waitUntilConnected(); + expect(cn.conn.connected).toBe(true); + }); + + it("close server and observe client disconnect", async () => { + server.close(); + await wait({ until: () => !cn.conn.connected }); + expect(cn.conn.connected).toBe(false); + }); + + it("create server again and observe client connects again", async () => { + server = await initConatServer({ port, path }); + await wait({ until: () => cn.conn.connected }); + expect(cn.conn.connected).toBe(true); + }); + + it("clean up", () => { + server.close(); + cn.close(); + }); +}); + +describe("create server after sync creating a subscription and publishing a message, and observe that messages are dropped", () => { + // The moral here is do NOT use subscribeSync and publishSync + // unless you don't care very much... + let cn; + it("starts a client, despite there being no server yet", async () => { + cn = connect(`http://localhost:${port}`, { path }); + expect(cn.conn.connected).toBe(false); + }); + + let sub; + it("create a subscription before the server exists", () => { + sub = cn.subscribeSync("xyz"); + const { bytes } = cn.publishSync("xyz", "hello"); + expect(bytes).toBe(6); + cn.publishSync("xyz", "conat"); + }); + + let server; + it("start the server", async () => { + server = await initConatServer({ port, path }); + await wait({ until: () => cn.conn.connected }); + await delay(50); + }); + + it("see that both messages we sent before connecting were dropped", async () => { + const { bytes, count } = await cn.publish("xyz", "more"); + expect(count).toBe(1); + expect(bytes).toBe(5); + const { value: mesg1 } = await sub.next(); + // we just got a message but it's AFTER the two above. + expect(mesg1.data).toBe("more"); + }); + + it("clean up", () => { + server.close(); + cn.close(); + sub.close(); + }); +}); + +describe("create server after async creating a subscription and async publishing a message, and observe that it DOES works", () => { + let cn; + it("starts a client, despite there being no server yet", async () => { + cn = connect(`http://localhost:${port}`, { path }); + expect(cn.conn.connected).toBe(false); + }); + + let sub; + let recv: any[] = []; + it("create a sync subscription before the server exists", () => { + const f = async () => { + sub = await cn.subscribe("xyz"); + await cn.publish("xyz", "hello"); + const { value: mesg } = await sub.next(); + recv.push(mesg.data); + await cn.publish("xyz", "conat"); + const { value: mesg2 } = await sub.next(); + recv.push(mesg2.data); + }; + f(); + }); + + let server; + it("start the server", async () => { + server = await initConatServer({ port, path }); + await wait({ until: () => cn.conn.connected }); + }); + + it("see that both messages we sent before connecting arrive", async () => { + await wait({ until: () => recv.length == 2 }); + expect(recv).toEqual(["hello", "conat"]); + }); + + it("clean up", () => { + server.close(); + cn.close(); + sub.close(); + }); +}); diff --git a/src/packages/backend/conat/test/core/core-stream.test.ts b/src/packages/backend/conat/test/core/core-stream.test.ts new file mode 100644 index 0000000000..427f5c921d --- /dev/null +++ b/src/packages/backend/conat/test/core/core-stream.test.ts @@ -0,0 +1,542 @@ +/* +DEVELOPMENT: + + + pnpm test ./core-stream.test.ts + +*/ + +import { connect, before, after } from "@cocalc/backend/conat/test/setup"; +import { + cstream, + COCALC_STREAM_HEADER, + type CoreStream, + KEY_GC_THRESH, +} from "@cocalc/conat/sync/core-stream"; +import { wait } from "@cocalc/backend/conat/test/util"; +import type { Client } from "@cocalc/conat/core/client"; +import { is_date as isDate } from "@cocalc/util/misc"; +import { dstream } from "@cocalc/conat/sync/dstream"; + +beforeAll(before); + +describe("create a client, create an ephemeral leader core-stream, and do basic tests involving just this leader", () => { + let client; + let stream; + let name = `test-${Math.random()}`; + + it("creates ephemeral core stream", async () => { + client = connect(); + stream = await cstream({ client, name, persist: false, leader: true }); + expect(stream.length).toBe(0); + expect(stream.leader).toBe(true); + expect(stream.start_seq).toBe(undefined); + }); + + it("publish some messages", async () => { + // publish null + await stream.publish(null); + expect(stream.get(0)).toBe(null); + expect(stream.length).toBe(1); + // publish a Buffer stays a Buffer + await stream.publish(Buffer.from("xyz")); + expect(stream.get(1)).toEqual(Buffer.from("xyz")); + expect(Buffer.isBuffer(stream.get(1))).toBe(true); + expect(stream.length).toBe(2); + // publish a Date stays a Date + const now = new Date(); + await stream.publish(now); + expect(stream.get(2)).toEqual(now); + expect(isDate(stream.get(2))).toEqual(true); + }); + + it("publishing undefined is not allowed", async () => { + await expect( + async () => await stream.publish(undefined), + ).rejects.toThrowError("must not be 'undefined'"); + }); + + it("a second client has the same messages", async () => { + const client2 = connect(); + const stream2 = await cstream({ + client: client2, + name, + persist: false, + leader: false, + }); + await wait({ until: () => stream2.length == 3 }); + expect(stream2.getAll()).toEqual(stream.getAll()); + }); + + it("close and create and see that it's ephemeral", async () => { + stream.close(); + stream = await cstream({ client, name, persist: false, leader: true }); + expect(stream.length).toBe(0); + }); + + const count = 100; + it(`publish ${count} messages and observe it works`, async () => { + const v: number[] = []; + for (let i = 0; i < 100; i++) { + await stream.publish(i); + v.push(i); + expect(stream.get(i)).toBe(i); + expect(stream.length).toBe(i + 1); + } + expect(stream.length).toBe(100); + expect(stream.getAll()).toEqual(v); + }); + + it("publish a message with a header", async () => { + await stream.publish("body", { headers: { foo: { 10: 5 } } }); + const headers = stream.headers(stream.length - 1); + expect(headers).toEqual(expect.objectContaining({ foo: { 10: 5 } })); + // streams also have an internalheader + expect(headers[COCALC_STREAM_HEADER].seq).toBe(101); + expect(typeof headers[COCALC_STREAM_HEADER].timestamp).toBe("number"); + expect(typeof headers[COCALC_STREAM_HEADER].sessionId).toBe("string"); + expect(stream.time(stream.length - 1)).toEqual( + new Date(headers[COCALC_STREAM_HEADER].timestamp), + ); + }); + + it("some time consistency checks", () => { + expect( + Math.abs(stream.time(stream.length - 1).valueOf() - Date.now()), + ).toBeLessThan(100); + const times = stream.times(); + expect(times.length).toBe(stream.length); + expect(times.slice(-1)[0]).toEqual(stream.time(stream.length - 1)); + }); + + it("stats consistency check", () => { + const stats = stream.stats(); + expect(stats.count).toBe(stream.length); + expect(stats.bytes).not.toBeNaN(); + expect(stats.bytes).toBeGreaterThan(100); + }); + + it("enforce limits doesn't crash (not much of a test as we didn't set any limits)", async () => { + await stream.enforceLimitsNow(); + }); + + it("delete everything in the stream", async () => { + await stream.delete({all:true}); + expect(stream.length).toBe(0); + const stats = stream.stats(); + expect(stats.count).toBe(0); + expect(stats.bytes).toBe(0); + }); + + it("clean up", () => { + stream.close(); + client.close(); + }); +}); + +describe("create three clients, create a leader core ephemeral stream with one client and two non-leader core ephemeral streams with the other two clients, then observe basic sync operations work", () => { + const clients: Client[] = []; + const streams: CoreStream[] = []; + let name = `test-${Math.random()}`; + + it("creates clients and 3 ephemeral core streams -- one a leader and two followers", async () => { + for (let i = 0; i < 4; i++) { + clients.push(connect()); + } + streams.push( + await cstream({ client: clients[0], name, persist: false, leader: true }), + ); + for (let i = 1; i < 3; i++) { + streams.push( + await cstream({ + client: clients[i], + name, + persist: false, + leader: false, + }), + ); + } + expect(streams[0].length).toBe(0); + // @ts-ignore + expect(streams[0].leader).toBe(true); + expect(streams[1].length).toBe(0); + // @ts-ignore + expect(streams[1].leader).toBe(false); + expect(streams[2].length).toBe(0); + // @ts-ignore + expect(streams[2].leader).toBe(false); + }); + + it("writes to leader stream and sees change reflected in the other 2", async () => { + await streams[0].publish("hello"); + expect(streams[0].length).toBe(1); + await wait({ until: () => streams[1].length > 0 && streams[2].length > 0 }); + expect(streams[1].length).toBe(1); + expect(streams[2].length).toBe(1); + for (let i = 0; i < 3; i++) { + expect(streams[i].get(0)).toBe("hello"); + } + }); + + it("writes to non-leader stream and sees change reflected in the other 2", async () => { + await streams[1].publish("conat"); + expect(streams[1].length).toBe(2); + await wait({ until: () => streams[0].length > 1 && streams[2].length > 1 }); + expect(streams[0].length).toBe(2); + expect(streams[2].length).toBe(2); + for (let i = 0; i < 3; i++) { + expect(streams[i].get(1)).toBe("conat"); + } + }); + + it("add a new non-leader stream and confirm it is initialized correctly and works", async () => { + streams.push( + await cstream({ + client: clients[3], + name, + persist: false, + leader: false, + }), + ); + expect(streams[3].getAll()).toEqual(["hello", "conat"]); + + await streams[3].publish({ a: "stream" }); + await wait({ until: () => streams[0].length > 2 && streams[1].length > 2 }); + for (let i = 0; i < 4; i++) { + expect(streams[i].get(2)).toEqual({ a: "stream" }); + } + }); + + it("cleans up", () => { + for (let i = 0; i < 4; i++) { + streams[i].close(); + clients[i].close(); + } + }); +}); + +describe("test creating a non-leader first, then the leader", () => { + const clients: Client[] = []; + const streams: (CoreStream | null)[] = [null, null]; + let name = `test-${Math.random()}`; + + it("creates 2 clients, then a follower, then the leader ", async () => { + clients.push(connect()); + clients.push(connect()); + + const createFollower = async () => { + streams[0] = await cstream({ + client: clients[0], + name, + persist: false, + leader: false, + }); + await streams[0].publish("before"); + }; + createFollower(); + + // now follower is being created, but should be stuck waiting for the leader + + streams[1] = await cstream({ + client: clients[1], + name, + persist: false, + leader: true, + }); + + await wait({ + until: () => + (streams[0]?.length ?? 0) > 0 && (streams[1]?.length ?? 0) > 0, + }); + expect(streams[1].getAll()).toEqual(["before"]); + expect(streams[0]?.getAll()).toEqual(["before"]); + }); + + it("cleans up", () => { + for (let i = 2; i < 2; i++) { + streams[i]?.close(); + clients[i]?.close(); + } + }); +}); + +// There a lot more similar tests of dstream ephemeral in backend/conat/test/sync/dstream-ephemeral.test.ts +describe("test using ephemeral dstream", () => { + let client; + let stream; + let name = `test-${Math.random()}`; + + it("creates an ephemeral dstream", async () => { + client = connect(); + stream = await dstream({ client, name, ephemeral: true, leader: true }); + expect(stream.length).toBe(0); + }); + + it("publishes a value", () => { + stream.publish(0); + expect(stream.getAll()).toEqual([0]); + }); + + // [ ] TODO: this is be fast even for count=10000, + // but it is NOT. We use a smaller value for now. + const count = 100; + it(`publish ${count} messages`, async () => { + const v: number[] = [0]; + for (let i = 1; i < count; i++) { + stream.publish(i); + v.push(i); + expect(stream.get(i)).toBe(i); + expect(stream.length).toBe(i + 1); + } + expect(stream.length).toBe(count); + expect(stream.getAll()).toEqual(v); + await stream.save(); + }); + + let client2; + let stream2; + it("opens a second dstream non-leader", async () => { + client2 = connect(); + stream2 = await dstream({ + client: client2, + name, + ephemeral: true, + leader: true, + }); + expect(stream2.length).toBe(count); + }); + + it("write to the second stream and see reflected in the first", async () => { + await stream2.publish("x"); + wait({ until: () => stream.length == 101 }); + expect(stream.get(stream.length - 1)).toBe("x"); + }); +}); + +describe("test basic key:value functionality for persistent core stream", () => { + let client; + let stream; + let name = "kv0"; + + it("creates persistent core stream", async () => { + client = connect(); + stream = await cstream({ client, name, persist: true }); + expect(stream.length).toBe(0); + expect(stream.start_seq).toBe(undefined); + }); + + let seq; + + it("writes a key:value and confirms it was written", async () => { + await stream.setKv("key", "value"); + expect(await stream.getKv("key")).toEqual("value"); + seq = stream.seqKv("key"); + }); + + it("also confirm via getAllKv", () => { + expect(stream.getAllKv()).toEqual({ key: "value" }); + }); + + it("closes and reopens stream, to confirm the key was persisted", async () => { + stream.close(); + expect(stream.kv).toBe(undefined); + stream = await cstream({ client, name, persist: true }); + expect(stream.hasKv("key")).toBe(true); + expect(stream.hasKv("key2")).toBe(false); + expect(stream.length).toBe(1); + expect(await stream.getKv("key")).toEqual("value"); + expect(stream.seqKv("key")).toBe(seq); + }); + + let client2; + let stream2; + it("create a second client and observe it sees the correct value", async () => { + client2 = connect(); + stream2 = await cstream({ + client: client2, + name, + persist: true, + noCache: true, + }); + expect(await stream2.getKv("key")).toEqual("value"); + }); + + it("modify the value via the second client and see it change in the first", async () => { + await stream2.setKv("key", "value2"); + await wait({ until: () => stream.getKv("key") == "value2" }); + }); + + it("verify that the overwritten message is cleared to save space in both streams", () => { + expect(stream.get(0)).not.toBe(undefined); + expect(stream2.get(0)).not.toBe(undefined); + stream.gcKv(); + stream2.gcKv(); + expect(stream.get(0)).toBe(undefined); + expect(stream2.get(0)).toBe(undefined); + expect(stream.headers(0)).toBe(undefined); + expect(stream2.headers(0)).toBe(undefined); + }); + + it("write a large key:value, then write it again to cause automatic garbage collection", async () => { + await stream.setKv("key", Buffer.from("x".repeat(KEY_GC_THRESH + 10))); + expect(stream.get(stream.length - 1).length).toBe(KEY_GC_THRESH + 10); + await stream.setKv("key", Buffer.from("x".repeat(KEY_GC_THRESH + 10))); + // it's gone + expect(stream.get(stream.length - 2)).toBe(undefined); + }); + + it("close and reload and note there is only one item in the stream (the first message was removed since it is no longer needed)", async () => { + stream.close(); + expect(stream.kv).toBe(undefined); + stream = await cstream({ client, name, persist: true }); + expect(stream.length).toBe(1); + expect(stream.seqKv(0)).toBe(stream2.seqKv(1)); + }); + + it("cleans up", () => { + stream.close(); + stream2.close(); + client.close(); + client2.close(); + }); +}); + +describe("test key:value delete", () => { + let client; + let stream; + let name = "kvd"; + let client2; + let stream2; + + it("creates new persistent core stream with two copies/clients", async () => { + client = connect(); + stream = await cstream({ client, name, persist: true }); + + client2 = connect(); + stream2 = await cstream({ + client: client2, + name, + persist: true, + noCache: true, + }); + }); + + it("writes to key:value and confirms it was written", async () => { + await stream.setKv("key", "value"); + expect(await stream.getKv("key")).toEqual("value"); + await wait({ until: () => stream2.getKv("key") == "value" }); + + // also use an empty '' key + await stream.setKv("", "a value"); + expect(await stream.getKv("")).toEqual("a value"); + await wait({ until: () => stream2.getKv("") == "a value" }); + }); + + it("deletes the key and confirms it was deleted", async () => { + await stream.deleteKv("key"); + expect(await stream.getKv("key")).toEqual(undefined); + await wait({ until: () => stream2.getKv("key") === undefined }); + }); + + it("also delete the empty key one", async () => { + await stream2.deleteKv(""); + expect(await stream2.getKv("")).toEqual(undefined); + await wait({ until: () => stream.getKv("") === undefined }); + }); + + it("delete a key that doesn't exist -- a no-op (shouldn't make sequence longer)", async () => { + const n = stream.length; + await stream.deleteKv("fake"); + expect(stream.length).toBe(n); + }); + + it("cleans up", () => { + stream.close(); + stream2.close(); + client.close(); + client2.close(); + }); +}); + +describe("test previousSeq when setting keys, which can be used to ensure consistent read/writes", () => { + let client; + let stream; + let name = "prev"; + + it("creates persistent stream", async () => { + client = connect(); + stream = await cstream({ client, name, persist: true }); + }); + + let seq; + it("sets a value", async () => { + const { seq: seq0 } = await stream.setKv("my", "value"); + expect(seq0).toBeGreaterThan(0); + seq = seq0; + }); + + it("tries to change the value using the wrong previousSeq", async () => { + await expect(async () => { + await stream.setKv("my", "newval", { previousSeq: 0 }); + }).rejects.toThrowError("wrong last sequence"); + }); + + it("changes the value using the correct previousSeq", async () => { + const { seq: seq1 } = await stream.setKv("my", "newval", { + previousSeq: seq, + }); + expect(stream.getKv("my")).toBe("newval"); + expect(stream.seqKv("my")).toBe(seq1); + }); + + it("previousSeq is ignored with non-key sets", async () => { + await stream.publish("stuff", { previousSeq: 0 }); + expect(stream.get(stream.length - 1)).toBe("stuff"); + }); +}); + +describe("test msgID dedup", () => { + let client; + let stream; + let name = "msgid"; + let client2; + let stream2; + + it("creates two clients", async () => { + client = connect(); + stream = await cstream({ client, name, persist: true }); + + client2 = connect(); + stream2 = await cstream({ + client: client2, + name, + persist: true, + noCache: true, + }); + + expect(stream === stream2).toBe(false); + }); + + it("publishes a message with msgID twice and sees it only appears once", async () => { + await stream.publish("x", { msgID: "myid" }); + await stream.publish("y", { msgID: "myid2" }); + await stream.publish("x", { msgID: "myid" }); + expect(stream.getAll()).toEqual(["x", "y"]); + await wait({ until: () => stream2.length == 2 }); + expect(stream2.getAll()).toEqual(["x", "y"]); + expect(stream.msgIDs.has("myid")).toBe(true); + }); + + it("publishes same message from other stream doesn't cause it to appear again either (so msgID check is server side)", async () => { + // not just using local info and not accidentally the same object: + expect(stream2.msgIDs.has("myid")).toBe(false); + await stream2.publish("x", { msgID: "myid" }); + expect(stream2.getAll()).toEqual(["x", "y"]); + await stream2.publish("y", { msgID: "myid2" }); + expect(stream2.getAll()).toEqual(["x", "y"]); + }); +}); + +// TODO ephemeral kv store (not implemented yet!) + +afterAll(after); diff --git a/src/packages/backend/conat/test/core/services.test.ts b/src/packages/backend/conat/test/core/services.test.ts new file mode 100644 index 0000000000..71349714aa --- /dev/null +++ b/src/packages/backend/conat/test/core/services.test.ts @@ -0,0 +1,112 @@ +/* +pnpm test ./services.test.ts +*/ + +import { before, after, connect } from "@cocalc/backend/conat/test/setup"; +import { Client } from "@cocalc/conat/core/client"; +import { delay } from "awaiting"; +import { wait } from "@cocalc/backend/conat/test/util"; + +beforeAll(before); + +describe("test creating subscriptions with service property set", () => { + let client1, client2; + it("create two clients", async () => { + client1 = connect({ reconnectionDelay: 50 }); + client2 = connect(); + }); + + let sub; + it("create an ephemeral subscription in client1 and make sure it can be used from client2", async () => { + sub = await client1.subscribe("foo", { ephemeral: true }); + const { count } = await client2.publish("foo", "hello"); + expect(count).toBe(1); + const { value } = await sub.next(); + expect(value.data).toBe("hello"); + }); + + it("disconnects client1 and observes that client2 doesn't think client1 is listening anymore, rather than having requests 'hang forever'", async () => { + client1.conn.io.engine.close(); + await wait({ + until: async () => { + const { count } = await client2.publish("foo", "hello"); + return count == 0; + }, + }); + }); + + it("waits for client1 to connect again and observes that it *does* start receiving messages", async () => { + await wait({ + until: async () => { + const { count } = await client2.publish("foo", "hello"); + return count == 1; + }, + }); + }); + + let sub2; + it("tries the same with services not set and observes that messages are queued", async () => { + sub2 = await client1.subscribe("foo2", { ephemeral: false }); + client1.conn.io.engine.close(); + await delay(10); + const { count } = await client2.publish("foo2", "hello"); + expect(count).toBe(1); + }); + + it("gets the message upon reconnect", async () => { + const { value } = await sub2.next(); + expect(value.data).toBe("hello"); + }); + + it("cleans up", () => { + sub.close(); + sub2.close(); + client1.close(); + client2.close(); + }); +}); + +describe("services with the ephemeral option", () => { + let client1: Client, client2: Client; + it("create two clients", async () => { + client1 = connect({ reconnectionDelay: 1000 }); + client2 = connect(); + }); + + let service, arith; + it("create a service in client1 and make sure it can be used from client2", async () => { + interface Api { + add: (a: number, b: number) => Promise; + mul: (a: number, b: number) => Promise; + } + service = await client1.service("arith", { + add: async (a, b) => a + b, + mul: async (a, b) => a * b, + }); + + arith = client2.call("arith"); + expect(await arith.mul(2, 3)).toBe(6); + expect(await arith.add(2, 3)).toBe(5); + }); + + it("tests disconnect", async () => { + client1.conn.io.engine.close(); + await wait({ + until: async () => { + const { count } = await client2.publish("arith", "hello"); + return count == 0; + }, + }); + await expect(async () => { + await arith.mul(2, 3); + }).rejects.toThrowError("no subscribers"); + }); + + it("cleans up", () => { + service.close(); + client1.close(); + client2.close(); + }); +}); + +afterAll(after); diff --git a/src/packages/backend/nats/test/files/read.test.ts b/src/packages/backend/conat/test/files/read.test.ts similarity index 92% rename from src/packages/backend/nats/test/files/read.test.ts rename to src/packages/backend/conat/test/files/read.test.ts index 9a72326516..ef87e46139 100644 --- a/src/packages/backend/nats/test/files/read.test.ts +++ b/src/packages/backend/conat/test/files/read.test.ts @@ -4,12 +4,15 @@ Test async streaming read of files from a compute servers using NATS. DEVELOPMENT: -pnpm exec jest --watch --forceExit --detectOpenHandles "read.test.ts" +pnpm test ./read.test.ts */ -import "@cocalc/backend/nats"; -import { close, createServer, readFile } from "@cocalc/nats/files/read"; +import { before, after } from "@cocalc/backend/conat/test/setup"; + +beforeAll(before); + +import { close, createServer, readFile } from "@cocalc/conat/files/read"; import { createReadStream } from "fs"; import { file as tempFile } from "tmp-promise"; import { writeFile as fsWriteFile } from "fs/promises"; @@ -103,3 +106,6 @@ describe("do a larger test that involves multiple chunks and a different name", } }); }); + + +afterAll(after); diff --git a/src/packages/backend/nats/test/files/write.test.ts b/src/packages/backend/conat/test/files/write.test.ts similarity index 93% rename from src/packages/backend/nats/test/files/write.test.ts rename to src/packages/backend/conat/test/files/write.test.ts index 9ce062f95e..029efc4994 100644 --- a/src/packages/backend/nats/test/files/write.test.ts +++ b/src/packages/backend/conat/test/files/write.test.ts @@ -4,12 +4,15 @@ Test async streaming writing of files to compute servers using NATS. DEVELOPMENT: - pnpm exec jest --watch --forceExit --detectOpenHandles "write.test.ts" + pnpm test ./write.test.ts */ -import "@cocalc/backend/nats"; -import { close, createServer, writeFile } from "@cocalc/nats/files/write"; +import { before, after } from "@cocalc/backend/conat/test/setup"; + +beforeAll(before); + +import { close, createServer, writeFile } from "@cocalc/conat/files/write"; import { createWriteStream, createReadStream } from "fs"; import { file as tempFile } from "tmp-promise"; import { writeFile as fsWriteFile, readFile } from "fs/promises"; @@ -125,3 +128,8 @@ describe("do a more challenging test that involves a larger file that has to be } }); }); + + + + +afterAll(after); \ No newline at end of file diff --git a/src/packages/backend/nats/test/llm.test.ts b/src/packages/backend/conat/test/llm.test.ts similarity index 88% rename from src/packages/backend/nats/test/llm.test.ts rename to src/packages/backend/conat/test/llm.test.ts index 74929fae43..84474c0f00 100644 --- a/src/packages/backend/nats/test/llm.test.ts +++ b/src/packages/backend/conat/test/llm.test.ts @@ -8,10 +8,13 @@ DEVELOPMENT: */ // this sets client -import "@cocalc/backend/nats"; +import "@cocalc/backend/conat"; -import { init, close } from "@cocalc/nats/llm/server"; -import { llm } from "@cocalc/nats/llm/client"; +import { init, close } from "@cocalc/conat/llm/server"; +import { llm } from "@cocalc/conat/llm/client"; +import { before, after } from "@cocalc/backend/conat/test/setup"; + +beforeAll(before); describe("create an llm server, client, and stub evaluator, and run an evaluation", () => { // define trivial evaluate @@ -82,3 +85,5 @@ describe("test an evaluate that throws an error half way through", () => { await close(); }); }); + +afterAll(after); diff --git a/src/packages/backend/conat/test/server/limits.test.ts b/src/packages/backend/conat/test/server/limits.test.ts new file mode 100644 index 0000000000..5ea66d6a81 --- /dev/null +++ b/src/packages/backend/conat/test/server/limits.test.ts @@ -0,0 +1,34 @@ +/* +Test various configurable limits of the server. + +pnpm test ./limits.test.ts +*/ + +import { createServer } from "@cocalc/backend/conat/test/setup"; + +describe("test the per user subscription limit", () => { + let server; + + it("creates a server with a subscription limit of 3", async () => { + server = await createServer({ maxSubscriptionsPerClient: 3 }); + }); + + let client; + it("creates a client and makes 2 subscriptions fine", async () => { + // can't make a third, since the default INBOX subscription already counts. + client = server.client(); + await client.sub("sub1"); + await client.sub("sub2"); + }); + + it("creates another subscription and gets an error", async () => { + await expect(async () => { + await client.sub("sub3"); + }).rejects.toThrowError("limit"); + }); + + it("cleans up", () => { + client.close(); + server.close(); + }); +}); diff --git a/src/packages/backend/conat/test/service.test.ts b/src/packages/backend/conat/test/service.test.ts new file mode 100644 index 0000000000..2709b5d147 --- /dev/null +++ b/src/packages/backend/conat/test/service.test.ts @@ -0,0 +1,279 @@ +/* + +DEVELOPMENT: + +pnpm test ./service.test.ts + +*/ + +import { callConatService, createConatService } from "@cocalc/conat/service"; +import { + createServiceClient, + createServiceHandler, +} from "@cocalc/conat/service/typed"; +import { before, after } from "@cocalc/backend/conat/test/setup"; +import { wait } from "@cocalc/backend/conat/test/util"; +import { is_date as isDate } from "@cocalc/util/misc"; +import { delay } from "awaiting"; +import { initConatServer } from "@cocalc/backend/conat/test/setup"; +import { getPort } from "@cocalc/backend/conat/test/util"; +import { once } from "@cocalc/util/async-utils"; + +beforeAll(before); + +describe("create a service and test it out", () => { + let s; + it("creates a service", async () => { + s = createConatService({ + service: "echo", + handler: (mesg) => mesg.repeat(2), + }); + await once(s, "running"); + expect(await callConatService({ service: "echo", mesg: "hello" })).toBe( + "hellohello", + ); + }); + + it("closes the services and observes it doesn't work anymore", async () => { + s.close(); + await expect(async () => { + await callConatService({ service: "echo", mesg: "hi", timeout: 250 }); + }).rejects.toThrowError("timeout"); + }); +}); + +describe("verify that you can create a service AFTER calling it and things to still work fine", () => { + let result = ""; + it("call a service that does not exist yet", () => { + (async () => { + result = await callConatService({ service: "echo3", mesg: "hello " }); + })(); + }); + + it("create the echo3 service and observe that it answer the request we made before the service was created", async () => { + const s = createConatService({ + service: "echo3", + handler: (mesg) => mesg.repeat(3), + }); + await wait({ until: () => result }); + expect(result).toBe("hello hello hello "); + + s.close(); + }); +}); + +describe("create and test a more complicated service", () => { + let client, service; + it("defines the service", async () => { + interface Api { + add: (a: number, b: number) => Promise; + concat: (a: Buffer, b: Buffer) => Promise; + now: () => Promise; + big: (n: number) => Promise; + len: (s: string) => Promise; + } + + const name = "my-service"; + service = await createServiceHandler({ + service: name, + subject: name, + description: "My Service", + impl: { + // put any functions here that take/return MsgPack'able values + add: async (a, b) => a + b, + concat: async (a, b) => Buffer.concat([a, b]), + now: async () => { + await delay(5); + return new Date(); + }, + big: async (n: number) => "x".repeat(n), + len: async (s: string) => s.length, + }, + }); + + client = createServiceClient({ + service: name, + subject: name, + }); + }); + + it("tests the service", async () => { + // these calls are all type checked using typescript + expect(await client.add(2, 3)).toBe(5); + + const a = Buffer.from("hello"); + const b = Buffer.from(" conat"); + expect(await client.concat(a, b)).toEqual(Buffer.concat([a, b])); + + const d = await client.now(); + expect(isDate(d)).toBe(true); + expect(Math.abs(d.valueOf() - Date.now())).toBeLessThan(100); + + const n = 10 * 1e6; + expect((await client.big(n)).length).toBe(n); + + expect(await client.len("x".repeat(n))).toBe(n); + }); + + it("cleans up", () => { + service.close(); + }); +}); + +describe("create a service with specified client, stop and start the server, and see service still works", () => { + let server; + let client; + let client2; + let port; + it("create a conat server and client", async () => { + port = await getPort(); + server = await initConatServer({ port }); + client = server.client({ reconnectionDelay: 50 }); + client2 = server.client({ reconnectionDelay: 50 }); + }); + + let service; + it("create a non-ephemeral service using specific client and call it using both clients", async () => { + //You usually do NOT want a non-ephemeral service... + service = createConatService({ + client, + service: "double", + handler: (mesg) => mesg.repeat(2), + ephemeral: false, + }); + await once(service, "running"); + + expect( + await callConatService({ client, service: "double", mesg: "hello" }), + ).toBe("hellohello"); + + expect( + await callConatService({ + client: client2, + service: "double", + mesg: "hello", + }), + ).toBe("hellohello"); + }); + + it("disconnect client and check service still works on reconnect (because not ephemeral)", async () => { + // cause a disconnect -- client will connect again in 50ms soon + // and handle the request below: + client.conn.io.engine.close(); + expect( + await callConatService({ + client: client2, + service: "double", + mesg: "hello", + }), + ).toBe("hellohello"); + }); + + // it("disconnect client2 and check service still works on reconnect", async () => { + // // cause a disconnect -- client will connect again in 50ms soon + // // and handle the request below: + // client2.conn.io.engine.close(); + // expect( + // await callConatService({ + // client: client2, + // service: "double", + // mesg: "hello", + // }), + // ).toBe("hellohello"); + // }); + + // it("disconnect both clients and check service still works on reconnect", async () => { + // // cause a disconnect -- client will connect again in 50ms soon + // // and handle the request below: + // client.conn.io.engine.close(); + // client2.conn.io.engine.close(); + // expect( + // await callConatService({ + // client: client2, + // service: "double", + // mesg: "hello", + // }), + // ).toBe("hellohello"); + // }); + + it("kills the server, then makes another server serving on the same port", async () => { + await server.close(); + server = await initConatServer({ port }); + //await delay(250); + // Killing the server is not at all a normal thing to expect, and causes loss of + // its state. The clients have to sync realize subscriptions are missing. This + // takes a fraction of a second and the call below won't immediately work. + await wait({ + until: async () => { + try { + await callConatService({ + client: client2, + service: "double", + mesg: "hello", + noRetry: true, + timeout: 250, + }); + return true; + } catch (err) { + return false; + } + }, + }); + expect( + await callConatService({ + client: client2, + service: "double", + mesg: "hello", + noRetry: true, + }), + ).toBe("hellohello"); + }); + + it("cleans up", () => { + service.close(); + client.close(); + client2.close(); + server.close(); + }); +}); + +describe("create a slow service and check that the timeout parameter works", () => { + let s; + it("creates a slow service", async () => { + s = createConatService({ + service: "slow", + handler: async (d) => { + await delay(d); + return { delay: d }; + }, + }); + await once(s, "running"); + }); + + it("confirms it works", async () => { + const t0 = Date.now(); + const r = await callConatService({ + service: s.name, + mesg: 50, + }); + expect(r).toEqual({ delay: 50 }); + expect(Date.now() - t0).toBeGreaterThan(45); + expect(Date.now() - t0).toBeLessThan(500); + }); + + it("confirms it throws a timeout error", async () => { + await expect(async () => { + await callConatService({ + service: s.name, + mesg: 5000, + timeout: 75, + }); + }).rejects.toThrowError("imeout"); + }); + + it("clean up", async () => { + s.close(); + }); +}); + +afterAll(after); diff --git a/src/packages/backend/conat/test/setup.ts b/src/packages/backend/conat/test/setup.ts new file mode 100644 index 0000000000..8a349266c5 --- /dev/null +++ b/src/packages/backend/conat/test/setup.ts @@ -0,0 +1,79 @@ +import { getPort } from "@cocalc/backend/conat/test/util"; +import { type Client } from "@cocalc/conat/core/client"; +import { + init as createConatServer, + type Options, + type ConatServer, +} from "@cocalc/conat/core/server"; +import { Server } from "socket.io"; +import getLogger from "@cocalc/backend/logger"; +import { setConatClient } from "@cocalc/conat/client"; +import { sha1 } from "@cocalc/backend/sha1"; +import { + initServer as initPersistServer, + terminateServer as terminatePersistServer, +} from "@cocalc/backend/conat/persist"; +import { syncFiles } from "@cocalc/conat/persist/context"; +import { mkdtemp, rm } from "node:fs/promises"; +import { tmpdir } from "node:os"; +import { join } from "path"; +export { wait } from "@cocalc/backend/conat/test/util"; + +const logger = getLogger("conat:test:setup"); + +export const path = "/conat"; + +export async function initConatServer( + options: Partial = {}, +): Promise { + logger.debug("init"); + if (!options?.port) { + const port = await getPort(); + options = { ...options, port }; + } + + return createConatServer({ + logger: logger.debug, + Server, + ...options, + }); +} + +export let server; +export let tempDir; + +export async function createServer(opts?) { + const port = await getPort(); + server = await initConatServer({ port, path, ...opts }); + await initPersistServer({ client: connect() }); + return server; +} + +export async function before() { + tempDir = await mkdtemp(join(tmpdir(), "conat-test")); + server = await createServer(); + syncFiles.local = join(tempDir, "local"); + syncFiles.archive = join(tempDir, "archive"); + setConatClient({ + getNatsEnv: async () => { + return { cn: connect(), sha1 } as any; + }, + getLogger, + }); +} + +const clients: Client[] = []; +export function connect(...args): Client { + const cn = server.client(...args); + clients.push(cn); + return cn; +} + +export async function after() { + terminatePersistServer(); + await rm(tempDir, { force: true, recursive: true }); + await server.close(); + for (const cn of clients) { + cn.close(); + } +} diff --git a/src/packages/backend/nats/test/sync/akv.test.ts b/src/packages/backend/conat/test/sync/akv.test.ts similarity index 75% rename from src/packages/backend/nats/test/sync/akv.test.ts rename to src/packages/backend/conat/test/sync/akv.test.ts index 193ed44e35..5205f7aa5f 100644 --- a/src/packages/backend/nats/test/sync/akv.test.ts +++ b/src/packages/backend/conat/test/sync/akv.test.ts @@ -3,13 +3,16 @@ Testing basic ops with dkv DEVELOPMENT: -pnpm exec jest --forceExit "akv.test.ts" +pnpm test ./akv.test.ts */ -import { dkv as createDkv, akv as createAkv } from "@cocalc/backend/nats/sync"; +import { dkv as createDkv, akv as createAkv } from "@cocalc/backend/conat/sync"; import { once } from "@cocalc/util/async-utils"; -import { getMaxPayload } from "@cocalc/nats/util"; +import { wait } from "@cocalc/backend/conat/test/util"; +import { before, after, connect } from "@cocalc/backend/conat/test/setup"; + +beforeAll(before); describe("test basics with an akv", () => { let kv; @@ -30,6 +33,10 @@ describe("test basics with an akv", () => { expect(await kv.get("y")).toBe(null); }); + it("gets all keys", async () => { + expect(await kv.keys()).toEqual(["x", "y"]); + }); + it("check that deleting a value works", async () => { await kv.delete("x"); expect(await kv.get("x")).toBe(undefined); @@ -84,6 +91,17 @@ describe("test interop with a dkv", () => { ); }); + it("check sqlite query fails", async () => { + await expect(async () => { + await akv.sqlite("SELECT count(*) AS n FROM messages"); + }).rejects.toThrowError("sqlite command not currently supported"); + }); + // intentially skipped! + it.skip("check sqlite query works", async () => { + const v = await akv.sqlite("SELECT count(*) AS n FROM messages"); + expect(v[0].n).toBe((await akv.keys()).length); + }); + it("cleans up", async () => { dkv.clear(); await dkv.close(); @@ -94,8 +112,10 @@ describe("testing writing and reading chunked data", () => { let maxPayload = 0; it("sanity check on the max payload", async () => { - maxPayload = await getMaxPayload(); - expect(maxPayload).toBeGreaterThan(1000000); + const client = connect(); + await wait({ until: () => client.info != null }); + maxPayload = client.info?.max_payload ?? 0; + expect(maxPayload).toBeGreaterThan(500000); }); let kv; @@ -113,3 +133,5 @@ describe("testing writing and reading chunked data", () => { await k.close(); }); }); + +afterAll(after); diff --git a/src/packages/backend/conat/test/sync/binary.test.ts b/src/packages/backend/conat/test/sync/binary.test.ts new file mode 100644 index 0000000000..b19bf747ce --- /dev/null +++ b/src/packages/backend/conat/test/sync/binary.test.ts @@ -0,0 +1,95 @@ +/* +Test using binary data with kv and stream. + +You can just store binary directly in kv and stream, since MsgPack +handles buffers just fine. + +DEVELOPMENT: + +pnpm test ./binary.test.ts +*/ + +import { dstream, dkv } from "@cocalc/backend/conat/sync"; +import { before, after, connect } from "@cocalc/backend/conat/test/setup"; +import { wait } from "@cocalc/backend/conat/test/util"; + +beforeAll(before); + +let maxPayload; + +describe("test binary data with a dstream", () => { + let s, + name = `${Math.random()}`; + + // binary values come back as Uint8Array with streams + const data10 = Uint8Array.from(Buffer.from("x".repeat(10))); + it("creates a binary dstream and writes/then reads binary data to/from it", async () => { + s = await dstream({ name }); + expect(s.name).toBe(name); + s.publish(data10); + expect(s.get(0).length).toEqual(data10.length); + await s.close(); + s = await dstream({ name }); + expect(s.get(0).length).toEqual(data10.length); + }); + + it("sanity check on the max payload", async () => { + const client = connect(); + await wait({ until: () => client.info != null }); + maxPayload = client.info?.max_payload ?? 0; + expect(maxPayload).toBeGreaterThan(500000); + }); + + it("writes large binary data to the dstream to test chunking", async () => { + s = await dstream({ name }); + const data = Uint8Array.from(Buffer.from("x".repeat(maxPayload * 1.5))); + s.publish(data); + expect(s.get(s.length - 1).length).toEqual(data.length); + await s.close(); + s = await dstream({ name }); + expect(s.get(s.length - 1).length).toEqual(data.length); + }); + + it("clean up", async () => { + await s.delete({all:true}); + await s.close(); + }); +}); + +describe("test binary data with a dkv", () => { + let s, + name = `${Math.random()}`; + + // binary values come back as buffer with dkv + const data10 = Buffer.from("x".repeat(10)); + + it("creates a binary dkv and writes/then reads binary data to/from it", async () => { + s = await dkv({ name }); + expect(s.name).toBe(name); + s.x = data10; + expect(s.x).toEqual(data10); + expect(s.x.length).toEqual(data10.length); + await s.close(); + s = await dkv({ name }); + await wait({ until: () => s.has("x") }); + expect(s.x.length).toEqual(data10.length); + expect(s.x).toEqual(data10); + }); + + it("writes large binary data to the dkv to test chunking", async () => { + s = await dkv({ name }); + const data = Uint8Array.from(Buffer.from("x".repeat(maxPayload * 1.5))); + s.y = data; + expect(s.y.length).toEqual(data.length); + await s.close(); + s = await dkv({ name }); + expect(s.y.length).toEqual(data.length); + }); + + it("clean up", async () => { + await s.clear(); + await s.close(); + }); +}); + +afterAll(after); diff --git a/src/packages/backend/nats/test/sync/dko.test.ts b/src/packages/backend/conat/test/sync/dko.test.ts similarity index 77% rename from src/packages/backend/nats/test/sync/dko.test.ts rename to src/packages/backend/conat/test/sync/dko.test.ts index 562e5e82eb..fc29092cdc 100644 --- a/src/packages/backend/nats/test/sync/dko.test.ts +++ b/src/packages/backend/conat/test/sync/dko.test.ts @@ -3,12 +3,15 @@ Testing basic ops with dko = distributed key:object store with SPARSE updates. DEVELOPMENT: -pnpm exec jest --forceExit "dko.test.ts" +pnpm test ./dko.test.ts */ -import { dko as createDko } from "@cocalc/backend/nats/sync"; -import { getMaxPayload } from "@cocalc/nats/util"; +import { dko as createDko } from "@cocalc/backend/conat/sync"; +import { before, after, connect } from "@cocalc/backend/conat/test/setup"; +import { wait } from "@cocalc/backend/conat/test/util"; + +beforeAll(before); describe("create a public dko and do a basic operation", () => { let kv; @@ -72,12 +75,20 @@ describe("test a large value that requires chunking", () => { let kv; const name = `test-${Math.random()}`; + let maxPayload = 0; + + it("sanity check on the max payload", async () => { + const client = connect(); + await wait({ until: () => client.info != null }); + maxPayload = client.info?.max_payload ?? 0; + expect(maxPayload).toBeGreaterThan(500000); + }); + it("creates the dko", async () => { kv = await createDko({ name }); expect(kv.getAll()).toEqual({}); - const n = await getMaxPayload(); - const big = { foo: "b".repeat(n * 1.3) }; + const big = { foo: "b".repeat(maxPayload * 1.3) }; kv.set("big", big); expect(kv.get("big")).toEqual(big); }); @@ -87,3 +98,5 @@ describe("test a large value that requires chunking", () => { await kv.close(); }); }); + +afterAll(after); diff --git a/src/packages/backend/conat/test/sync/dkv-basics.test.ts b/src/packages/backend/conat/test/sync/dkv-basics.test.ts new file mode 100644 index 0000000000..091143dbc4 --- /dev/null +++ b/src/packages/backend/conat/test/sync/dkv-basics.test.ts @@ -0,0 +1,110 @@ +/* +DEVELOPMENT: + +pnpm test ./dkv-basics.test.ts + +*/ +import { DKV } from "@cocalc/conat/sync/dkv"; +import { connect, before, after } from "@cocalc/backend/conat/test/setup"; +import { wait } from "@cocalc/backend/conat/test/util"; + +beforeAll(before); + +describe("create a general kv and do basic operations", () => { + const name = "test"; + let client, kv; + + it("creates the kv", async () => { + client = connect(); + kv = new DKV({ name, client }); + await kv.init(); + }); + + it("sets and deletes a key", async () => { + expect(kv.has("foo")).toBe(false); + kv.set("foo", 10); + expect(kv.has("foo")).toBe(true); + expect(kv.getAll()).toEqual({ foo: 10 }); + kv.delete("foo"); + expect(kv.getAll()).toEqual({}); + kv.set("co", "nat"); + await kv.save(); + }); + + let client2, kv2; + it("view the kv from a second client via sync, set a date value and observe it syncs", async () => { + client2 = connect(); + kv2 = new DKV({ name, client: client2 }); + await kv2.init(); + expect(kv2.getAll()).toEqual({ co: "nat" }); + + const date = new Date("1974"); + kv2.set("x", date); + // replication is not instant + expect(kv.get("x")).toBe(undefined); + await kv2.save(); + await wait({ until: () => kv.get("x") }); + expect(kv.getAll()).toEqual({ x: date, co: "nat" }); + }); + + it("checks that clear works", async () => { + kv.clear(); + await kv.save(); + expect(kv.length).toBe(0); + await wait({ until: () => kv2.length == 0 }); + }); + + it("checks that time works", async () => { + const key = "x".repeat(10000); + kv.set(key, "big key"); + await kv.save(); + expect(Math.abs(Date.now() - kv.time(key))).toBeLessThan(300); + expect(kv.time()).toEqual({ [key]: kv.time(key) }); + expect(kv2.time()).toEqual({ [key]: kv2.time(key) }); + }); + + it("check headers work", async () => { + kv.set("big", "headers", { headers: { silicon: "valley", x: { y: "z" } } }); + // this uses local state + expect(kv.headers("big")).toEqual({ silicon: "valley", x: { y: "z" } }); + await kv.save(); + // this uses what got echoed back from server + expect(kv.headers("big")).toEqual({ silicon: "valley", x: { y: "z" } }); + expect(kv2.headers("big")).toEqual({ silicon: "valley", x: { y: "z" } }); + }); + + it("checks hasUnsavedChanges works", async () => { + expect(kv.hasUnsavedChanges()).toBe(false); + kv.set("unsaved", ["changes"]); + expect(kv.hasUnsavedChanges()).toBe(true); + expect(kv.unsavedChanges()).toEqual(["unsaved"]); + expect(kv2.hasUnsavedChanges()).toBe(false); + await kv.save(); + expect(kv.hasUnsavedChanges()).toBe(false); + }); + + it("checks stats works", () => { + const { bytes, count } = kv.stats(); + expect(bytes).not.toBeNaN(); + expect(bytes).toBeGreaterThan(0); + expect(count).not.toBeNaN(); + expect(count).toBeGreaterThan(0); + }); + + it("checks seq is ", async () => { + kv.set("x", "11"); + await kv.save(); + const seq = kv.seq("x"); + expect(seq).toBeGreaterThan(0); + kv.set("x", 15); + await kv.save(); + expect(kv.seq("x") - seq).toBe(1); + }); + + it("clean up", async () => { + kv.close(); + client.close(); + }); +}); + +afterAll(after); diff --git a/src/packages/backend/nats/test/sync/dkv-merge.test.ts b/src/packages/backend/conat/test/sync/dkv-merge.test.ts similarity index 92% rename from src/packages/backend/nats/test/sync/dkv-merge.test.ts rename to src/packages/backend/conat/test/sync/dkv-merge.test.ts index c80a73c044..e78c6e876a 100644 --- a/src/packages/backend/nats/test/sync/dkv-merge.test.ts +++ b/src/packages/backend/conat/test/sync/dkv-merge.test.ts @@ -3,16 +3,19 @@ Testing merge conflicts with dkv DEVELOPMENT: -pnpm exec jest --watch --forceExit "dkv-merge.test.ts" +pnpm test ./dkv-merge.test.ts */ -import { dkv as createDkv } from "@cocalc/backend/nats/sync"; +import { dkv as createDkv } from "@cocalc/backend/conat/sync"; import { once } from "@cocalc/util/async-utils"; import { diff_match_patch } from "@cocalc/util/dmp"; +import { before, after } from "@cocalc/backend/conat/test/setup"; + +beforeAll(before); async function getKvs(opts?) { - const name = `test-${Math.random()}`; + const name = `test${Math.round(1000 * Math.random())}`; // We disable autosave so that we have more precise control of how conflicts // get resolved, etc. for testing purposes. const kv1 = await createDkv({ @@ -27,6 +30,10 @@ async function getKvs(opts?) { ...opts, noCache: true, }); + // @ts-ignore -- a little double check + if (kv1.kv === kv2.kv) { + throw Error("must not being using same underlying kv"); + } return { kv1, kv2 }; } @@ -181,3 +188,5 @@ describe("test a 3-way merge of that merges objects", () => { expect(kv2.get("x")).toEqual({ a: 5, b: 15, c: 12, d: 3 }); }); }); + +afterAll(after); diff --git a/src/packages/backend/nats/test/sync/dkv.test.ts b/src/packages/backend/conat/test/sync/dkv.test.ts similarity index 79% rename from src/packages/backend/nats/test/sync/dkv.test.ts rename to src/packages/backend/conat/test/sync/dkv.test.ts index 3c246e9f6f..1efae53ff7 100644 --- a/src/packages/backend/nats/test/sync/dkv.test.ts +++ b/src/packages/backend/conat/test/sync/dkv.test.ts @@ -3,20 +3,24 @@ Testing basic ops with dkv DEVELOPMENT: -pnpm test dkv.test.ts +pnpm test ./dkv.test.ts */ -import { dkv as createDkv } from "@cocalc/backend/nats/sync"; +import { dkv as createDkv } from "@cocalc/backend/conat/sync"; import { once } from "@cocalc/util/async-utils"; import { delay } from "awaiting"; +import { before, after, connect } from "@cocalc/backend/conat/test/setup"; +import { wait } from "@cocalc/backend/conat/test/util"; + +beforeAll(before); describe("create a public dkv and do basic operations", () => { let kv; const name = `test-${Math.random()}`; it("creates the dkv", async () => { - kv = await createDkv({ name }); + kv = await createDkv({ name, noCache: true }); expect(kv.getAll()).toEqual({}); }); @@ -28,14 +32,14 @@ describe("create a public dkv and do basic operations", () => { it("waits for the dkv to be longterm saved, then closing and recreates the kv and verifies that the key is there.", async () => { await kv.save(); kv.close(); - kv = await createDkv({ name }); + kv = await createDkv({ name, noCache: true }); expect(kv.a).toEqual(10); }); it("closes the kv", async () => { await kv.clear(); - kv.close(); - expect(kv.getAll).toThrow("closed"); + await kv.close(); + expect(() => kv.getAll()).toThrow("closed"); }); }); @@ -52,14 +56,15 @@ describe("opens a dkv twice and verifies the cache works and is reference counte }); it("closes kv1 (one reference)", async () => { - kv1.close(); + await kv1.close(); expect(kv2.getAll).not.toThrow(); }); it("closes kv2 (another reference)", async () => { - kv2.close(); + await kv2.close(); + await delay(1); // really closed! - expect(kv2.getAll).toThrow("closed"); + expect(() => kv2.getAll()).toThrow("closed"); }); it("create and see it is new now", async () => { @@ -244,18 +249,19 @@ describe("set several items, confirm write worked, save, and confirm they are st expect(Object.keys(kv.getAll()).length).toEqual(count); expect(kv.getAll()).toEqual(obj); await kv.save(); - expect(Date.now() - t0).toBeLessThan(1000); + expect(Date.now() - t0).toBeLessThan(2000); + await wait({ until: () => Object.keys(kv.getAll()).length == count }); expect(Object.keys(kv.getAll()).length).toEqual(count); // // the local state maps should also get cleared quickly, // // but there is no event for this, so we loop: // @ts-ignore: saved is private - while (Object.keys(kv.generalDKV.local).length > 0) { + while (Object.keys(kv.local).length > 0) { await delay(10); } // @ts-ignore: local is private - expect(kv.generalDKV.local).toEqual({}); + expect(kv.local).toEqual({}); // @ts-ignore: saved is private - expect(kv.generalDKV.saved).toEqual({}); + expect(kv.saved).toEqual({}); await kv.clear(); await kv.close(); @@ -264,7 +270,7 @@ describe("set several items, confirm write worked, save, and confirm they are st describe("do an insert and clear test", () => { const name = `test-${Math.random()}`; - const count = 100; + const count = 25; it(`adds ${count} entries, saves, clears, and confirms empty`, async () => { const kv = await createDkv({ name }); expect(kv.getAll()).toEqual({}); @@ -273,6 +279,7 @@ describe("do an insert and clear test", () => { } expect(Object.keys(kv.getAll()).length).toEqual(count); await kv.save(); + await wait({ until: () => Object.keys(kv.getAll()).length == count }); expect(Object.keys(kv.getAll()).length).toEqual(count); kv.clear(); expect(kv.getAll()).toEqual({}); @@ -333,10 +340,10 @@ describe("create many distinct clients at once, write to all of them, and see th }); }); -describe("tests involving null/undefined values", () => { +describe("tests involving null/undefined values and delete", () => { let kv1; let kv2; - const name = `test-${Math.random()}`; + const name = `test-${Math.round(100 * Math.random())}`; it("creates the dkv twice", async () => { kv1 = await createDkv({ name, noAutosave: true, noCache: true }); @@ -345,16 +352,17 @@ describe("tests involving null/undefined values", () => { expect(kv1 === kv2).toBe(false); }); - it("sets a value to null, which is fully supported like any other value", () => { - kv1.a = null; - expect(kv1.a).toBe(null); - expect(kv1.a === null).toBe(true); - expect(kv1.length).toBe(1); - }); + // it("sets a value to null, which is fully supported like any other value", () => { + // kv1.a = null; + // expect(kv1.a).toBe(null); + // expect(kv1.a === null).toBe(true); + // expect(kv1.length).toBe(1); + // }); it("make sure null value sync's as expected", async () => { - kv1.save(); - await once(kv2, "change"); + kv1.a = null; + await kv1.save(); + await wait({ until: () => kv2.has("a") }); expect(kv2.a).toBe(null); expect(kv2.a === null).toBe(true); expect(kv2.length).toBe(1); @@ -362,6 +370,7 @@ describe("tests involving null/undefined values", () => { it("sets a value to undefined, which is the same as deleting a value", () => { kv1.a = undefined; + expect(kv1.has("a")).toBe(false); expect(kv1.a).toBe(undefined); expect(kv1.a === undefined).toBe(true); expect(kv1.length).toBe(0); @@ -369,8 +378,8 @@ describe("tests involving null/undefined values", () => { }); it("make sure undefined (i.e., delete) sync's as expected", async () => { - kv1.save(); - await once(kv2, "change"); + await kv1.save(); + await wait({ until: () => kv2.a === undefined }); expect(kv2.a).toBe(undefined); expect(kv2.a === undefined).toBe(true); expect(kv2.length).toBe(0); @@ -395,72 +404,61 @@ describe("tests involving null/undefined values", () => { }); }); -import { numSubscriptions } from "@cocalc/nats/client"; +describe("ensure there isn't a really obvious subscription leak", () => { + let client; -describe("ensure there are no NATS subscription leaks", () => { - // There is some slight slack at some point due to the clock stuff, - // inventory, etc. It is constant and small, whereas we allocate - // a large number of kv's in the test. - const SLACK = 4; + it("create a client, which initially has only one subscription (the inbox)", async () => { + client = connect(); + expect(client.numSubscriptions()).toBe(1); + }); - it("creates and closes many kv's and checks there is no leak", async () => { - const before = numSubscriptions(); - const COUNT = 20; + const count = 10; + it(`creates and closes ${count} streams and checks there is no leak`, async () => { + const before = client.numSubscriptions(); // create const a: any = []; - for (let i = 0; i < COUNT; i++) { + for (let i = 0; i < count; i++) { a[i] = await createDkv({ name: `${Math.random()}`, noAutosave: true, noCache: true, }); } - for (let i = 0; i < COUNT; i++) { + // NOTE: in fact there's very unlikely to be a subscription leak, since + // dkv's don't use new subscriptions -- they all use requestMany instead + // to a common inbox prefix, and there's just one subscription for an inbox. + expect(client.numSubscriptions()).toEqual(before); + for (let i = 0; i < count; i++) { await a[i].close(); } - const after = numSubscriptions(); - expect(Math.abs(after - before)).toBeLessThan(SLACK); + const after = client.numSubscriptions(); + expect(after).toBe(before); + + // also check count on server went down. + expect((await client.getSubscriptions()).size).toBe(before); }); it("does another leak test, but with a set operation each time", async () => { - const before = numSubscriptions(); - const COUNT = 20; + const before = client.numSubscriptions(); // create const a: any = []; - for (let i = 0; i < COUNT; i++) { + for (let i = 0; i < count; i++) { a[i] = await createDkv({ name: `${Math.random()}`, noAutosave: true, noCache: true, }); - a[i].set(i, i); + a[i].set(`${i}`, i); await a[i].save(); } - for (let i = 0; i < COUNT; i++) { - a[i].clear(); - await a[i].close(); - } - const after = numSubscriptions(); - expect(Math.abs(after - before)).toBeLessThan(SLACK); - }); - - it("does another leak test, but opening and immediately closing and doing a set operation each time", async () => { - const before = numSubscriptions(); - const COUNT = 20; - // create - const a: any = []; - for (let i = 0; i < COUNT; i++) { - a[i] = await createDkv({ - name: `${Math.random()}`, - noAutosave: true, - noCache: true, - }); - a[i].set(i, i); - await a[i].save(); - a[i].clear(); + // this isn't going to be a problem: + expect(client.numSubscriptions()).toEqual(before); + for (let i = 0; i < count; i++) { await a[i].close(); } - const after = numSubscriptions(); - expect(Math.abs(after - before)).toBeLessThan(SLACK); + const after = client.numSubscriptions(); + expect(after).toBe(before); }); }); + +afterAll(after); diff --git a/src/packages/backend/nats/test/sync/estream.test.ts b/src/packages/backend/conat/test/sync/dstream-ephemeral.test.ts similarity index 71% rename from src/packages/backend/nats/test/sync/estream.test.ts rename to src/packages/backend/conat/test/sync/dstream-ephemeral.test.ts index 6d5104f29d..d281605527 100644 --- a/src/packages/backend/nats/test/sync/estream.test.ts +++ b/src/packages/backend/conat/test/sync/dstream-ephemeral.test.ts @@ -6,16 +6,25 @@ The first tests are initially similar to those for dstream.test.ts, but with DEVELOPMENT: -pnpm test estream.test.ts +pnpm test ./dstream-estream.test.ts */ +import { connect, before, after } from "@cocalc/backend/conat/test/setup"; import { createDstreamEphemeral as create } from "./util"; -import { dstream as createDstream0 } from "@cocalc/backend/nats/sync"; +import { dstream as createDstream0 } from "@cocalc/backend/conat/sync"; import { once } from "@cocalc/util/async-utils"; +beforeAll(before); + async function createDstream(opts) { - return await createDstream0({ ephemeral: true, leader: true, ...opts }); + return await createDstream0({ + noCache: true, + noAutosave: true, + ephemeral: true, + leader: true, + ...opts, + }); } describe("create a dstream and do some basic operations", () => { @@ -65,14 +74,11 @@ describe("create a dstream and do some basic operations", () => { describe("create two dstreams and observe sync between them", () => { const name = `test-${Math.random()}`; let s1, s2; + let client2; it("creates two distinct dstream objects s1 and s2 with the same name", async () => { - s1 = await createDstream({ name, noAutosave: true, noCache: true }); - s2 = await createDstream({ - name, - noAutosave: true, - noCache: true, - leader: false, - }); + client2 = connect(); + s1 = await createDstream({ name }); + s2 = await createDstream({ client: client2, name, leader: false }); // definitely distinct expect(s1 === s2).toBe(false); }); @@ -108,21 +114,50 @@ describe("create two dstreams and observe sync between them", () => { expect(s2.getAll()).toEqual(["hello", "hi from s2"]); }); - it("write to s1 and s2 and save at the same time and see some 'random choice' of order gets imposed by the server", async () => { + it("cleans up", () => { + s1.close(); + s2.close(); + client2.close(); + }); +}); + +describe("create two dstreams and test sync with parallel save", () => { + const name = `test-${Math.random()}`; + let s1, s2; + let client2; + it("creates two distinct dstream objects s1 and s2 with the same name", async () => { + client2 = connect(); + s1 = await createDstream({ name }); + s2 = await createDstream({ client: client2, name, leader: false }); + // definitely distinct + expect(s1 === s2).toBe(false); + }); + + it("write to s1 and s2 and save at the same time", async () => { s1.push("s1"); s2.push("s2"); // our changes are reflected locally - expect(s1.getAll()).toEqual(["hello", "hi from s2", "s1"]); - expect(s2.getAll()).toEqual(["hello", "hi from s2", "s2"]); + expect(s1.getAll()).toEqual(["s1"]); + expect(s2.getAll()).toEqual(["s2"]); // now kick off the two saves *in parallel* s1.save(); s2.save(); await once(s1, "change"); - while (s2.length != s1.length) { - await once(s2, "change"); + while (s2.length != 2 || s1.length != 2) { + if (s1.length > 2 || s2.length > 2) { + throw Error("bug"); + } + if (s2.length < 2) { + await once(s2, "change"); + } else if (s1.length < 2) { + await once(s1, "change"); + } } expect(s1.getAll()).toEqual(s2.getAll()); - expect(s1.getAll()).toEqual(["hello", "hi from s2", "s1", "s2"]); + }); + + it("cleans up", () => { + client2.close(); }); }); @@ -169,7 +204,7 @@ describe("get sequence number and time of message", () => { if (s.time(1) == null) { await once(s, "change"); } - expect(s.time(0).getTime()).toBeLessThan(s.time(1).getTime()); + expect(s.time(0).getTime()).toBeLessThanOrEqual(s.time(1).getTime()); }); }); @@ -195,7 +230,9 @@ describe("testing start_seq", () => { let t; it("it opens another copy of the stream, but starting with the last sequence number, so only one message", async () => { + const client = connect(); t = await createDstream({ + client, name, noAutosave: true, leader: false, @@ -227,8 +264,9 @@ describe("a little bit of a stress test", () => { s.push({ i }); } expect(s.length).toBe(count); - // NOTE: warning -- this is **MUCH SLOWER**, e.g., 10x slower, - // running under jest, hence why count is small. + // [ ] TODO rewrite this save to send everything in a single message + // which gets chunked, will we be much faster, then change the count + // above to 1000 or 10000. await s.save(); expect(s.length).toBe(count); }); @@ -249,38 +287,39 @@ describe("dstream typescript test", () => { }); }); -import { numSubscriptions } from "@cocalc/nats/client"; +describe("ensure there isn't a really obvious subscription leak", () => { + let client; -describe("ensure there are no NATS subscription leaks", () => { - // There is some slight slack at some point due to the clock stuff, - // inventory, etc. It is constant and small, whereas we allocate - // a large number of kv's in the test. - const SLACK = 4; + it("create a client, which initially has only one subscription (the inbox)", async () => { + client = connect(); + expect(client.numSubscriptions()).toBe(1); + }); - it("creates and closes many kv's and checks there is no leak", async () => { - const before = numSubscriptions(); - const COUNT = 20; + const count = 100; + it(`creates and closes ${count} streams and checks there is no leak`, async () => { + const before = client.numSubscriptions(); // create const a: any = []; - for (let i = 0; i < COUNT; i++) { + for (let i = 0; i < count; i++) { a[i] = await createDstream({ name: `${Math.random()}`, - noAutosave: true, }); } - for (let i = 0; i < COUNT; i++) { + for (let i = 0; i < count; i++) { await a[i].close(); } - const after = numSubscriptions(); - expect(Math.abs(after - before)).toBeLessThan(SLACK); + const after = client.numSubscriptions(); + expect(after).toBe(before); + + // also check count on server went down. + expect((await client.getSubscriptions()).size).toBe(before); }); - it("does another leak test, but with a set operation each time", async () => { - const before = numSubscriptions(); - const COUNT = 20; + it("does another leak test, but with a publish operation each time", async () => { + const before = client.numSubscriptions(); // create const a: any = []; - for (let i = 0; i < COUNT; i++) { + for (let i = 0; i < count; i++) { a[i] = await createDstream({ name: `${Math.random()}`, noAutosave: true, @@ -288,11 +327,12 @@ describe("ensure there are no NATS subscription leaks", () => { a[i].publish(i); await a[i].save(); } - for (let i = 0; i < COUNT; i++) { - await a[i].purge(); + for (let i = 0; i < count; i++) { await a[i].close(); } - const after = numSubscriptions(); - expect(Math.abs(after - before)).toBeLessThan(SLACK); + const after = client.numSubscriptions(); + expect(after).toBe(before); }); }); + +afterAll(after); diff --git a/src/packages/backend/nats/test/sync/dstream.test.ts b/src/packages/backend/conat/test/sync/dstream.test.ts similarity index 61% rename from src/packages/backend/nats/test/sync/dstream.test.ts rename to src/packages/backend/conat/test/sync/dstream.test.ts index 2154b98d48..a4f500eb39 100644 --- a/src/packages/backend/nats/test/sync/dstream.test.ts +++ b/src/packages/backend/conat/test/sync/dstream.test.ts @@ -1,15 +1,18 @@ /* -Testing basic ops with dsteam (distributed streams) +Testing basic ops with *persistent* dstreams. DEVELOPMENT: -pnpm test dstream.test.ts +pnpm test ./dstream.test.ts */ import { createDstream as create } from "./util"; -import { dstream as createDstream } from "@cocalc/backend/nats/sync"; +import { dstream as createDstream } from "@cocalc/backend/conat/sync"; import { once } from "@cocalc/util/async-utils"; +import { connect, before, after, wait } from "@cocalc/backend/conat/test/setup"; + +beforeAll(before); describe("create a dstream and do some basic operations", () => { let s; @@ -93,13 +96,15 @@ describe("create two dstreams and observe sync between them", () => { // now kick off the two saves *in parallel* s1.save(); s2.save(); - await once(s1, "change"); - if (s2.length != s1.length) { - await once(s2, "change"); - } + await wait({ + until: () => { + return s1.length == 4 && s2.length == 4; + }, + }); expect(s1.getAll()).toEqual(s2.getAll()); - // in fact s1,s2 is the order since we called s1.save first: - expect(s1.getAll()).toEqual(["hello", "hi from s2", "s1", "s2"]); + expect(new Set(s1.getAll())).toEqual( + new Set(["hello", "hi from s2", "s1", "s2"]), + ); }); }); @@ -155,7 +160,8 @@ describe("closing also saves by default, but not if autosave is off", () => { const name = `test-${Math.random()}`; it("creates stream and write a message", async () => { - s = await createDstream({ name, noAutosave: false /* the default */ }); + // noAutosave: false is the default: + s = await createDstream({ name, noAutosave: false }); s.push(389); }); @@ -215,6 +221,36 @@ describe("testing start_seq", () => { expect(s.getAll()).toEqual([2, 3]); expect(s.start_seq).toEqual(seq[1]); }); + + it("a bigger example involving loading older messages", async () => { + for (let i = 4; i < 100; i++) { + s.push(i); + } + await s.save(); + const last = s.seq(s.length - 1); + const mid = s.seq(s.length - 50); + await s.close(); + s = await createDstream({ + name, + noAutosave: true, + start_seq: last, + }); + expect(s.length).toBe(1); + expect(s.getAll()).toEqual([99]); + expect(s.start_seq).toEqual(last); + + await s.load({ start_seq: mid }); + expect(s.length).toEqual(50); + expect(s.start_seq).toEqual(mid); + for (let i = 0; i < 50; i++) { + expect(s.get(i)).toBe(i + 50); + } + + await s.load({ start_seq: 0 }); + for (let i = 0; i < 99; i++) { + expect(s.get(i)).toBe(i + 1); + } + }); }); describe("a little bit of a stress test", () => { @@ -252,38 +288,39 @@ describe("dstream typescript test", () => { }); }); -import { numSubscriptions } from "@cocalc/nats/client"; +describe("ensure there isn't a really obvious subscription leak", () => { + let client; -describe("ensure there are no NATS subscription leaks", () => { - // There is some slight slack at some point due to the clock stuff, - // inventory, etc. It is constant and small, whereas we allocate - // a large number of kv's in the test. - const SLACK = 4; + it("create a client, which initially has only one subscription (the inbox)", async () => { + client = connect(); + expect(client.numSubscriptions()).toBe(1); + }); - it("creates and closes many kv's and checks there is no leak", async () => { - const before = numSubscriptions(); - const COUNT = 20; + const count = 100; + it(`creates and closes ${count} streams and checks there is no leak`, async () => { + const before = client.numSubscriptions(); // create const a: any = []; - for (let i = 0; i < COUNT; i++) { + for (let i = 0; i < count; i++) { a[i] = await createDstream({ name: `${Math.random()}`, - noAutosave: true, }); } - for (let i = 0; i < COUNT; i++) { + for (let i = 0; i < count; i++) { await a[i].close(); } - const after = numSubscriptions(); - expect(Math.abs(after - before)).toBeLessThan(SLACK); + const after = client.numSubscriptions(); + expect(after).toBe(before); + + // also check count on server went down. + expect((await client.getSubscriptions()).size).toBe(before); }); - it("does another leak test, but with a set operation each time", async () => { - const before = numSubscriptions(); - const COUNT = 20; + it("does another leak test, but with a publish operation each time", async () => { + const before = client.numSubscriptions(); // create const a: any = []; - for (let i = 0; i < COUNT; i++) { + for (let i = 0; i < count; i++) { a[i] = await createDstream({ name: `${Math.random()}`, noAutosave: true, @@ -291,11 +328,90 @@ describe("ensure there are no NATS subscription leaks", () => { a[i].publish(i); await a[i].save(); } - for (let i = 0; i < COUNT; i++) { - await a[i].purge(); + for (let i = 0; i < count; i++) { await a[i].close(); } - const after = numSubscriptions(); - expect(Math.abs(after - before)).toBeLessThan(SLACK); + const after = client.numSubscriptions(); + expect(after).toBe(before); }); }); + +describe("test delete of messages from stream", () => { + let client1, client2, s1, s2; + const name = "test-delete"; + it("create two clients", async () => { + client1 = connect(); + client2 = connect(); + s1 = await createDstream({ + client: client1, + name, + noAutosave: true, + noCache: true, + }); + s2 = await createDstream({ + client: client2, + name, + noAutosave: true, + noCache: true, + }); + }); + + it("writes message one, confirm seen by other, then delete and confirm works", async () => { + s1.push("hello"); + await s1.save(); + await wait({ until: () => s2.length > 0 }); + s1.delete({ all: true }); + await wait({ until: () => s2.length == 0 && s1.length == 0 }); + }); + + it("same delete test as above but with a few more items and delete on s2 instead", async () => { + for (let i = 0; i < 10; i++) { + s1.push(i); + } + await s1.save(); + await wait({ until: () => s2.length == 10 }); + s2.delete({ all: true }); + await wait({ until: () => s2.length == 0 && s1.length == 0 }); + }); + + it("delete specific index", async () => { + s1.push("x", "y", "z"); + await s1.save(); + await wait({ until: () => s2.length == 3 }); + s2.delete({ last_index: 1 }); + await wait({ until: () => s2.length == 1 && s1.length == 1 }); + expect(s1.get()).toEqual(["z"]); + }); + + it("delete specific seq number", async () => { + s1.push("x", "y"); + await s1.save(); + expect(s1.get()).toEqual(["z", "x", "y"]); + const seq = s1.seq(1); + const { seqs } = await s1.delete({ seq }); + expect(seqs).toEqual([seq]); + await wait({ until: () => s2.length == 2 && s1.length == 2 }); + expect(s1.get()).toEqual(["z", "y"]); + }); + + it("delete up to a sequence number", async () => { + s1.push("x", "y"); + await s1.save(); + expect(s1.get()).toEqual(["z", "y", "x", "y"]); + const seq = s1.seq(1); + const { seqs } = await s1.delete({ last_seq: seq }); + expect(seqs.length).toBe(2); + expect(seqs[1]).toBe(seq); + expect(s1.get()).toEqual(["x", "y"]); + }); + + it("delete specific key", async () => { + s1.stream.setKv("my-key", 5); + s1.delete({ key: "my-key" }); + await s1.save(); + expect(s1.stream.getKv("my-key")).toBe(undefined); + expect(s2.stream.getKv("my-key")).toBe(undefined); + }); +}); + +afterAll(after); diff --git a/src/packages/backend/conat/test/sync/headers.test.ts b/src/packages/backend/conat/test/sync/headers.test.ts new file mode 100644 index 0000000000..4788c5caf6 --- /dev/null +++ b/src/packages/backend/conat/test/sync/headers.test.ts @@ -0,0 +1,80 @@ +/* +Test using user-defined headers with kv and stream. + +DEVELOPMENT: + +pnpm test ./headers.test.ts +*/ + +import { dstream, dkv } from "@cocalc/backend/conat/sync"; +import { once } from "@cocalc/util/async-utils"; +import { before, after, wait } from "@cocalc/backend/conat/test/setup"; + +beforeAll(before); + +describe("test headers with a dstream", () => { + let s; + const name = `${Math.random()}`; + it("creates a dstream and writes a value without a header", async () => { + s = await dstream({ name }); + expect(s.headers(s.length - 1)).toBe(undefined); + s.publish("x"); + await once(s, "change"); + const h = s.headers(s.length - 1); + for (const k in h ?? {}) { + if (!k.startsWith("Nats-") && !k.startsWith("CoCalc-")) { + throw Error("headers must start with Nats- or CoCalc-"); + } + } + }); + + it("writes a value with a header", async () => { + s.publish("y", { headers: { my: "header" } }); + // header isn't visible until ack'd by server + // NOTE: not optimal but this is what is implemented and documented! + expect(s.headers(s.length - 1)).toEqual(undefined); + await wait({ until: () => s.headers(s.length - 1) != null }); + expect(s.headers(s.length - 1)).toEqual( + expect.objectContaining({ my: "header" }), + ); + }); + + it("header still there", async () => { + await s.close(); + s = await dstream({ name }); + expect(s.headers(s.length - 1)).toEqual( + expect.objectContaining({ my: "header" }), + ); + }); + + it("clean up", async () => { + await s.delete({ all: true }); + }); +}); + +describe("test headers with a dkv", () => { + let s; + const name = `${Math.random()}`; + it("creates a dkv and writes a value without a header", async () => { + s = await dkv({ name }); + s.set("x", 10); + await once(s, "change"); + const h = s.headers("x"); + for (const k in h ?? {}) { + if (!k.startsWith("Nats-") && !k.startsWith("CoCalc-")) { + throw Error("headers must start with Nats- or CoCalc-"); + } + } + }); + + it("writes a value with a header - defined even before saving", async () => { + s.set("y", 20, { headers: { my: "header" } }); + expect(s.headers("y")).toEqual(expect.objectContaining({ my: "header" })); + }); + + it("clean up", async () => { + await s.clear(); + }); +}); + +afterAll(after); diff --git a/src/packages/backend/conat/test/sync/limits.test.ts b/src/packages/backend/conat/test/sync/limits.test.ts new file mode 100644 index 0000000000..b8014d624d --- /dev/null +++ b/src/packages/backend/conat/test/sync/limits.test.ts @@ -0,0 +1,334 @@ +/* +Testing the limits. + +DEVELOPMENT: + +pnpm test ./limits.test.ts + +*/ + +import { dkv as createDkv } from "@cocalc/backend/conat/sync"; +import { dstream as createDstream } from "@cocalc/backend/conat/sync"; +import { delay } from "awaiting"; +import { once } from "@cocalc/util/async-utils"; +import { before, after, wait, connect } from "@cocalc/backend/conat/test/setup"; + +beforeAll(before); + +describe("create a dkv with limit on the total number of keys, and confirm auto-delete works", () => { + let kv; + const name = `test-${Math.random()}`; + + it("creates the dkv", async () => { + kv = await createDkv({ name, config: { max_msgs: 2 } }); + expect(kv.getAll()).toEqual({}); + }); + + it("adds 2 keys, then a third, and sees first is gone", async () => { + kv.a = 10; + kv.b = 20; + expect(kv.a).toEqual(10); + expect(kv.b).toEqual(20); + kv.c = 30; + expect(kv.c).toEqual(30); + // have to wait until it's all saved and acknowledged before enforcing limit + if (!kv.isStable()) { + await once(kv, "stable"); + } + // next change is the enforcement happening + if (kv.has("a")) { + await once(kv, "change", 500); + } + // and confirm it + expect(kv.a).toBe(undefined); + expect(kv.getAll()).toEqual({ b: 20, c: 30 }); + }); + + it("closes the kv", async () => { + await kv.clear(); + await kv.close(); + }); +}); + +describe("create a dkv with limit on age of keys, and confirm auto-delete works", () => { + let kv; + const name = `test-${Math.random()}`; + + it("creates the dkv", async () => { + kv = await createDkv({ name, config: { max_age: 50 } }); + expect(kv.getAll()).toEqual({}); + }); + + it("adds 2 keys, then a third, and sees first two are gone due to aging out", async () => { + kv.a = 10; + kv.b = 20; + expect(kv.a).toEqual(10); + expect(kv.b).toEqual(20); + await kv.save(); + await kv.config(); + await delay(50); + await kv.config(); + await delay(10); + expect(kv.has("a")).toBe(false); + expect(kv.has("b")).toBe(false); + }); + + it("closes the kv", async () => { + await kv.clear(); + await kv.close(); + }); +}); + +describe("create a dkv with limit on total bytes of keys, and confirm auto-delete works", () => { + let kv; + const name = `test-${Math.random()}`; + + it("creates the dkv", async () => { + kv = await createDkv({ name, config: { max_bytes: 100 } }); + expect(kv.getAll()).toEqual({}); + }); + + it("adds a key, then a second, and sees first one is gone due to bytes", async () => { + kv.a = "x".repeat(50); + kv.b = "x".repeat(55); + expect(kv.getAll()).toEqual({ a: "x".repeat(50), b: "x".repeat(55) }); + await kv.save(); + expect(kv.has("b")).toBe(true); + await wait({ + until: async () => { + await kv.config(); + return !kv.has("a"); + }, + }); + expect(kv.getAll()).toEqual({ b: "x".repeat(55) }); + }); + + it("closes the kv", async () => { + await kv.clear(); + await kv.close(); + }); +}); + +describe("create a dkv with limit on max_msg_size, and confirm writing small messages works but writing a big one result in a 'reject' event", () => { + let kv; + const name = `test-${Math.random()}`; + + it("creates the dkv", async () => { + kv = await createDkv({ name, config: { max_msg_size: 100 } }); + expect(kv.getAll()).toEqual({}); + }); + + it("adds a key, then a second big one results in a 'reject' event", async () => { + const rejects: { key: string; value: string }[] = []; + kv.once("reject", (x) => { + rejects.push(x); + }); + kv.a = "x".repeat(50); + await kv.save(); + kv.b = "x".repeat(150); + await kv.save(); + expect(rejects).toEqual([{ key: "b", value: "x".repeat(150) }]); + expect(kv.has("b")).toBe(false); + }); + + it("closes the kv", async () => { + await kv.clear(); + await kv.close(); + }); +}); + +describe("create a dstream with limit on the total number of messages, and confirm max_msgs, max_age works", () => { + let s, s2; + const name = `test-${Math.random()}`; + + it("creates the dstream and another with a different client", async () => { + s = await createDstream({ name, config: { max_msgs: 2 } }); + s2 = await createDstream({ + client: connect(), + name, + config: { max_msgs: 2 }, + noCache: true, + }); + expect(s.get()).toEqual([]); + expect((await s.config()).max_msgs).toBe(2); + expect((await s2.config()).max_msgs).toBe(2); + }); + + it("push 2 messages, then a third, and see first is gone and that this is reflected on both clients", async () => { + expect((await s.config()).max_msgs).toBe(2); + expect((await s2.config()).max_msgs).toBe(2); + s.push("a"); + s.push("b"); + await wait({ until: () => s.length == 2 && s2.length == 2 }); + expect(s2.get()).toEqual(["a", "b"]); + s.push("c"); + await wait({ + until: () => + s.get(0) != "a" && + s.get(1) == "c" && + s2.get(0) != "a" && + s2.get(1) == "c", + }); + expect(s.getAll()).toEqual(["b", "c"]); + expect(s2.getAll()).toEqual(["b", "c"]); + + // also check limits ar enforced if we close, then open new one: + await s.close(); + s = await createDstream({ name, config: { max_msgs: 2 } }); + expect(s.getAll()).toEqual(["b", "c"]); + + await s.config({ max_msgs: -1 }); + }); + + it("verifies that max_age works", async () => { + await delay(30); + s.push("new"); + s.config({ max_age: 25 }); // anything older than 25ms should be deleted + await wait({ until: () => s.length == 1 }); + expect(s.getAll()).toEqual(["new"]); + await s.config({ max_age: -1 }); + }); + + it("verifies that ttl works", async () => { + const conf = await s.config(); + expect(conf.allow_msg_ttl).toBe(false); + const conf2 = await s.config({ max_age: -1, allow_msg_ttl: true }); + expect(conf2.allow_msg_ttl).toBe(true); + + s.publish("ttl-message", { ttl: 50 }); + await s.save(); + await wait({ + until: async () => { + await s.config(); + return s.length == 1; + }, + }); + expect(s.get()).toEqual(["new"]); + }); + + it("verifies that max_bytes works -- publishing something too large causes everything to end up gone", async () => { + const conf = await s.config({ max_bytes: 100 }); + expect(conf.max_bytes).toBe(100); + s.publish("x".repeat(1000)); + await s.config(); + await wait({ until: () => s.length == 0 }); + expect(s.length).toBe(0); + }); + + it("max_bytes -- publish something then another thing that causes the first to get deleted", async () => { + s.publish("x".repeat(75)); + s.publish("y".repeat(90)); + await wait({ + until: async () => { + await s.config(); + return s.length == 1; + }, + }); + expect(s.get()).toEqual(["y".repeat(90)]); + await s.config({ max_bytes: -1 }); + }); + + it("verifies that max_msg_size rejects messages that are too big", async () => { + await s.config({ max_msg_size: 100 }); + expect((await s.config()).max_msg_size).toBe(100); + s.publish("x".repeat(70)); + await expect(async () => { + await s.stream.publish("x".repeat(150)); + }).rejects.toThrowError("max_msg_size"); + await s.config({ max_msg_size: 200 }); + s.publish("x".repeat(150)); + await s.config({ max_msg_size: -1 }); + expect((await s.config()).max_msg_size).toBe(-1); + }); + + it("closes the stream", async () => { + await s.close(); + await s2.close(); + }); +}); + +describe("create a dstream with limit on max_age, and confirm auto-delete works", () => { + let s; + const name = `test-${Math.random()}`; + + it("creates the dstream", async () => { + s = await createDstream({ name, config: { max_age: 50 } }); + }); + + it("push a message, then another and see first disappears", async () => { + s.push({ a: 10 }); + await delay(75); + s.push({ b: 20 }); + expect(s.get()).toEqual([{ a: 10 }, { b: 20 }]); + await wait({ + until: async () => { + await s.config(); + return s.length == 1; + }, + }); + expect(s.getAll()).toEqual([{ b: 20 }]); + }); + + it("closes the stream", async () => { + await s.delete({ all: true }); + await s.close(); + }); +}); + +describe("create a dstream with limit on max_bytes, and confirm auto-delete works", () => { + let s; + const name = `test-${Math.random()}`; + + it("creates the dstream", async () => { + s = await createDstream({ name, config: { max_bytes: 50 } }); + }); + + it("push a message, then another and see first disappears", async () => { + s.push("x".repeat(40)); + s.push("x".repeat(45)); + s.push("x"); + if (!s.isStable()) { + await once(s, "stable"); + } + expect(s.getAll()).toEqual(["x".repeat(45), "x"]); + }); + + it("closes the stream", async () => { + await s.delete({ all: true }); + await s.close(); + }); +}); + +describe("create a dstream with limit on max_msg_size, and confirm auto-delete works", () => { + let s; + const name = `test-${Math.random()}`; + + it("creates the dstream", async () => { + s = await createDstream({ name, config: { max_msg_size: 50 } }); + }); + + it("push a message, then another and see first disappears", async () => { + const rejects: any[] = []; + s.on("reject", ({ mesg }) => { + rejects.push(mesg); + }); + s.push("x".repeat(40)); + s.push("y".repeat(60)); // silently vanishes (well a reject event is emitted) + s.push("x"); + await wait({ + until: async () => { + await s.config(); + return s.length == 2; + }, + }); + expect(s.getAll()).toEqual(["x".repeat(40), "x"]); + expect(rejects).toEqual(["y".repeat(60)]); + }); + + it("closes the stream", async () => { + await s.delete({ all: true }); + await s.close(); + }); +}); + +afterAll(after); diff --git a/src/packages/backend/nats/test/sync/open-files.test.ts b/src/packages/backend/conat/test/sync/open-files.test.ts similarity index 95% rename from src/packages/backend/nats/test/sync/open-files.test.ts rename to src/packages/backend/conat/test/sync/open-files.test.ts index 47e05c6171..90b4fd0e1a 100644 --- a/src/packages/backend/nats/test/sync/open-files.test.ts +++ b/src/packages/backend/conat/test/sync/open-files.test.ts @@ -8,13 +8,16 @@ to open so they can fulfill their backend responsibilities: DEVELOPMENT: -pnpm exec jest --forceExit "open-files.test.ts" +pnpm test ./open-files.test.ts */ -import { openFiles as createOpenFiles } from "@cocalc/backend/nats/sync"; +import { openFiles as createOpenFiles } from "@cocalc/backend/conat/sync"; import { once } from "@cocalc/util/async-utils"; import { delay } from "awaiting"; +import { before, after } from "@cocalc/backend/conat/test/setup"; + +beforeAll(before); const project_id = "00000000-0000-4000-8000-000000000000"; async function create() { @@ -130,3 +133,5 @@ describe("create open file tracker and do some basic operations", () => { expect(o2.get(file2).error).toBe(undefined); }); }); + +afterAll(after); diff --git a/src/packages/backend/nats/test/sync/util.ts b/src/packages/backend/conat/test/sync/util.ts similarity index 85% rename from src/packages/backend/nats/test/sync/util.ts rename to src/packages/backend/conat/test/sync/util.ts index 9eed813594..b81bca5036 100644 --- a/src/packages/backend/nats/test/sync/util.ts +++ b/src/packages/backend/conat/test/sync/util.ts @@ -1,4 +1,4 @@ -import { dstream } from "@cocalc/backend/nats/sync"; +import { dstream } from "@cocalc/backend/conat/sync"; export async function createDstream() { const name = `test-${Math.random()}`; diff --git a/src/packages/backend/nats/test/time.test.ts b/src/packages/backend/conat/test/time.test.ts similarity index 81% rename from src/packages/backend/nats/test/time.test.ts rename to src/packages/backend/conat/test/time.test.ts index 3386e79a76..ddd454fd07 100644 --- a/src/packages/backend/nats/test/time.test.ts +++ b/src/packages/backend/conat/test/time.test.ts @@ -4,10 +4,11 @@ DEVELOPMENT: pnpm test ./time.test.ts */ -// this sets client -import "@cocalc/backend/nats"; +import { timeClient, createTimeService } from "@cocalc/conat/service/time"; +import time, { getSkew } from "@cocalc/conat/time"; +import { before, after } from "@cocalc/backend/conat/test/setup"; -import time, { getSkew } from "@cocalc/nats/time"; +beforeAll(before); describe("get time from nats", () => { it("tries to get the time before the skew, so it is not initialized yet", () => { @@ -29,14 +30,13 @@ describe("get time from nats", () => { }); }); -import { timeClient, createTimeService } from "@cocalc/nats/service/time"; - describe("start the time server and client and test that it works", () => { it("starts the time server and queries it", async () => { - await import("@cocalc/backend/nats"); createTimeService(); const client = timeClient(); const t = await client.time(); expect(Math.abs(Date.now() - t)).toBeLessThan(200); }); }); + +afterAll(after); diff --git a/src/packages/backend/conat/test/util.ts b/src/packages/backend/conat/test/util.ts new file mode 100644 index 0000000000..dee27e9c0c --- /dev/null +++ b/src/packages/backend/conat/test/util.ts @@ -0,0 +1,26 @@ +import { createServer } from "http"; +import { delay } from "awaiting"; + +export async function getPort(): Promise { + return new Promise((resolve, reject) => { + const server = createServer(); + server.listen(0, () => { + const address = server.address(); + if (typeof address === "object" && address !== null) { + const port = address.port; + server.close(() => resolve(port)); + } else { + reject(new Error("Failed to get port")); + } + }); + server.on("error", reject); + }); +} + +export async function wait({ until }: { until: Function }) { + let d = 5; + while (!(await until())) { + await delay(d); + d = Math.min(1000, d * 1.2); + } +} diff --git a/src/packages/backend/data.ts b/src/packages/backend/data.ts index 5151004969..4543a2dfa9 100644 --- a/src/packages/backend/data.ts +++ b/src/packages/backend/data.ts @@ -29,6 +29,8 @@ import { ConnectionOptions } from "node:tls"; import { existsSync, mkdirSync, readFileSync } from "fs"; import { isEmpty } from "lodash"; import { hostname } from "os"; +import basePath from "@cocalc/backend/base-path"; +import port from "@cocalc/backend/port"; function determineRootFromPath(): string { const cur = __dirname; @@ -179,6 +181,15 @@ export const pgdatabase: string = export const projects: string = process.env.PROJECTS ?? join(data, "projects", "[project_id]"); export const secrets: string = process.env.SECRETS ?? join(data, "secrets"); + +export const syncFiles = { + // Persistent local storage of streams and kv's as sqlite3 files + local: process.env.COCALC_SYNC ?? join(data, "sync"), + // Archived storage of streams and kv's as sqlite3 files, if set. + // This could be a gcsfuse mountpoint. + archive: process.env.COCALC_SYNC_ARCHIVE ?? "", +}; + // if the directory secrets doesn't exist, create it (sync, during this load): if (!existsSync(secrets)) { try { @@ -227,6 +238,21 @@ export function setNatsServer(server) { natsWebsocketServer = `ws://${natsServer}:${natsPorts.ws}`; } +// dev mode defaults +export let conatServer = process.env.CONAT_SERVER ?? `http://localhost:${port}`; +export let conatPath = process.env.CONAT_PATH ?? join(basePath, "conat"); +export let conatPassword = ""; + +export function setConatServer(server: string) { + conatServer = server; +} +export function setConatPath(path: string) { + conatPath = path; +} +export function setConatPassword(password: string) { + conatPassword = password; +} + // Password used to connect to the nats server export let natsPassword = ""; export const natsPasswordPath = join(secrets, "nats_password"); @@ -242,24 +268,6 @@ export const natsBackup = export const natsUser = "cocalc"; -// Secrets used for cryptography between the auth callout service and -// and the nats server. The *secret keys* are only needed by -// the auth callout service, and the corresponding public keys are -// only needed by the nats server, but right now (and since password is already -// known to both), we are just making the private keys available to both. -// These keys make it so if somebody tries to listen in on nats traffic -// between the server and auth callout, they can't impersonate users, etc. -// In particular: -// - nseed = account key - used by server to sign message to the auth callout -// - xseed = curve key - used by auth callout to encrypt response -// These are both arbitrary elliptic curve ed25519 secrets (nkeys), -// which are the "seed" generated using https://www.npmjs.com/package/@nats-io/nkeys -// or https://github.com/nats-io/nkeys?tab=readme-ov-file#installation -// E.g., -// ~/cocalc/src/data/secrets$ go get github.com/nats-io/nkeys -// ~/cocalc/src/data/secrets$ nk -gen account > nats_auth_nseed -// ~/cocalc/src/data/secrets$ nk -gen curve > nats_auth_xseed - export let natsAuthCalloutNSeed = ""; export const natsAuthCalloutNSeedPath = join(secrets, "nats_auth_nseed"); try { diff --git a/src/packages/backend/execute-code.test.ts b/src/packages/backend/execute-code.test.ts index d8a3b1db4f..ca96deb605 100644 --- a/src/packages/backend/execute-code.test.ts +++ b/src/packages/backend/execute-code.test.ts @@ -268,7 +268,9 @@ describe("async", () => { // Instead of taking 5+ seconds to test some polling implementation, // they should have a parameter to change the polling interval, so the // test can be much quicker. -- WS - it( + // This test also screws up running multiple tests in parallel. + // ** HENCE SKIPPING THIS!!** + it.skip( "long running async job", async () => { const c = await executeCode({ diff --git a/src/packages/backend/misc/ensure-containing-directory-exists.ts b/src/packages/backend/misc/ensure-containing-directory-exists.ts index 322dbd67a9..75033abbc7 100644 --- a/src/packages/backend/misc/ensure-containing-directory-exists.ts +++ b/src/packages/backend/misc/ensure-containing-directory-exists.ts @@ -7,20 +7,23 @@ import abspath from "./abspath"; // Make sure that that the directory containing the file indicated by // the path exists and has restrictive permissions. export default async function ensureContainingDirectoryExists( - path: string + path: string, ): Promise { path = abspath(path); const containingDirectory = path_split(path).head; // containing path if (!containingDirectory) return; + await ensureDirectoryExists(containingDirectory); +} +export async function ensureDirectoryExists(path: string): Promise { try { - await access(containingDirectory, fsc.R_OK | fsc.W_OK); + await access(path, fsc.R_OK | fsc.W_OK); // it exists, yeah! return; } catch (err) { // Doesn't exist, so create, via recursion: try { - await mkdir(containingDirectory, { mode: 0o700, recursive: true }); + await mkdir(path, { mode: 0o700, recursive: true }); } catch (err) { if (err?.code === "EEXIST") { // no problem -- it exists. diff --git a/src/packages/backend/nats/cli.ts b/src/packages/backend/nats/cli.ts deleted file mode 100644 index 154eb5dc2e..0000000000 --- a/src/packages/backend/nats/cli.ts +++ /dev/null @@ -1,55 +0,0 @@ -/* -Run an interactive bash terminal, but with the nats and nsc command -available and configured to work with full permissions. This is -useful for interactively using those command to inspect the state -of the system, learning how to do something, etc. -*/ - -import { data, natsPassword, natsUser } from "@cocalc/backend/data"; -import { join } from "path"; -import { spawnSync } from "node:child_process"; -import { natsServerUrl } from "./conf"; - -const natsBin = join(data, "nats", "bin"); - -export function natsCoCalcUserEnv({ user = natsUser }: { user?: string } = {}) { - return { - NATS_URL: natsServerUrl, - NATS_PASSWORD: natsPassword, - NATS_USER: user ?? natsUser, - PATH: `${natsBin}:${process.env.PATH}`, - }; -} - -function params({ user }) { - return { - command: "bash", - args: ["--norc", "--noprofile"], - env: { - ...natsCoCalcUserEnv({ user }), - HOME: process.env.HOME, - TERM: process.env.TERM, - PS1: `\\w [nats-${user}]$ `, - }, - }; -} - -// echo; echo '# Use CoCalc config of NATS (nats and nsc) via this subshell:'; echo; NATS_URL=nats://${COCALC_NATS_SERVER:=localhost}:${COCALC_NATS_PORT:=4222} XDG_DATA_HOME=${COCALC_ROOT:=$INIT_CWD}/data XDG_CONFIG_HOME=${COCALC_ROOT:=$INIT_CWD}/data PATH=${COCALC_ROOT:=$INIT_CWD}/data/nats/bin:$PATH bash - -// the supported users here are natsUser and 'sys'. - -export function main({ user = natsUser }: { user?: string } = {}) { - let { command, args, env } = params({ user }); - console.log("# Use CoCalc config of NATS (nats and nsc) via this subshell:"); - console.log( - JSON.stringify( - { ...env, NATS_PASSWORD: "xxx", PATH: natsBin + ":..." }, - undefined, - 2, - ), - ); - spawnSync(command, args, { - env: { ...env, PATH: `${natsBin}:${process.env.PATH}` }, - stdio: "inherit", - }); -} diff --git a/src/packages/backend/nats/conf.ts b/src/packages/backend/nats/conf.ts deleted file mode 100644 index 3cd8e07d62..0000000000 --- a/src/packages/backend/nats/conf.ts +++ /dev/null @@ -1,181 +0,0 @@ -/* -Configure nats-server, i.e., generate configuration files. - -node -e "require('@cocalc/backend/nats/conf').main()" - - - -NOTES: - -- I tried very hard to use NKEYS and/or JWT, but it's -just not compatible with auth callout, and auth callout -is required for scalability, given my use case. That's -why there is an explicit password. -*/ - -import { pathExists } from "fs-extra"; -import { - nats, - natsPorts, - natsServer, - natsPassword, - natsPasswordPath, - setNatsPassword, - natsUser, - natsAuthCalloutNSeed, - setNatsAuthCalloutNSeed, - natsAuthCalloutNSeedPath, - natsAuthCalloutXSeed, - setNatsAuthCalloutXSeed, - natsAuthCalloutXSeedPath, - natsClusterName, - natsServerName, -} from "@cocalc/backend/data"; -import { join } from "path"; -import getLogger from "@cocalc/backend/logger"; -import { writeFile } from "fs/promises"; -import { REMEMBER_ME_COOKIE_NAME } from "@cocalc/backend/auth/cookie-names"; -import { executeCode } from "@cocalc/backend/execute-code"; -import { createPrivateKey, publicKey } from "./nkeys"; - -const logger = getLogger("backend:nats:install"); - -// this is assumed in cocalc/src/package.json: -const confPath = join(nats, "server.conf"); - -// for now for local dev: -export const natsServerUrl = `nats://${natsServer}:${natsPorts.server}`; -export const natsAccountName = "cocalc"; - -// I tested and if you make this bigger, then smaller, it does NOT break -// large jetstream messages created when it was bigger. So it should be -// safe to adjust. -// 1MB is the global NATS default -// const max_payload = "1MB"; -// Note that 64MB is the max allowed. -const max_payload = process.env.COCALC_NATS_MAX_PAYLOAD ?? "8MB"; -// However, using anything big means messages can take longer to send -// messages and risk timing out. I've also implemented chunking, -// *everywhere* it is needed. -// Clients do NOT cache the payload size so if you make it big, then make it -// small, that does not require restarting everything. - -export async function configureNatsServer() { - logger.debug("configureNatsServer", { confPath, natsPorts }); - if (await pathExists(confPath)) { - logger.debug( - `configureNatsServer: target conf file '${confPath}' already exists so updating it`, - ); - } - - let ISSUER_NKEY, ISSUER_XKEY, PASSWORD; - if (!natsPassword) { - PASSWORD = createPrivateKey("user"); - setNatsPassword(PASSWORD); - await writeFile(natsPasswordPath, PASSWORD); - } else { - PASSWORD = natsPassword; - } - if (!natsAuthCalloutNSeed) { - const nseed = createPrivateKey("account"); - setNatsAuthCalloutNSeed(nseed); - await writeFile(natsAuthCalloutNSeedPath, nseed); - ISSUER_NKEY = publicKey(nseed); - } else { - ISSUER_NKEY = publicKey(natsAuthCalloutNSeed); - } - if (!natsAuthCalloutXSeed) { - const xseed = createPrivateKey("curve"); - setNatsAuthCalloutXSeed(xseed); - await writeFile(natsAuthCalloutXSeedPath, xseed); - ISSUER_XKEY = publicKey(xseed); - } else { - ISSUER_XKEY = publicKey(natsAuthCalloutXSeed); - } - - // problem with server_name -- this line - // const user = fromPublic(userNkey); - // in server/nats/auth/index.ts fails. - - await writeFile( - confPath, - ` -# Amazingly, just setting the server_name breaks auth callout, -# with it saying the nkey is invalid. This may require a lot -# "reverse engineering" work. -# server_name: ${natsServerName} -listen: ${natsServer}:${natsPorts.server} - -max_payload:${max_payload} - -jetstream: enabled - -jetstream { - store_dir: data/nats/jetstream -} - -websocket { - listen: "${natsServer}:${natsPorts.ws}" - no_tls: true - token_cookie: "${REMEMBER_ME_COOKIE_NAME}" -} - -# This does not work yet. I guess a single node cluster -# isn't possible. Reload also isn't -- the only way we ever -# grow to multiple nodes will require restarts. -# cluster { -# name: "${natsClusterName}" -# listen: "${natsServer}:${natsPorts.cluster}" -# routes: ["${natsServer}:${natsPorts.cluster}"] -# compression: { -# mode: s2_auto -# } -# } - -accounts { - COCALC { - users: [ - { user:"${natsUser}", password:"${PASSWORD}" } - ], - jetstream: { - max_mem: -1 - max_file: -1 - max_streams: -1 - max_consumers: -1 - } - } - SYS { - users: [ - { user:"sys", password:"${PASSWORD}" } - ], - } -} -system_account: SYS - -max_control_line 64KB - -authorization { - # slightly longer timeout (than 2s default): probably not necessary, but db - # queries involved (usually takes 50ms - 250ms) - timeout: 7.5 - auth_callout { - issuer: ${ISSUER_NKEY} - xkey: ${ISSUER_XKEY} - users: [ ${natsUser}, sys ] - account: COCALC - } -} - -`, - ); - - // Ensure that ONLY we can read/write the nats config directory, - // which contains highly sensitive information. This could matter - // on cocalc-docker style systems. - await executeCode({ command: "chmod", args: ["og-rwx", nats] }); -} - -export async function main() { - await configureNatsServer(); - process.exit(0); -} diff --git a/src/packages/backend/nats/env.ts b/src/packages/backend/nats/env.ts deleted file mode 100644 index 598aac94b3..0000000000 --- a/src/packages/backend/nats/env.ts +++ /dev/null @@ -1,9 +0,0 @@ -import { sha1 } from "@cocalc/backend/sha1"; -import { JSONCodec } from "nats"; -import { getConnection } from "./index"; - -export async function getEnv() { - const jc = JSONCodec(); - const nc = await getConnection(); - return { nc, jc, sha1 }; -} diff --git a/src/packages/backend/nats/install.ts b/src/packages/backend/nats/install.ts deleted file mode 100644 index 500ce77306..0000000000 --- a/src/packages/backend/nats/install.ts +++ /dev/null @@ -1,122 +0,0 @@ -/* -Ensure installed specific correct versions of the following -three GO programs in {data}/nats/bin on this server, correct -for this architecture: - - - nats - - nats-server - -We assume curl and python3 are installed. - -DEVELOPMENT: - -Installation happens automatically, e.g,. when you do 'pnpm nats-server' or -start the hub via 'pnpm hub'. However, you can explicitly do -an install as follows: - -~/cocalc/src/packages/backend/nats$ DEBUG=cocalc:* DEBUG_CONSOLE=yes node -Welcome to Node.js v18.17.1. -Type ".help" for more information. - -Install latest tested version of nats-server and nats cli: - - > await require('@cocalc/backend/nats/install').install() - -Installing just the server: - - > await require('@cocalc/backend/nats/install').installNatsServer() -*/ - -import { nats } from "@cocalc/backend/data"; -import { join } from "path"; -import { pathExists } from "fs-extra"; -import { executeCode } from "@cocalc/backend/execute-code"; -import getLogger from "@cocalc/backend/logger"; - -const VERSIONS = { - // https://github.com/nats-io/nats-server/releases - "nats-server": "v2.11.0", - // https://github.com/nats-io/natscli/releases - nats: "v0.2.0", -}; - -export const bin = join(nats, "bin"); -const logger = getLogger("backend:nats:install"); - -export async function install(noUpgrade = false) { - logger.debug("ensure nats binaries installed in ", bin); - - if (!(await pathExists(bin))) { - await executeCode({ command: "mkdir", args: ["-p", bin] }); - } - - await Promise.all([ - installNatsServer(noUpgrade), - installNatsCli(noUpgrade), - ]); -} - -// call often, but runs at most once and ONLY does something if -// there is no binary i.e., it doesn't upgrade. -let installed = false; -export async function ensureInstalled() { - if (installed) { - return; - } - installed = true; - await install(true); -} - -async function getVersion(name: string) { - try { - const { stdout } = await executeCode({ - command: join(bin, name), - args: ["--version"], - }); - const v = stdout.trim().split(/\s/g); - return v[v.length - 1]; - } catch { - return ""; - } -} - -export async function installNatsServer(noUpgrade) { - if (noUpgrade && (await pathExists(join(bin, "nats-server")))) { - return; - } - if ((await getVersion("nats-server")) == VERSIONS["nats-server"]) { - logger.debug( - `nats-server version ${VERSIONS["nats-server"]} already installed`, - ); - return; - } - const command = `curl -sf https://binaries.nats.dev/nats-io/nats-server/v2@${VERSIONS["nats-server"]} | sh`; - logger.debug("installing nats-server: ", command); - await executeCode({ - command, - path: bin, - verbose: true, - }); -} - -export async function installNatsCli(noUpgrade) { - if (noUpgrade && (await pathExists(join(bin, "nats")))) { - return; - } - if ((await getVersion("nats")) == VERSIONS["nats"]) { - logger.debug(`nats version ${VERSIONS["nats"]} already installed`); - return; - } - logger.debug("installing nats cli"); - await executeCode({ - command: `curl -sf https://binaries.nats.dev/nats-io/natscli/nats@${VERSIONS["nats"]} | sh`, - path: bin, - verbose: true, - }); -} - - -export async function main() { - await install(); - process.exit(0); -} diff --git a/src/packages/backend/nats/nkeys.ts b/src/packages/backend/nats/nkeys.ts deleted file mode 100644 index 7c6743fa52..0000000000 --- a/src/packages/backend/nats/nkeys.ts +++ /dev/null @@ -1,50 +0,0 @@ -/* -EXAMPLE: - -~/cocalc/src/packages/backend/nats$ n -Welcome to Node.js v18.17.1. -Type ".help" for more information. -> a = require('@cocalc/backend/nats/nkeys') -{ - publicKey: [Function: publicKey], - createPrivateKey: [Function: createPrivateKey] -} -> a.createPrivateKey('user') -'SUACDK5OBWPWYKHAZSKNO4IC3UXDYWD4LLPOVMM3DEY6Z7UXJQB3CK63B4' -> seed = a.createPrivateKey('user') -'SUACLFDTUS353H4ITLDAFQWYA43IAP2L7LGZ5XDEEARMJ4KNPHUWDKDUFQ' -> a.publicKey(seed) -'UCBWG2NENI2VLZRMXKAQOZVKVVPA5GBUY2G7KGEDJRDWFSQ5VV3P7VYD' -*/ - -import * as nkeys from "@nats-io/nkeys"; -import { capitalize } from "@cocalc/util/misc"; - -export function publicKey(seed: string): string { - const t = new TextEncoder(); - let kp; - if (seed.startsWith("SX")) { - kp = nkeys.fromCurveSeed(t.encode(seed)); - } else { - kp = nkeys.fromSeed(t.encode(seed)); - } - return kp.getPublicKey(); -} - -type KeyType = - | "account" - | "cluster" - | "curve" - | "operator" - | "pair" - | "server" - | "user"; - -export function createPrivateKey(type: KeyType): string { - const kp = nkeys[`create${capitalize(type)}`](); - const t = new TextDecoder(); - if (type == "curve") { - return t.decode(kp.getSeed()); - } - return t.decode(kp.seed); -} diff --git a/src/packages/backend/nats/persistent-connection.ts b/src/packages/backend/nats/persistent-connection.ts deleted file mode 100644 index 3cb5b62cca..0000000000 --- a/src/packages/backend/nats/persistent-connection.ts +++ /dev/null @@ -1,130 +0,0 @@ -/* -Create a nats connection that doesn't break. - -The NATS docs - -https://github.com/nats-io/nats.js/blob/main/core/README.md#connecting-to-a-nats-server - -ensure us that "the client will always attempt to reconnect if the connection is -disrupted for a reason other than calling close()" but THAT IS NOT TRUE. -(I think the upstream code in disconnected in nats.js/core/src/protocol.ts is a lazy -and I disagree with it. It tries to connect but if anything goes slightly wrong, -just gives up forever.) - -There are definitely situations where the connection gets permanently closed -and the close() function was not called, at least not by any of our code. -I've given up on getting them to fix or understand their bugs in general: - -https://github.com/williamstein/nats-bugs/issues/8 - -We thus monitor the connection, and if it closed, we *swap out the protocol -object*, which is an evil hack to reconnect. This seems to work fine with all -our other code. - -All that said, it's excellent that the NATS library separates the protocol from -the connection object itself, so it's possible to do this at all! :-) -*/ - -import { getLogger } from "@cocalc/backend/logger"; -import { delay } from "awaiting"; -import { reuseInFlight } from "@cocalc/util/reuse-in-flight"; -import type { NatsConnection } from "@cocalc/nats/types"; -import { connect as connectViaTCP } from "nats"; -import { connect as connectViaWebsocket } from "nats.ws"; -import { CONNECT_OPTIONS } from "@cocalc/util/nats"; -import { WebSocket } from "ws"; - -const MONITOR_INTERVAL = 3000; - -const logger = getLogger("backend:nats:connection"); - -let options: any = null; -let getOptions: (() => Promise) | null = null; -export function setConnectionOptions(_getOptions: () => Promise) { - getOptions = _getOptions; -} - -let nc: NatsConnection | null = null; - -// gets the singleton connection -const getConnection = reuseInFlight(async (): Promise => { - if (nc == null) { - logger.debug("initializing nats cocalc backend connection"); - nc = await getNewConnection(); - monitorConnection(nc); - } - return nc; -}); - -export default getConnection; - -// NOTE: this monitorConnection also has to work properly with the -// waitUntilConnected function from @cocalc/nats/util. - -// The NATS docs ensure us that "the client will always attempt to -// reconnect if the connection is disrupted for a reason other than -// calling close()" but THAT IS NOT TRUE. There are many situations -// where the connection gets permanently closed and close was not -// called, at least not by any of our code. We thus monitor the -// connection, and if it closed, we *swap out the protocol object*, which -// is an evil hack to reconnect. This seems to work fine with all our -// other code. -async function monitorConnection(nc) { - while (true) { - if (nc.isClosed()) { - console.log("fixing the NATS connection..."); - const nc2 = await getNewConnection(); - // @ts-ignore - nc.protocol = nc2.protocol; - if (!nc.isClosed()) { - console.log("successfully fixed the NATS connection!"); - } else { - console.log("failed to fix the NATS connection!"); - } - } - await delay(MONITOR_INTERVAL); - } -} - -function getServer(servers) { - return typeof servers == "string" ? servers : servers[0]; -} - -export async function getNewConnection(): Promise { - logger.debug("create new connection"); - // make initial delay short, because secret token is being written to database - // right when project starts, so second attempt very likely to work. - let d = 1000; - while (true) { - try { - if (options == null && getOptions != null) { - options = { ...CONNECT_OPTIONS, ...(await getOptions()) }; - } - if (options == null) { - throw Error("options not set yet..."); - } - let connect; - if (getServer(options.servers).startsWith("ws")) { - // this is a workaround for a bug involving reconnect that I saw on some forum - // @ts-ignore - global.WebSocket = WebSocket; - connect = connectViaWebsocket; - } else { - connect = connectViaTCP; - } - logger.debug(`connecting to ${options.servers}`); - const conn = await connect({ ...options }); - if (conn == null) { - throw Error("connection failed"); - } - logger.debug(`connected to ${conn.getServer()}`); - return conn; - } catch (err) { - d = Math.min(15000, d * 1.35) + Math.random() / 2; - logger.debug( - `ERROR connecting to ${JSON.stringify(options?.servers)}; will retry in ${d / 1000} seconds. err=${err}`, - ); - await delay(d); - } - } -} diff --git a/src/packages/backend/nats/server.ts b/src/packages/backend/nats/server.ts deleted file mode 100644 index 2d657d50b3..0000000000 --- a/src/packages/backend/nats/server.ts +++ /dev/null @@ -1,46 +0,0 @@ -import { nats } from "@cocalc/backend/data"; -import { join } from "path"; -import { spawn, spawnSync } from "node:child_process"; - -function params() { - return { - command: join(nats, "bin", "nats-server"), - args: ["-c", join(nats, "server.conf")], - env: { cwd: nats }, - }; -} - -export function startServer(): number { - const { command, args, env } = params(); - const { pid } = spawn(command, args, env); - if (pid == null) { - throw Error("issue spawning nats-server"); - } - return pid; -} - -export function main({ - verbose, - daemon, -}: { verbose?: boolean; daemon?: boolean } = {}) { - let { command, args, env } = params(); - if (verbose) { - args = [...args, "-DV"]; - } - let opts; - if (daemon) { - opts = { ...env, detached: true, stdio: "ignore" }; - const child = spawn(command, args, opts); - child.on("error", (err) => { - throw Error(`Failed to start process: ${err}`); - }); - - if (daemon) { - console.log(`Process started as daemon with PID: ${child.pid}`); - child.unref(); - } - } else { - opts = { ...env, stdio: "inherit" }; - spawnSync(command, args, opts); - } -} diff --git a/src/packages/backend/nats/sync.ts b/src/packages/backend/nats/sync.ts deleted file mode 100644 index 2b2dd58348..0000000000 --- a/src/packages/backend/nats/sync.ts +++ /dev/null @@ -1,42 +0,0 @@ -import { stream as createStream, type Stream } from "@cocalc/nats/sync/stream"; -import { - dstream as createDstream, - type DStream, -} from "@cocalc/nats/sync/dstream"; -import { kv as createKV, type KV } from "@cocalc/nats/sync/kv"; -import { dkv as createDKV, type DKV } from "@cocalc/nats/sync/dkv"; -import { dko as createDKO, type DKO } from "@cocalc/nats/sync/dko"; -import { akv as createAKV, type AKV } from "@cocalc/nats/sync/akv"; -import { createOpenFiles, type OpenFiles } from "@cocalc/nats/sync/open-files"; -export { inventory } from "@cocalc/nats/sync/inventory"; -import "./index"; - -export type { Stream, DStream, KV, DKV, DKO, AKV }; - -export async function stream(opts): Promise> { - return await createStream(opts); -} - -export async function dstream(opts): Promise> { - return await createDstream(opts); -} - -export async function kv(opts): Promise> { - return await createKV(opts); -} - -export async function dkv(opts): Promise> { - return await createDKV(opts); -} - -export function akv(opts): AKV { - return createAKV(opts); -} - -export async function dko(opts): Promise> { - return await createDKO(opts); -} - -export async function openFiles(project_id: string, opts?): Promise { - return await createOpenFiles({ project_id, ...opts }); -} diff --git a/src/packages/backend/nats/test/service.test.ts b/src/packages/backend/nats/test/service.test.ts deleted file mode 100644 index b216cbcae0..0000000000 --- a/src/packages/backend/nats/test/service.test.ts +++ /dev/null @@ -1,38 +0,0 @@ -/* - -DEVELOPMENT: - -pnpm test --forceExit service.test.ts - -*/ - -import { callNatsService, createNatsService } from "@cocalc/nats/service"; -import { once } from "@cocalc/util/async-utils"; -import "@cocalc/backend/nats"; - -describe("create a service and test it out", () => { - let s; - it("creates a service", async () => { - s = createNatsService({ - service: "echo", - handler: (mesg) => mesg.repeat(2), - }); - await once(s, "running"); - expect(await callNatsService({ service: "echo", mesg: "hello" })).toBe( - "hellohello", - ); - }); - - it("closes the services", async () => { - s.close(); - - let t = ""; - // expect( ...).toThrow doesn't seem to work with this: - try { - await callNatsService({ service: "echo", mesg: "hi", timeout: 1000 }); - } catch (err) { - t = `${err}`; - } - expect(t).toContain("Error: timeout"); - }); -}); diff --git a/src/packages/backend/nats/test/sync/binary.test.ts b/src/packages/backend/nats/test/sync/binary.test.ts deleted file mode 100644 index 6a20198448..0000000000 --- a/src/packages/backend/nats/test/sync/binary.test.ts +++ /dev/null @@ -1,116 +0,0 @@ -/* -Test using binary data with kv and stream. - -The default value type is json, which is heavily tested in all the other -unit tests. Here we test binary data instead. - -DEVELOPMENT: - -pnpm exec jest --forceExit "binary.test.ts" -*/ - -import "@cocalc/backend/nats"; // ensure client is setup -import { getMaxPayload } from "@cocalc/nats/util"; -import { dstream, dkv } from "@cocalc/backend/nats/sync"; - -describe("test binary data with a dstream", () => { - let s, - s2, - name = `${Math.random()}`; - - // binary values come back as Uint8Array with streams - const data10 = Uint8Array.from(Buffer.from("x".repeat(10))); - it("creates a binary dstream and writes/then reads binary data to/from it", async () => { - s = await dstream({ name, valueType: "binary" }); - expect(s.name).toBe(name); - s.publish(data10); - expect(s.get(0).length).toEqual(data10.length); - await s.close(); - s = await dstream({ name, valueType: "binary" }); - expect(s.get(0).length).toEqual(data10.length); - }); - - it("creates a dstream with the same name but json format and sees it is separate", async () => { - s2 = await dstream({ name, valueType: "json" }); - expect(s2.length).toBe(0); - s = await dstream({ name, valueType: "binary" }); - expect(s.length).toBe(1); - s2.push({ hello: "cocalc" }); - expect(s.length).toBe(1); - expect(s2.length).toBe(1); - await s2.close(); - s2 = await dstream({ name, valueType: "json" }); - expect(s2.get(0)).toEqual({ hello: "cocalc" }); - }); - - it("writes large binary data to the dstream to test chunking", async () => { - s = await dstream({ name, valueType: "binary" }); - const maxPayload = await getMaxPayload(); - const data = Uint8Array.from(Buffer.from("x".repeat(maxPayload * 1.5))); - s.publish(data); - expect(s.get(s.length - 1).length).toEqual(data.length); - await s.close(); - s = await dstream({ name, valueType: "binary" }); - expect(s.get(s.length - 1).length).toEqual(data.length); - }); - - it("clean up", async () => { - await s.purge(); - await s.close(); - await s2.purge(); - await s2.close(); - }); -}); - -describe("test binary data with a dkv", () => { - let s, - name = `${Math.random()}`; - - // binary values come back as buffer with dkv - const data10 = Buffer.from("x".repeat(10)); - - it("creates a binary dkv and writes/then reads binary data to/from it", async () => { - s = await dkv({ name, valueType: "binary" }); - expect(s.name).toBe(name); - s.x = data10; - expect(s.x).toEqual(data10); - expect(s.x.length).toEqual(data10.length); - await s.close(); - s = await dkv({ name, valueType: "binary" }); - expect(s.x.length).toEqual(data10.length); - expect(s.x).toEqual(data10); - }); - - let s2; - it("creates a dkv with the same name but json format and sees it is separate", async () => { - s2 = await dkv({ name, valueType: "json" }); - expect(s2.length).toBe(0); - s = await dkv({ name, valueType: "binary" }); - expect(s.length).toBe(1); - s2.x = { hello: "cocalc" }; - expect(s.length).toBe(1); - expect(s2.length).toBe(1); - await s2.close(); - s2 = await dkv({ name, valueType: "json" }); - expect(s2.x).toEqual({ hello: "cocalc" }); - expect(s.x.length).toEqual(data10.length); - }); - - it("writes large binary data to the dkv to test chunking", async () => { - s = await dkv({ name, valueType: "binary" }); - const maxPayload = await getMaxPayload(); - const data = Uint8Array.from(Buffer.from("x".repeat(maxPayload * 1.5))); - s.y = data; - expect(s.y.length).toEqual(data.length); - await s.close(); - s = await dkv({ name, valueType: "binary" }); - expect(s.y.length).toEqual(data.length); - }); - - it("clean up", async () => { - await s.clear(); - await s.close(); - await s2.clear(); - await s2.close(); - }); -}); diff --git a/src/packages/backend/nats/test/sync/chunk.test.ts b/src/packages/backend/nats/test/sync/chunk.test.ts deleted file mode 100644 index bf0cf835d2..0000000000 --- a/src/packages/backend/nats/test/sync/chunk.test.ts +++ /dev/null @@ -1,67 +0,0 @@ -/* -We support arbitrarily large values for both our kv store and stream. - -This tests that this actually works. - -DEVELOPMENT: - -pnpm exec jest --forceExit "chunk.test.ts" - -WARNING: - -If this suddenly breaks, see the comment in packages/nats/sync/general-kv.ts -about potentially having to fork NATS. -*/ - -import "@cocalc/backend/nats"; // ensure client is setup -import { getMaxPayload } from "@cocalc/nats/util"; -import { createDstream } from "./util"; -import { dstream } from "@cocalc/backend/nats/sync"; -import { dkv as createDkv } from "@cocalc/backend/nats/sync"; - -describe("create a dstream and a dkv and write a large chunk to each", () => { - let maxPayload = 0; - - it("sanity check on the max payload", async () => { - maxPayload = await getMaxPayload(); - expect(maxPayload).toBeGreaterThan(1000000); - }); - - it("write a large value with a dstream", async () => { - const largeValue = "x".repeat(2.5 * maxPayload); - const stream = await createDstream(); - stream.push(largeValue); - expect(stream[0].length).toBe(largeValue.length); - expect(stream[0] == largeValue).toBe(true); - await stream.save(); - expect(stream.hasUnsavedChanges()).toBe(false); - const name = stream.name; - await stream.close(); - - const stream2 = await dstream({ name, noAutosave: true }); - expect(stream2[0].length).toBe(largeValue.length); - expect(stream2[0] == largeValue).toBe(true); - // @ts-ignore some modicum of cleanup... - await stream2.stream.purge(); - }); - - it("write a large value to a dkv", async () => { - const name = `test-${Math.random()}`; - const largeValue = "x".repeat(2.5 * maxPayload); - const dkv = await createDkv({ name }); - dkv.set("a", largeValue); - expect(dkv.get("a").length).toBe(largeValue.length); - await dkv.save(); - expect(dkv.hasUnsavedChanges()).toBe(false); - await dkv.close(); - - const dkv2 = await createDkv({ name, noAutosave: true }); - expect(dkv2.get("a").length).toBe(largeValue.length); - expect(dkv2.get("a") == largeValue).toBe(true); - // @ts-ignore some modicum of cleanup... - await dkv2.delete("a"); - }); -}); - -// TODO: the above is just the most minimal possible test. a million things -// aren't tested yet... diff --git a/src/packages/backend/nats/test/sync/general-kv.test.ts b/src/packages/backend/nats/test/sync/general-kv.test.ts deleted file mode 100644 index 2f420adee4..0000000000 --- a/src/packages/backend/nats/test/sync/general-kv.test.ts +++ /dev/null @@ -1,235 +0,0 @@ -/* -A lot of GeneralKV is indirectly unit tested because many other things -build on it, and they are tested, e.g., dkv. But it's certainly good -to test the basics here directly as well, since if something goes wrong, -it'll be easier to track down with lower level tests in place. - -DEVELOPMENT: - -pnpm exec jest --forceExit "general-kv.test.ts" - -*/ -// import { once } from "@cocalc/util/async-utils"; -import { delay } from "awaiting"; -import { getEnv } from "@cocalc/backend/nats/env"; -import { GeneralKV } from "@cocalc/nats/sync/general-kv"; -import { getMaxPayload } from "@cocalc/nats/util"; - -describe("create a general kv and do basic operations", () => { - let kv, kv2, kv3, env; - const name = `test-${Math.round(1000 * Math.random())}`; - - it("creates the kv", async () => { - env = await getEnv(); - kv = new GeneralKV({ name, env, filter: ["foo.>"] }); - await kv.init(); - await kv.clear(); - }); - - it("sets and deletes a key", async () => { - await kv.set("foo.x", 10); - expect(kv.getAll()).toEqual({ "foo.x": 10 }); - await kv.delete("foo.x"); - expect(kv.getAll()).toEqual({}); - await kv.set("foo.x", 10); - }); - - it("a second kv with a different filter", async () => { - kv2 = new GeneralKV({ name, env, filter: ["bar.>"] }); - await kv2.init(); - await kv2.clear(); - expect(kv2.getAll()).toEqual({}); - await kv2.set("bar.abc", 10); - expect(await kv2.getAll()).toEqual({ "bar.abc": 10 }); - expect(kv.getAll()).toEqual({ "foo.x": 10 }); - }); - - it("the union", async () => { - kv3 = new GeneralKV({ name, env, filter: ["bar.>", "foo.>"] }); - await kv3.init(); - expect(kv3.getAll()).toEqual({ "foo.x": 10, "bar.abc": 10 }); - }); - - it("clear and closes the kv", async () => { - await kv.clear(); - kv.close(); - await kv2.clear(); - kv2.close(); - }); -}); - -// NOTE: with these tests, we're "dancing" with https://github.com/nats-io/nats.js/issues/246 -// and might be forced to fork nats.js. Let's hope not! -describe("test that complicated keys work", () => { - let kv, env; - const name = `test-${Math.round(1000 * Math.random())}`; - - it("creates the kv", async () => { - env = await getEnv(); - kv = new GeneralKV({ name, env, filter: ["foo.>"] }); - await kv.init(); - }); - - it("creates complicated keys that ARE allowed", async () => { - for (const k of [ - `foo.${base64}`, - "foo.!@#$%^&()", - "foo.bar.baz!.bl__-+#@ah.nat\\s", - "foo.CoCalc-和-NATS-的结合非常棒!", - // and a VERY long 50kb key: - "foo." + "x".repeat(50000), - ]) { - await kv.set(k, "cocalc"); - expect(kv.get(k)).toEqual("cocalc"); - } - }); - - it("creates keys that are NOT allowed", async () => { - for (const k of [ - "foo.b c", - "foo.", - "foo.bar.", - "foo.b\u0000c", - "foo.b*c", - "foo.b>c", - ]) { - expect(async () => await kv.set(k, "not-allowed")).rejects.toThrow(); - } - }); - - it("clear and closes the kv", async () => { - await kv.clear(); - kv.close(); - }); -}); - -const base64 = - "0123456789+/ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz="; - -describe("a complicated filter", () => { - let kv, env; - const name = `test-${Math.round(1000 * Math.random())}`; - - it("creates the kv", async () => { - env = await getEnv(); - kv = new GeneralKV({ name, env, filter: [`${base64}.>`] }); - await kv.init(); - }); - it("clear and closes the kv", async () => { - await kv.clear(); - kv.close(); - }); -}); - -describe("test using the binary value type", () => { - let kv, env; - const name = `test-${Math.round(1000 * Math.random())}`; - - it("creates the kv", async () => { - env = await getEnv(); - kv = new GeneralKV({ name, env, filter: ["foo.>"], valueType: "binary" }); - await kv.init(); - }); - - it("set and get a binary value", async () => { - const value = Buffer.from([0, 0, 3, 8, 9, 5, 0, 7, 7]); - await kv.set("foo.b", value); - expect(kv.get("foo.b")).toEqual(value); - expect(kv.get("foo.b").length).toEqual(9); - }); - - it("sets and gets a large binary value that requires chunking", async () => { - const m = await getMaxPayload(); - const value = Buffer.from("x".repeat(1.5 * m)); - value[0] = 7; - await kv.set("foo.big", value); - expect(kv.get("foo.big").length).toEqual(value.length); - }); - - it("clear and closes the kv", async () => { - await kv.clear(); - kv.close(); - }); -}); - -describe("test using a range of useful functions: length, has, time, headers, etc.", () => { - let kv, env; - const name = `test-${Math.round(1000 * Math.random())}`; - - it("creates the kv", async () => { - env = await getEnv(); - kv = new GeneralKV({ name, env, filter: ["foo.>"] }); - await kv.init(); - }); - - it("sets a value and observe length matches", async () => { - expect(kv.length).toBe(0); - await kv.set("foo.x", 10); - expect(kv.length).toBe(1); - }); - - it("sets a value and observe time is reasonable", async () => { - await kv.set("foo.time", 10); - while (kv.time("foo.time") == null) { - await delay(10); - } - expect(Math.abs(kv.time("foo.time").valueOf() - Date.now())).toBeLessThan( - 10000, - ); - }); - - it("check has works", async () => { - expect(await kv.has("foo.has")).toBe(false); - await kv.set("foo.has", "it"); - expect(await kv.has("foo.has")).toBe(true); - await kv.delete("foo.has"); - expect(await kv.has("foo.has")).toBe(false); - }); - - it("verifying key is valid given the filter", async () => { - expect(kv.isValidKey("foo.x")).toBe(true); - expect(kv.isValidKey("bar.x")).toBe(false); - }); - - it("expire keys using ageMs", async () => { - await kv.set("foo.old", 10); - await delay(100); - await kv.set("foo.new", 20); - await kv.expire({ ageMs: 200 }); - expect(kv.has("foo.old")).toBe(true); - await kv.expire({ ageMs: 50 }); - expect(kv.has("foo.old")).toBe(false); - expect(kv.has("foo.new")).toBe(true); - }); - - it("expire keys using cutoff", async () => { - await kv.set("foo.old0", 10); - await delay(50); - const cutoff = new Date(); - await delay(50); - await kv.set("foo.new0", 20); - await kv.expire({ cutoff }); - expect(kv.has("foo.old0")).toBe(false); - expect(kv.has("foo.new0")).toBe(true); - }); - - it("sets and gets a header", async () => { - await kv.set("foo.head", 10, { headers: { CoCalc: "NATS" } }); - expect(kv.get("foo.head")).toBe(10); - while (kv.headers("foo.head") == null) { - await delay(10); - } - expect(kv.headers("foo.head").CoCalc).toBe("NATS"); - }); - - it("sanity check on stats", async () => { - const stats = kv.stats(); - expect(stats.count).toBeGreaterThan(0); - expect(stats.bytes).toBeGreaterThan(0); - }); - - it("clear and closes the kv", async () => { - await kv.clear(); - kv.close(); - }); -}); diff --git a/src/packages/backend/nats/test/sync/headers.test.ts b/src/packages/backend/nats/test/sync/headers.test.ts deleted file mode 100644 index 4211287387..0000000000 --- a/src/packages/backend/nats/test/sync/headers.test.ts +++ /dev/null @@ -1,170 +0,0 @@ -/* -Test using user-defined headers with kv and stream. - -DEVELOPMENT: - -pnpm exec jest --forceExit "headers.test.ts" -*/ - -import "@cocalc/backend/nats"; // ensure client is setup -import { getMaxPayload } from "@cocalc/nats/util"; -import { dstream, stream, dkv, kv } from "@cocalc/backend/nats/sync"; -import { once } from "@cocalc/util/async-utils"; - -describe("test headers with a stream", () => { - let s; - it("creates a stream and writes a value without a header", async () => { - s = await stream({ name: `${Math.random()}` }); - expect(s.headers(s.length - 1)).toBe(undefined); - s.publish("x"); - await once(s, "change"); - expect(s.headers(s.length - 1)).toBe(undefined); - }); - - it("writes a value with a header", async () => { - s.publish("y", { headers: { my: "header" } }); - await once(s, "change"); - expect(s.headers(s.length - 1)).toEqual({ my: "header" }); - }); - - it("writes a large value to a stream that requires chunking and a header", async () => { - s.publish("y".repeat((await getMaxPayload()) * 2), { - headers: { large: "chunks", multiple: "keys" }, - }); - await once(s, "change"); - expect(s.headers(s.length - 1)).toEqual( - expect.objectContaining({ large: "chunks", multiple: "keys" }), - ); - expect(s.headers(s.length - 1)).toEqual({ - large: "chunks", - multiple: "keys", - // CoCalc- and Nats- headers get used internally, but are still visible. - // 3 because of how size was chosen above. - "CoCalc-Chunks": "3/3", - }); - }); - - it("clean up", async () => { - await s.purge(); - }); -}); - -describe("test headers with a dstream", () => { - let s; - const name = `${Math.random()}`; - it("creates a dstream and writes a value without a header", async () => { - s = await dstream({ name }); - expect(s.headers(s.length - 1)).toBe(undefined); - s.publish("x"); - await once(s, "change"); - const h = s.headers(s.length - 1); - for (const k in h ?? {}) { - if (!k.startsWith("Nats-") && !k.startsWith("CoCalc-")) { - throw Error("headers must start with Nats- or CoCalc-"); - } - } - }); - - it("writes a value with a header", async () => { - s.publish("y", { headers: { my: "header" } }); - // NOTE: not optimal but this is what is implemented and documented! - expect(s.headers(s.length - 1)).toEqual(undefined); - await once(s, "change"); - expect(s.headers(s.length - 1)).toEqual( - expect.objectContaining({ my: "header" }), - ); - }); - - it("header still there", async () => { - await s.close(); - s = await dstream({ name }); - expect(s.headers(s.length - 1)).toEqual( - expect.objectContaining({ my: "header" }), - ); - }); - - it("clean up", async () => { - await s.purge(); - }); -}); - -describe("test headers with low level general kv", () => { - let s, gkv; - it("creates a kv and writes a value without a header", async () => { - s = await kv({ name: `${Math.random()}` }); - gkv = s.generalKV; - const key = `${s.prefix}.x`; - expect(gkv.headers(key)).toBe(undefined); - gkv.set(key, 10); - await once(gkv, "change"); - expect(gkv.headers(key)).toBe(undefined); - }); - - it("writes a value with a header", async () => { - const key = `${s.prefix}.y`; - gkv.set(key, 20, { headers: { my: "header" } }); - await once(gkv, "change"); - expect(gkv.headers(key)).toEqual({ my: "header" }); - }); - - it("changes header without changing value", async () => { - const key = `${s.prefix}.y`; - gkv.set(key, 20, { headers: { my: "header2", second: "header" } }); - await once(gkv, "change"); - expect(gkv.headers(key)).toEqual( - expect.objectContaining({ my: "header2", second: "header" }), - ); - }); - - it("removes header without changing value", async () => { - const key = `${s.prefix}.y`; - gkv.set(key, 20, { headers: { my: null, second: "header" } }); - await once(gkv, "change"); - expect(gkv.headers(key)).toEqual( - expect.objectContaining({ second: "header" }), - ); - }); - - it("writes a large value to a kv that requires chunking and a header", async () => { - const key = `${s.prefix}.big`; - gkv.set(key, "x".repeat((await getMaxPayload()) * 2), { - headers: { the: "header" }, - }); - await once(gkv, "change"); - expect(gkv.headers(key)).toEqual( - expect.objectContaining({ the: "header" }), - ); - }); - - it("clean up", async () => { - await s.clear(); - }); -}); - -describe("test headers with a dkv", () => { - let s; - const name = `${Math.random()}`; - it("creates a dkv and writes a value without a header", async () => { - s = await dkv({ name }); - s.set("x", 10); - await once(s, "change"); - const h = s.headers("x"); - for (const k in h ?? {}) { - if (!k.startsWith("Nats-") && !k.startsWith("CoCalc-")) { - throw Error("headers must start with Nats- or CoCalc-"); - } - } - }); - - it("writes a value with a header", async () => { - s.set("y", 20, { headers: { my: "header" } }); - // NOTE: not optimal but this is what is implemented and documented! - expect(s.headers("y")).toEqual(undefined); - await once(s, "change"); - expect(s.headers("y")).toEqual(expect.objectContaining({ my: "header" })); - }); - - it("clean up", async () => { - await s.clear(); - }); -}); diff --git a/src/packages/backend/nats/test/sync/limits.test.ts b/src/packages/backend/nats/test/sync/limits.test.ts deleted file mode 100644 index 5f5d4a5751..0000000000 --- a/src/packages/backend/nats/test/sync/limits.test.ts +++ /dev/null @@ -1,259 +0,0 @@ -/* -Testing the limits. - -DEVELOPMENT: - -pnpm exec jest --forceExit "limits.test.ts" - -*/ - -import { dkv as createDkv } from "@cocalc/backend/nats/sync"; -import { dstream as createDstream } from "@cocalc/backend/nats/sync"; -import { delay } from "awaiting"; -import { once } from "@cocalc/util/async-utils"; - -describe.skip("create a dkv with limit on the total number of keys, and confirm auto-delete works", () => { - let kv; - const name = `test-${Math.random()}`; - - it("creates the dkv", async () => { - kv = await createDkv({ name, limits: { max_msgs: 2 } }); - expect(kv.getAll()).toEqual({}); - }); - - it("adds 2 keys, then a third, and sees first is gone", async () => { - kv.a = 10; - kv.b = 20; - expect(kv.a).toEqual(10); - expect(kv.b).toEqual(20); - kv.c = 30; - expect(kv.c).toEqual(30); - // have to wait until it's all saved and acknowledged before enforcing limit - if (!kv.isStable()) { - await once(kv, "stable"); - } - // cause limit enforcement immediately so unit tests aren't slow - await kv.generalDKV.kv.enforceLimitsNow(); - // next change is the enforcement happening - if (kv.has("a")) { - await once(kv, "change", 500); - } - // and confirm it - expect(kv.a).toBe(undefined); - expect(kv.getAll()).toEqual({ b: 20, c: 30 }); - }); - - it("closes the kv", async () => { - await kv.clear(); - await kv.close(); - }); -}); - -describe.skip("create a dkv with limit on age of keys, and confirm auto-delete works", () => { - let kv; - const name = `test-${Math.random()}`; - - it("creates the dkv", async () => { - kv = await createDkv({ name, limits: { max_age: 50 } }); - expect(kv.getAll()).toEqual({}); - }); - - it("adds 2 keys, then a third, and sees first two are gone due to aging out", async () => { - kv.a = 10; - kv.b = 20; - expect(kv.a).toEqual(10); - expect(kv.b).toEqual(20); - await kv.save(); - await delay(75); - kv.c = 30; - expect(kv.c).toEqual(30); - if (!kv.isStable()) { - await once(kv, "stable"); - } - await kv.generalDKV.kv.enforceLimitsNow(); - if (kv.has("a")) { - await once(kv, "change", 500); - } - expect(kv.getAll()).toEqual({ c: 30 }); - }); - - it("closes the kv", async () => { - await kv.clear(); - await kv.close(); - }); -}); - -describe("create a dkv with limit on total bytes of keys, and confirm auto-delete works", () => { - let kv; - const name = `test-${Math.random()}`; - - it("creates the dkv", async () => { - kv = await createDkv({ name, limits: { max_bytes: 100 } }); - expect(kv.getAll()).toEqual({}); - }); - - it("adds a key, then a seocnd, and sees first one is gone due to bytes", async () => { - kv.a = "x".repeat(50); - await kv.save(); - kv.b = "x".repeat(75); - if (!kv.isStable()) { - await once(kv, "stable"); - } - await delay(250); - await kv.generalDKV.kv.enforceLimitsNow(); - if (kv.has("a")) { - await once(kv, "change", 500); - } - expect(kv.getAll()).toEqual({ b: "x".repeat(75) }); - }); - - it("closes the kv", async () => { - await kv.clear(); - await kv.close(); - }); -}); - -describe.skip("create a dkv with limit on max_msg_size, and confirm writing small messages works but writing a big one result in a 'reject' event", () => { - let kv; - const name = `test-${Math.random()}`; - - it("creates the dkv", async () => { - kv = await createDkv({ name, limits: { max_msg_size: 100 } }); - expect(kv.getAll()).toEqual({}); - }); - - it("adds a key, then a second big one results in a 'reject' event", async () => { - const rejects: { key: string; value: string }[] = []; - kv.once("reject", (x) => { - rejects.push(x); - }); - kv.a = "x".repeat(50); - await kv.save(); - kv.b = "x".repeat(150); - await kv.save(); - expect(rejects).toEqual([{ key: "b", value: "x".repeat(150) }]); - expect(kv.has("b")).toBe(false); - }); - - it("closes the kv", async () => { - await kv.clear(); - await kv.close(); - }); -}); - -describe("create a dstream with limit on the total number of messages, and confirm auto-delete works", () => { - let s; - const name = `test-${Math.random()}`; - - it("creates the dstream", async () => { - s = await createDstream({ name, limits: { max_msgs: 2 } }); - expect(s.get()).toEqual([]); - }); - - it("push 2 messages, then a third, and sees first is gone", async () => { - s.push({ a: 10 }); - s.push({ b: 20 }); - expect(s.get()).toEqual([{ a: 10 }, { b: 20 }]); - s.push({ c: 30 }); - expect(s.get(2)).toEqual({ c: 30 }); - // have to wait until it's all saved and acknowledged before enforcing limit - if (!s.isStable()) { - await once(s, "stable"); - } - // cause limit enforcement immediately so unit tests aren't slow - await s.stream.enforceLimitsNow(); - expect(s.getAll()).toEqual([{ b: 20 }, { c: 30 }]); - - // also check limits was enforced if we close, then open new one: - await s.close(); - s = await createDstream({ name, limits: { max_msgs: 2 } }); - expect(s.getAll()).toEqual([{ b: 20 }, { c: 30 }]); - }); - - it("closes the stream", async () => { - await s.purge(); - await s.close(); - }); -}); - -describe("create a dstream with limit on max_age, and confirm auto-delete works", () => { - let s; - const name = `test-${Math.random()}`; - - it("creates the dstream", async () => { - s = await createDstream({ name, limits: { max_age: 50 } }); - }); - - it("push a message, then another and see first disappears", async () => { - s.push({ a: 10 }); - await delay(100); - s.push({ b: 20 }); - expect(s.get()).toEqual([{ a: 10 }, { b: 20 }]); - if (!s.isStable()) { - await once(s, "stable"); - } - await s.stream.enforceLimitsNow(); - expect(s.getAll()).toEqual([{ b: 20 }]); - }); - - it("closes the stream", async () => { - await s.purge(); - await s.close(); - }); -}); - -describe("create a dstream with limit on max_bytes, and confirm auto-delete works", () => { - let s; - const name = `test-${Math.random()}`; - - it("creates the dstream", async () => { - s = await createDstream({ name, limits: { max_bytes: 50 } }); - }); - - it("push a message, then another and see first disappears", async () => { - s.push("x".repeat(40)); - s.push("x".repeat(45)); - s.push("x"); - if (!s.isStable()) { - await once(s, "stable"); - } - await s.stream.enforceLimitsNow(); - expect(s.getAll()).toEqual(["x".repeat(45), "x"]); - }); - - it("closes the stream", async () => { - await s.purge(); - await s.close(); - }); -}); - -describe("create a dstream with limit on max_msg_size, and confirm auto-delete works", () => { - let s; - const name = `test-${Math.random()}`; - - it("creates the dstream", async () => { - s = await createDstream({ name, limits: { max_msg_size: 50 } }); - }); - - it("push a message, then another and see first disappears", async () => { - const rejects: any[] = []; - s.on("reject", ({ mesg }) => { - rejects.push(mesg); - }); - s.push("x".repeat(40)); - s.push("y".repeat(60)); // silently vanishes (well a reject event is emitted) - s.push("x"); - if (!s.isStable()) { - await once(s, "stable"); - } - await s.stream.enforceLimitsNow(); - expect(s.getAll()).toEqual(["x".repeat(40), "x"]); - - expect(rejects).toEqual(["y".repeat(60)]); - }); - - it("closes the stream", async () => { - await s.purge(); - await s.close(); - }); -}); diff --git a/src/packages/backend/package.json b/src/packages/backend/package.json index 9b6459ce34..885625b315 100644 --- a/src/packages/backend/package.json +++ b/src/packages/backend/package.json @@ -5,7 +5,7 @@ "exports": { "./*": "./dist/*.js", "./database": "./dist/database/index.js", - "./nats": "./dist/nats/index.js", + "./conat": "./dist/conat/index.js", "./server-settings": "./dist/server-settings/index.js", "./auth/*": "./dist/auth/*.js", "./auth/tokens/*": "./dist/auth/tokens/*.js" @@ -19,8 +19,9 @@ "clean": "rm -rf dist node_modules", "build": "pnpm exec tsc --build", "tsc": "pnpm exec tsc --watch --pretty --preserveWatchOutput", - "test": "pnpm exec jest --forceExit --runInBand", - "prepublishOnly": "pnpm test" + "test": "pnpm exec jest --forceExit", + "prepublishOnly": "pnpm test", + "conat-watch": "node ./bin/conat-watch.cjs" }, "files": [ "dist/**", @@ -32,28 +33,29 @@ "license": "SEE LICENSE.md", "dependencies": { "@cocalc/backend": "workspace:*", - "@cocalc/nats": "workspace:*", + "@cocalc/conat": "workspace:*", "@cocalc/util": "workspace:*", - "@nats-io/nkeys": "^2.0.3", "@types/debug": "^4.1.12", "@types/watchpack": "^2.4.4", "@types/ws": "^8.18.1", "awaiting": "^3.0.0", + "better-sqlite3": "^11.10.0", "chokidar": "^3.6.0", "debug": "^4.4.0", "fs-extra": "^11.2.0", "lodash": "^4.17.21", "lru-cache": "^7.18.3", - "nats": "^2.29.3", - "nats.ws": "^1.30.2", "password-hash": "^1.2.2", "prom-client": "^13.0.0", "rimraf": "^5.0.5", "shell-escape": "^0.2.0", + "socket.io": "^4.8.1", + "socket.io-client": "^4.8.1", "supports-color": "^9.0.2", "tmp-promise": "^3.0.3", "underscore": "^1.12.1", - "ws": "^8.18.0" + "ws": "^8.18.0", + "zstd-napi": "^0.0.10" }, "repository": { "type": "git", diff --git a/src/packages/backend/tsconfig.json b/src/packages/backend/tsconfig.json index 8d855cf7c6..148fecfd62 100644 --- a/src/packages/backend/tsconfig.json +++ b/src/packages/backend/tsconfig.json @@ -7,5 +7,5 @@ "outDir": "dist" }, "exclude": ["node_modules", "dist", "test"], - "references": [{ "path": "../util", "path": "../nats" }] + "references": [{ "path": "../util", "path": "../conat" }] } diff --git a/src/packages/comm/project-configuration.ts b/src/packages/comm/project-configuration.ts index 7e78735513..37a579d88e 100644 --- a/src/packages/comm/project-configuration.ts +++ b/src/packages/comm/project-configuration.ts @@ -10,7 +10,7 @@ export const LIBRARY_INDEX_FILE = "/ext/library/cocalc-examples/index.json"; export interface MainConfiguration { capabilities: MainCapabilities; - timestamp: string; + timestamp: Date; // disabled extensions, for opening/creating files disabled_ext: string[]; } @@ -18,7 +18,7 @@ export interface MainConfiguration { export type Capabilities = { [key: string]: boolean }; export interface X11Configuration { - timestamp: string; + timestamp: Date; capabilities: Capabilities; } diff --git a/src/packages/nats/README.md b/src/packages/conat/README.md similarity index 100% rename from src/packages/nats/README.md rename to src/packages/conat/README.md diff --git a/src/packages/nats/changefeed/client.ts b/src/packages/conat/changefeed/client.ts similarity index 60% rename from src/packages/nats/changefeed/client.ts rename to src/packages/conat/changefeed/client.ts index b5d16955b4..e4b5d53322 100644 --- a/src/packages/nats/changefeed/client.ts +++ b/src/packages/conat/changefeed/client.ts @@ -11,10 +11,9 @@ changefeed(...) -- returns async iterator that outputs: renew({id, lifetime}) -- keeps the changefeed alive for at least lifetime more ms. */ -import { getEnv } from "@cocalc/nats/client"; +import { getEnv } from "@cocalc/conat/client"; import { isValidUUID } from "@cocalc/util/misc"; -import { changefeedSubject, renewSubject, LAST_CHUNK } from "./server"; -import { waitUntilConnected } from "@cocalc/nats/util"; +import { changefeedSubject, renewSubject } from "./server"; export { DEFAULT_LIFETIME } from "./server"; export async function* changefeed({ @@ -42,25 +41,13 @@ export async function* changefeed({ const subject = changefeedSubject({ account_id }); let lastSeq = -1; - const { nc, jc } = await getEnv(); - await waitUntilConnected(); - const chunks: Uint8Array[] = []; - for await (const mesg of await nc.requestMany( + const { cn } = await getEnv(); + for await (const mesg of await cn.requestMany( subject, - jc.encode({ query, options, heartbeat, lifetime }), + { query, options, heartbeat, lifetime }, { maxWait: maxActualLifetime }, )) { - if (mesg.data.length == 0) { - // done - return; - } - chunks.push(mesg.data); - if (!isLastChunk(mesg)) { - continue; - } - const data = Buffer.concat(chunks); - chunks.length = 0; - const { error, resp, seq } = jc.decode(data); + const { error, resp, seq } = mesg.data; if (error) { throw Error(error); } @@ -72,15 +59,6 @@ export async function* changefeed({ } } -function isLastChunk(mesg) { - for (const [key, _] of mesg.headers ?? []) { - if (key == LAST_CHUNK) { - return true; - } - } - return false; -} - export async function renew({ account_id, id, @@ -91,8 +69,7 @@ export async function renew({ lifetime?: number; }) { const subject = renewSubject({ account_id }); - const { nc, jc } = await getEnv(); - await waitUntilConnected(); - const resp = await nc.request(subject, jc.encode({ id, lifetime })); - return jc.decode(resp.data); + const { cn } = await getEnv(); + const resp = await cn.request(subject, { id, lifetime }); + return resp.data; } diff --git a/src/packages/nats/changefeed/server.ts b/src/packages/conat/changefeed/server.ts similarity index 73% rename from src/packages/nats/changefeed/server.ts rename to src/packages/conat/changefeed/server.ts index 1ed0263925..820c077560 100644 --- a/src/packages/nats/changefeed/server.ts +++ b/src/packages/conat/changefeed/server.ts @@ -6,13 +6,11 @@ Multiresponse request/response NATS changefeed server. - Heartbeats */ -import { getEnv } from "@cocalc/nats/client"; -import { type Subscription, Empty, headers } from "@nats-io/nats-core"; +import { getEnv } from "@cocalc/conat/client"; +import { type Subscription } from "@cocalc/conat/core/client"; import { isValidUUID, uuid } from "@cocalc/util/misc"; -import { getLogger } from "@cocalc/nats/client"; -import { waitUntilConnected } from "@cocalc/nats/util"; +import { getLogger } from "@cocalc/conat/client"; import { delay } from "awaiting"; -import { getMaxPayload } from "@cocalc/nats/util"; export const DEFAULT_LIFETIME = 1000 * 60; export const MAX_LIFETIME = 15 * 1000 * 60; @@ -27,8 +25,6 @@ export const MAX_CHANGEFEEDS_PER_SERVER = parseInt( process.env.MAX_CHANGEFEEDS_PER_SERVER ?? "5000", ); -export const LAST_CHUNK = "last-chunk"; - const logger = getLogger("changefeed:server"); export const SUBJECT = process.env.COCALC_TEST_MODE @@ -67,36 +63,28 @@ export async function init(db) { MAX_CHANGEFEEDS_PER_SERVER, SUBJECT, }); - changefeedMainLoop(db); - renewMainLoop(); + changefeedService(db); + renewService(); } -async function changefeedMainLoop(db) { - while (!terminated) { - await waitUntilConnected(); - const { nc } = await getEnv(); - sub = nc.subscribe(`${SUBJECT}.*.api`, { queue: "q" }); - try { - await listen(db); - } catch (err) { - logger.debug(`WARNING: changefeedMainLoop error -- ${err}`); - } - await delay(15000); +async function changefeedService(db) { + const { cn } = await getEnv(); + sub = await cn.subscribe(`${SUBJECT}.*.api`, { queue: "q" }); + try { + await listen(db); + } catch (err) { + logger.debug(`WARNING: exiting changefeed service -- ${err}`); } } let renew: Subscription | null = null; -async function renewMainLoop() { - while (!terminated) { - await waitUntilConnected(); - const { nc } = await getEnv(); - renew = nc.subscribe(`${SUBJECT}.*.renew`); - try { - await listenRenew(); - } catch (err) { - logger.debug(`WARNING: renewMainLoop error -- ${err}`); - } - await delay(15000); +async function renewService() { + const { cn } = await getEnv(); + renew = await cn.subscribe(`${SUBJECT}.*.renew`); + try { + await listenRenew(); + } catch (err) { + logger.debug(`WARNING: exiting renewService error -- ${err}`); } } @@ -108,7 +96,13 @@ async function listenRenew() { if (terminated) { return; } - handleRenew(mesg); + (async () => { + try { + await handleRenew(mesg); + } catch (err) { + logger.debug(`WARNING -- issue handling a renew message -- ${err}`); + } + })(); } } @@ -132,8 +126,7 @@ function getLifetime({ lifetime }): number { } async function handleRenew(mesg) { - const { jc } = await getEnv(); - const request = jc.decode(mesg.data); + const request = mesg.data; if (!request) { return; } @@ -142,7 +135,7 @@ async function handleRenew(mesg) { // it's ours so we respond const lifetime = getLifetime(request); endOfLife[id] = Date.now() + lifetime; - mesg.respond(jc.encode({ status: "ok" })); + mesg.respond({ status: "ok" }); } } @@ -166,7 +159,14 @@ async function listen(db) { if (terminated) { return; } - handleMessage(mesg, db); + + (async () => { + try { + handleMessage(mesg, db); + } catch (err) { + logger.debug(`WARNING -- issue handling a changefeed -- ${err}`); + } + })(); } } @@ -177,31 +177,8 @@ function metrics() { logger.debug("changefeeds", { numChangefeeds }); } -async function send({ jc, mesg, resp }) { - const maxPayload = (await getMaxPayload()) - 1000; // slack for header - const data = jc.encode(resp); - const chunks: Buffer[] = []; - for (let i = 0; i < data.length; i += maxPayload) { - const slice = data.slice(i, i + maxPayload); - chunks.push(slice); - } - if (chunks.length > 1) { - logger.debug(`sending message with ${chunks.length} chunks`); - } - for (let i = 0; i < chunks.length; i++) { - if (i == chunks.length - 1) { - const h = headers(); - h.append(LAST_CHUNK, "true"); - mesg.respond(chunks[i], { headers: h }); - } else { - mesg.respond(chunks[i]); - } - } -} - async function handleMessage(mesg, db) { - const { jc } = await getEnv(); - const request = jc.decode(mesg.data); + const request = mesg.data; const account_id = getUserId(mesg.subject); const id = uuid(); @@ -215,7 +192,7 @@ async function handleMessage(mesg, db) { if (resp?.action == "close") { end(); } else { - await send({ jc, mesg, resp: { resp, error, seq } }); + mesg.respond({ resp, error, seq }); seq += 1; if (error) { end(); @@ -235,8 +212,8 @@ async function handleMessage(mesg, db) { numChangefeeds -= 1; metrics(); db().user_query_cancel_changefeed({ id }); - // end response stream with empty payload. - mesg.respond(Empty); + // end response stream: + mesg.respond(null); }; if (numChangefeedsPerAccount[account_id] > MAX_CHANGEFEEDS_PER_ACCOUNT) { diff --git a/src/packages/nats/client.ts b/src/packages/conat/client.ts similarity index 80% rename from src/packages/nats/client.ts rename to src/packages/conat/client.ts index c706c79e77..0b4bf7f142 100644 --- a/src/packages/nats/client.ts +++ b/src/packages/conat/client.ts @@ -2,18 +2,19 @@ DEVELOPMENT: ~/cocalc/src/packages/backend$ node -> require('@cocalc/backend/nats'); c = require('@cocalc/nats/client').getClient() +> require('@cocalc/backend/conat'); c = require('@cocalc/conat/client').getClient() > c.state 'connected' > Object.keys(await c.getNatsEnv()) [ 'nc', 'jc' ] */ -import type { NatsEnv, NatsEnvFunction } from "@cocalc/nats/types"; +import type { NatsEnv, NatsEnvFunction } from "@cocalc/conat/types"; import { init } from "./time"; import { EventEmitter } from "events"; import { reuseInFlight } from "@cocalc/util/reuse-in-flight"; -import type { NatsConnection } from "@nats-io/nats-core"; + +type NatsConnection = any; interface Client { getNatsEnv: NatsEnvFunction; @@ -62,7 +63,6 @@ export class ClientWithState extends EventEmitter { return this.env; } this.env = await client.getNatsEnv(); - this.monitorConnectionState(this.env.nc); return this.env; }); this.account_id = client.account_id; @@ -85,7 +85,7 @@ export class ClientWithState extends EventEmitter { }; close = () => { - this.env?.nc.close(); + this.env?.nc?.close(); this.setConnectionState("closed"); this.removeAllListeners(); delete this.env; @@ -99,22 +99,6 @@ export class ClientWithState extends EventEmitter { this.emit(state); this.emit("state", state); }; - - private monitorConnectionState = async (nc) => { - this.setConnectionState("connected"); - - for await (const { type } of nc.status()) { - if (this.state == "closed") { - return; - } - if (type.includes("ping") || type == "update" || type == "reconnect") { - // connection is working well - this.setConnectionState("connected"); - } else if (type == "reconnecting") { - this.setConnectionState("connecting"); - } - } - }; } // do NOT do this until some explicit use of nats is initiated, since we shouldn't @@ -129,7 +113,7 @@ function initTime() { } let globalClient: null | ClientWithState = null; -export function setNatsClient(client: Client) { +export function setConatClient(client: Client) { globalClient = new ClientWithState(client); } @@ -186,14 +170,15 @@ export function getLogger(name) { let theConnection: NatsConnection | null = null; export const getConnection = reuseInFlight( async (): Promise => { - if (theConnection == null) { - const { nc } = await getEnv(); - if (nc == null) { - throw Error("bug"); - } - theConnection = nc; - } - return theConnection; + return null as any; +// if (theConnection == null) { +// const { nc } = await getEnv(); +// if (nc == null) { +// throw Error("bug"); +// } +// theConnection = nc; +// } +// return theConnection; }, ); diff --git a/src/packages/nats/compute/README.md b/src/packages/conat/compute/README.md similarity index 100% rename from src/packages/nats/compute/README.md rename to src/packages/conat/compute/README.md diff --git a/src/packages/nats/compute/manager.ts b/src/packages/conat/compute/manager.ts similarity index 96% rename from src/packages/nats/compute/manager.ts rename to src/packages/conat/compute/manager.ts index 9f33ff85b2..4b356c87c3 100644 --- a/src/packages/nats/compute/manager.ts +++ b/src/packages/conat/compute/manager.ts @@ -5,11 +5,11 @@ is used to edit a given file. Access this in the browser for the project you have open: -> m = await cc.client.nats_client.computeServerManager({project_id:cc.current().project_id}) +> m = await cc.client.conat_client.computeServerManager({project_id:cc.current().project_id}) */ -import { dkv, type DKV } from "@cocalc/nats/sync/dkv"; +import { dkv, type DKV } from "@cocalc/conat/sync/dkv"; import { reuseInFlight } from "@cocalc/util/reuse-in-flight"; import { EventEmitter } from "events"; import { delay } from "awaiting"; diff --git a/src/packages/conat/core/client.ts b/src/packages/conat/core/client.ts new file mode 100644 index 0000000000..1d7657bf0c --- /dev/null +++ b/src/packages/conat/core/client.ts @@ -0,0 +1,1093 @@ +/* +core/client.s -- core conats client + +This is a client that has a +similar API to NATS / Socket.io, but is much, much better in so many ways: + +- It has global pub/sub just like with NATS. This uses the server to + rebroadcast messages, and for authentication. + Better than NATS: Authentication is done for a subject *as + needed* instead of at connection time. + +- Message can be arbitrarily large and they are *automatically* divided + into chunks and reassembled. Better than both NATS and socket.io. + +- There are multiple supported ways of encoding messages, and + no coordination is required with the server or other clients! E.g., + one message can be sent with one encoding and the next with a different + encoding and that's fine. + - MsgPack: https://msgpack.org/ -- a very compact encoding that handles + dates nicely and small numbers efficiently. This also works + well with binary Buffer objects, which is nice. + - JsonCodec: uses JSON.stringify and TextEncoder. This does not work + with Buffer or Date and is less compact, but can be very fast. + + +THE CORE API + +This section contains the crucial information you have to know to build a distributed +system using Conat. It's our take on the NATS primitives (it's not exactly the +same, but it is close). It's basically a symmetrical pub/sub/reqest/respond model +for messaging on which you can build distributed systems. The tricky part, which +NATS.js gets wrong (in my opinion), is implementing this in a way that is robust +and scalable, in terms for authentication, real world browser connectivity and +so on. Our approach is to use proven mature technology like socket.io, sqlite +and valkey, instead of writing everything from scratch. + +Clients: We view all clients as plugged into a common "dial tone", +except for optional permissions that are configured when starting the server. +The methods you call on the client to build everything are: + + - subscribe, subscribeSync - subscribe to a subject which returns an + async iterator over all messages that match the subject published by + anyone with permission to do so. If you provide the same optional + queue parameter for multiple subscribers, then one subscriber in each queue group + receives each message. The async form confirms the subscription was created + before returning. A client may only create one subscription to a + given subject at a time, to greatly reduce the chance of leaks and + simplify code. **There is no size limit on messages.** + Subscriptions are guaranteed to stay valid until the client ends them; + they do not stop working due to client or server reconnects or restarts. + + - publish, publishSync - publish to a subject. The async version returns + a count of the number of recipients, whereas the sync version is + fire-and-forget. + + - request - send a message to a subject, and if there is at least one + subscriber listening, it may respond. If there are no subscribers, + it throws a 503 error. To create a microservice, subscribe + to a subject pattern and called mesg.respond(...) on each message you + receive. + + - requestMany - send a message to a subject, and receive many + messages in reply. Typically you end the response stream by sending + a null message, but what you do is up to you. This is very useful + for streaming arbitrarily large data, long running changefeeds, LLM + responses, etc. + + +Messages: A message mesg is: + + - Data: + - subject - the subject the message was sent to + - encoding - usually MessagePack + - raw - encoded binary data + - headers - a JSON-able Javascript object. + + - Methods: + - data: this is a property, so if you do mesg.data, then it decodes raw + and returns the resulting Javascript object. + - respond, respondSync: if REPLY_HEADER is set, calling this publishes a + respond message to the original sender of the message. + + +Persistence: + +We also implement persistent streams, where you can also set a key. This can +be used to build the analogue of Jetstream's streams and kv stores. The object +store isn't necessary since there is no limit on message size. Conat's persistent +streams are compressed by default and backed by individual sqlite files, which +makes them very memory efficient and it is easy to tier storage to cloud storage. + +UNIT TESTS: See packages/server/nats/test/core + +MISC NOTES: + +NOTE: There is a socketio msgpack parser, but it just doesn't +work at all, which is weird. Also, I think it's impossible to +do the sort of chunking we want at the level of a socket.io +parser -- it's just not possible in that the encoding. We customize +things purely client side without using a parser, and get a much +simpler and better result, inspired by how NATS approaches things +with opaque messages. + + +SUBSCRIPTION ROBUSTNESS: When you call client.subscribe(...) you get back an async iterator. +It ONLY ends when you explicitly do the standard ways of terminating +such an iterator, including calling .close() on it. It is a MAJOR BUG +if it were to terminate for any other reason. In particular, the subscription +MUST NEVER throw an error or silently end when the connection is dropped +then resumed, or the server is restarted, or the client connects to +a different server! These situations can, of course, result in missing +some messages, but that's understood. There are no guarantees at all with +a subscription that every message is received. That said, we have enabled +connectionStateRecovery (and added special conat support for it) so no messages +are dropped for temporary disconnects, even up to several minutes, +and even in valkey cluster mode! Finally, any time a client disconnects +and reconnects, the client ensures that all subscriptions exist for it on the server +via a sync process. + +Subscription robustness is a major difference with NATS.js, which would +mysteriously terminate subscriptions for a variety of reasons, meaning that any +code using subscriptions had to be wrapped in ugly complexity to be +usable in production. + +USAGE: + +The following should mostly work to interactively play around with this +code and develop it. It's NOT automatically tested and depends on your +environment though, so may break. See the unit tests in + + packages/server/nats/test/core/ + +for something that definitely works perfectly. + + +For developing at the command line, cd to packages/backend, then in node: + + c = require('@cocalc/backend/conat/conat').connect() + +or + + c = require('@cocalc/conat/core/client').connect('http://localhost:3000') + + c.watch('a') + + s = await c.subscribe('a'); for await (const x of s) { console.log(x.length)} + +// in another console + + c = require('@cocalc/backend/conat/conat').connect() + c.publish('a', 'hello there') + +// in browser (right now) + + cc.nats.conat() + +// client server: + + s = await c.subscribe('eval'); for await(const x of s) { x.respond(eval(x.data)) } + +then in another console + + f = async () => (await c.request('eval', '2+3')).data + await f() + + t = Date.now(); for(i=0;i<1000;i++) { await f()} ; Date.now()-t + +// slower, but won't silently fail due to errors, etc. + + f2 = async () => (await c.request('eval', '2+3', {confirm:true})).data + +Wildcard subject: + + + c = require('@cocalc/conat/core/client').connect(); c.watch('a.*'); + + + c = require('@cocalc/conat/core/client').connect(); c.publish('a.x', 'foo') + + +Testing disconnect + + c.sub('>') + c.conn.io.engine.close();0; + +other: + + a=0; setInterval(()=>c.pub('a',a++), 250) + +*/ + +import { + connect as connectToSocketIO, + type SocketOptions, + type ManagerOptions, +} from "socket.io-client"; +import { EventIterator } from "@cocalc/util/event-iterator"; +import type { ServerInfo } from "./types"; +import * as msgpack from "@msgpack/msgpack"; +import { randomId } from "@cocalc/conat/names"; +import type { JSONValue } from "@cocalc/util/types"; +import { EventEmitter } from "events"; +import { callback } from "awaiting"; +import { + isValidSubject, + isValidSubjectWithoutWildcards, +} from "@cocalc/conat/util"; +import { reuseInFlight } from "@cocalc/util/reuse-in-flight"; +import { once } from "@cocalc/util/async-utils"; +import { getLogger } from "@cocalc/conat/client"; + +const logger = getLogger("core/client"); + +interface Options { + inboxPrefix?: string; + path?: string; +} + +export type ConnectOptions = Options & { + noCache?: boolean; +} & Partial & + Partial; + +let theClient: Client | undefined = undefined; +export function connect( + address = "http://localhost:3000", + options?: ConnectOptions, +) { + const noCache = options?.noCache; + if (!noCache && theClient !== undefined) { + return theClient; + } + const client = new Client(address, options); + if (!noCache) { + theClient = client; + } + return client; +} + +const INBOX_PREFIX = "_INBOX"; +const REPLY_HEADER = "CoCalc-Reply"; +const DEFAULT_MAX_WAIT = 30000; +const DEFAULT_REQUEST_TIMEOUT = 10000; +const MAX_HEADER_SIZE = 100000; + +export enum DataEncoding { + MsgPack = 0, + JsonCodec = 1, +} + +interface SubscriptionOptions { + maxWait?: number; + mesgLimit?: number; + queue?: string; + ephemeral?: boolean; + respond?: Function; + timeout?: number; +} + +// WARNING! This is the default and you can't just change it! +// Yes, for specific messages you can, but in general DO NOT. The reason is because, e.g., +// JSON will turn Dates into strings, and we no longer fix that. So unless you modify the +// JsonCodec to handle Date's properly, don't change this!! +const DEFAULT_ENCODING = DataEncoding.MsgPack; + +interface ClientOptions { + inboxPrefix?: string; +} + +export class Client { + public conn: ReturnType; + // queueGroups is a map from subject to the queue group for the subscription to that subject + private queueGroups: { [subject: string]: string } = {}; + public info: ServerInfo | undefined = undefined; + private readonly options: ClientOptions & { address: string }; + private inboxSubject: string; + private inbox?: EventEmitter; + + constructor(address: string, options: Options = {}) { + this.options = { address, ...options }; + + this.conn = connectToSocketIO(address, { + // cocalc itself only works with new clients. + // TODO: chunking + long polling is tricky; need to shrink chunk size a lot, since + // I guess no binary protocol. + // Also a major problem if we allow long polling is that we must always use at most + // half the chunk size... because there is no way to know if recipients will be + // using long polling to RECEIVE messages. + transports: ["websocket"], + ...options, + }); + this.conn.on("info", (info) => { + this.info = info; + }); + this.conn.on("connect", () => { + logger.debug(`Conat: Connected to ${this.options.address}`); + this.syncSubscriptions(); + }); + + this.initInbox(); + } + + private temporaryInboxSubject = () => { + if (!this.inboxSubject) { + throw Error("inbox not setup properly"); + } + return `${this.inboxSubject}.${randomId()}`; + }; + + private initInbox = () => { + // For request/respond instead of setting up one + // inbox *every time there is a request*, we setup a single + // inbox once and for all for all responses. We listen for + // everything to inbox...Prefix.* and emit it via this.inbox. + // The request sender then listens on this.inbox for the response. + // We *could* use a regular subscription for each request, + // but (1) that massively increases the load on the server for + // every single request (having to create and destroy subscriptions) + // and (2) there is a race condition between creating that subscription + // and getting the response; it's fine with one server, but with + // multiple servers solving the race condition would slow everything down + // due to having to wait for so many acknowledgements. Instead, we + // remove all those problems by just using a single inbox subscription. + const inboxPrefix = this.options.inboxPrefix ?? INBOX_PREFIX; + if (!inboxPrefix.startsWith(INBOX_PREFIX)) { + throw Error(`custom inboxPrefix must start with '${INBOX_PREFIX}'`); + } + this.inboxSubject = `${inboxPrefix}.${randomId()}`; + let sub; + try { + sub = this.subscribeSync(this.inboxSubject + ".*"); + } catch (err) { + // this should only fail due to permissions issues, at which point + // request can't work, but pub/sub can. + logger.debug(`WARNING: inbox not available -- ${err}`); + this.inboxSubject = ""; + return; + } + this.inbox = new EventEmitter(); + (async () => { + for await (const mesg of sub) { + if (this.inbox == null) { + return; + } + this.inbox.emit(mesg.subject, mesg); + } + })(); + }; + + // There should usually be no reason to call this because socket.io + // is so good at abstracting this away. It's useful for unit testing. + waitUntilConnected = reuseInFlight(async () => { + if (this.conn.connected) { + return; + } + // @ts-ignore + await once(this.conn, "connect"); + }); + + close = () => { + for (const subject in this.queueGroups) { + this.conn.emit("unsubscribe", { subject }); + delete this.queueGroups[subject]; + } + // @ts-ignore + delete this.queueGroups; + this.conn.close(); + theClient = undefined; + // @ts-ignore + delete this.inboxSubject; + delete this.inbox; + // @ts-ignore + delete this.options; + // @ts-ignore + delete this.info; + }; + + // syncSubscriptions ensures that we're subscribed on server + // to what we think we're subscribed to. + private syncSubscriptions = async () => { + const subs = await this.getSubscriptions(); + // console.log("syncSubscriptions", { + // server: subs, + // clent: Object.keys(this.queueGroups), + // }); + // logger.debug`Conat: restoring subscriptions`, Array.from(subs)); + for (const subject in this.queueGroups) { + // subscribe on backend to all subscriptions we think we should have that + // the server does not have + if (!subs.has(subject)) { + this.conn.emit("subscribe", { + subject, + queue: this.queueGroups[subject], + }); + } + } + for (const subject in subs) { + if (this.queueGroups[subject] != null) { + // server thinks we're subscribed but we do not, so cancel + this.conn.emit("unsubscribe", { subject }); + } + } + }; + + numSubscriptions = () => Object.keys(this.queueGroups).length; + + private getSubscriptions = async (): Promise> => { + const f = (cb) => + this.conn.emit("subscriptions", null, (subs) => cb(undefined, subs)); + return new Set(await callback(f)); + }; + + // returns EventEmitter that emits 'message', mesg: Message + private subscriptionEmitter = ( + subject: string, + { + closeWhenOffCalled, + queue, + confirm, + ephemeral, + timeout, + }: { + // if true, when the off method of the event emitter is called, then + // the entire subscription is closed. This is very useful when we wrap the + // EvenEmitter in an async iterator. + closeWhenOffCalled?: boolean; + + // the queue group -- if not given, then one is randomly assigned. + queue?: string; + + // confirm -- get confirmation back from server that subscription was created + confirm?: boolean; + + // If ephemeral is true this subscription is deleted from the server + // the moment the client disconnects, so that the server doesn't queue + // up messages to to this subscription. + // IMPORTANT: the *subscription itself* doesn't get killed on disconnect! + // The subscription will be resumed automatically when the client reconnects. + // The idea is that there will be no old queued up messages waiting for it. + // + // If ephemeral is false, this subscription is acting more as a client + // to receive data, so we want it to persist on the server longterm + // even while disconnected (and leverage connectionStateRecovery). E.g., + // it might be listening for updates to a stream. After a few minutes + // it still stops queuing up messages though. + ephemeral?: boolean; + + // how long to wait to confirm creation of the subscription; + // only used when confirm=true. + timeout?: number; + } = {}, + ): { sub: SubscriptionEmitter; promise? } => { + if (!isValidSubject(subject)) { + throw Error(`invalid subscribe subject '${subject}'`); + } + if (!queue) { + queue = randomId(); + } + if (this.queueGroups[subject] != null) { + throw Error(`already subscribed to '${subject}'`); + } + this.queueGroups[subject] = queue; + const sub = new SubscriptionEmitter({ + client: this, + subject, + closeWhenOffCalled, + }); + let promise; + if (confirm) { + const f = (cb) => { + const handle = (response) => { + if (response?.error) { + cb(new ConatError(response.error, { code: response.code })); + } else { + cb(response?.error, response); + } + }; + if (timeout) { + this.conn + .timeout(timeout) + .emit( + "subscribe", + { subject, queue, ephemeral }, + (err, response) => { + if (err) { + handle({ error: `${err}`, code: 408 }); + } else { + handle(response); + } + }, + ); + } else { + this.conn.emit("subscribe", { subject, queue, ephemeral }, handle); + } + }; + promise = callback(f); + } else { + this.conn.emit("subscribe", { subject, queue, ephemeral }); + promise = undefined; + } + sub.once("close", () => { + if (this.queueGroups?.[subject] == null) { + return; + } + this.conn.emit("unsubscribe", { subject }); + delete this.queueGroups[subject]; + }); + return { sub, promise }; + }; + + private subscriptionIterator = ( + sub, + opts?: SubscriptionOptions, + ): Subscription => { + // @ts-ignore + const iter = new EventIterator(sub, "message", { + idle: opts?.maxWait, + limit: opts?.mesgLimit, + map: (args) => args[0], + }); + return iter; + }; + + subscribeSync = ( + subject: string, + opts?: SubscriptionOptions, + ): Subscription => { + const { sub } = this.subscriptionEmitter(subject, { + confirm: false, + closeWhenOffCalled: true, + queue: opts?.queue, + ephemeral: opts?.ephemeral, + }); + return this.subscriptionIterator(sub, opts); + }; + + subscribe = async ( + subject: string, + opts?: SubscriptionOptions, + ): Promise => { + const { sub, promise } = this.subscriptionEmitter(subject, { + confirm: true, + closeWhenOffCalled: true, + queue: opts?.queue, + ephemeral: opts?.ephemeral, + timeout: opts?.timeout, + }); + await promise; + return this.subscriptionIterator(sub, opts); + }; + + sub = this.subscribe; + + /* + A service is a subscription with a function to respond to requests by name. + Call service with an implementation: + + service = await client1.service('arith', {mul : async (a,b)=>{a*b}, add : async (a,b)=>a+b}, {ephemeral:true}) + + Use the service: + arith = await client2.call('arith') + await arith.mul(2,3) + await arith.add(2,3) + + There's by default a single queue group '0', so if you create multiple services on various + computers, then requests are load balanced across them automatically. + + Close the service when done: + + service.close(); + */ + service: ( + subject: string, + impl: T, + // default to ephemeral:true for services + opts?: SubscriptionOptions, + ) => Promise = async (subject, impl, opts) => { + const sub = await this.subscribe(subject, { + ephemeral: true, + ...opts, + queue: "0", + }); + const respond = async (mesg: Message) => { + try { + const [name, args] = mesg.data; + mesg.respondSync(await impl[name](...args)); + } catch (err) { + mesg.respondSync(null, { headers: { error: `${err}` } }); + } + }; + const loop = async () => { + // todo -- param to set max number of responses at once. + for await (const mesg of sub) { + respond(mesg); + } + }; + loop(); + return sub; + }; + + // Call a service as defined above. + call(subject: string, opts?: PublishOptions): T { + const call = async (name: string, args: any[]) => { + const resp = await this.request(subject, [name, args], opts); + if (resp.headers?.error) { + throw Error(`${resp.headers.error}`); + } else { + return resp.data; + } + }; + + return new Proxy( + {}, + { + get: (_, name) => { + if (typeof name !== "string") { + return undefined; + } + return async (...args) => await call(name, args); + }, + }, + ) as T; + } + + publishSync = ( + subject: string, + mesg, + opts?: PublishOptions, + ): { bytes: number } => { + return this._publish(subject, mesg, opts); + }; + + publish = async ( + subject: string, + mesg, + opts?: PublishOptions, + ): Promise<{ + // bytes encoded (doesn't count some extra wrapping) + bytes: number; + // count is the number of matching subscriptions + // that the server *sent* this message to since the server knows about them. + // However, there's no guaranteee that the subscribers actually exist + // **right now** or received these messages. + count: number; + }> => { + const { bytes, getCount, promise } = this._publish(subject, mesg, { + ...opts, + confirm: true, + }); + await promise; + return { bytes, count: getCount?.()! }; + }; + + private _publish = ( + subject: string, + mesg, + { + headers, + raw, + encoding = DEFAULT_ENCODING, + confirm, + timeout, + }: PublishOptions & { confirm?: boolean } = {}, + ) => { + if (!isValidSubjectWithoutWildcards(subject)) { + throw Error(`invalid publish subject ${subject}`); + } + raw = raw ?? encode({ encoding, mesg }); + // default to 1MB is safe since it's at least that big. + const chunkSize = Math.max( + 1000, + (this.info?.max_payload ?? 1e6) - MAX_HEADER_SIZE, + ); + let seq = 0; + let id = randomId(); + const promises: any[] = []; + let count = 0; + for (let i = 0; i < raw.length; i += chunkSize) { + const done = i + chunkSize >= raw.length ? 1 : 0; + const v: any[] = [ + subject, + id, + seq, + done, + encoding, + raw.slice(i, i + chunkSize), + ]; + if (done && headers) { + v.push(headers); + } + if (confirm) { + const f = (cb) => { + const handle = (response) => { + if (response?.error) { + cb(new ConatError(response.error, { code: response.code })); + } else { + cb(response?.error, response); + } + }; + if (timeout) { + this.conn.timeout(timeout).emit("publish", v, (err, response) => { + if (err) { + handle({ error: `${err}`, code: 408 }); + } else { + handle(response); + } + }); + } else { + this.conn.emit("publish", v, handle); + } + }; + const promise = (async () => { + const response = await callback(f); + count = Math.max(count, response.count ?? 0); + })(); + promises.push(promise); + } else { + this.conn.emit("publish", v); + } + seq += 1; + } + if (confirm) { + return { + bytes: raw.length, + getCount: () => count, + promise: Promise.all(promises), + }; + } + return { bytes: raw.length }; + }; + + pub = this.publish; + + request = async ( + subject: string, + mesg: any, + { + timeout = DEFAULT_REQUEST_TIMEOUT, + ...options + }: PublishOptions & { timeout?: number } = {}, + ): Promise => { + if (timeout <= 0) { + throw Error("timeout must be positive"); + } + const inboxSubject = this.temporaryInboxSubject(); + if (this.inbox == null) { + throw Error("inbox not configured"); + } + const sub = new EventIterator(this.inbox, inboxSubject, { + idle: timeout, + limit: 1, + map: (args) => args[0], + }); + + const { count } = await this.publish(subject, mesg, { + ...options, + timeout, + headers: { ...options?.headers, [REPLY_HEADER]: inboxSubject }, + }); + if (!count) { + sub.stop(); + throw new ConatError(`request -- no subscribers matching '${subject}'`, { + code: 503, + }); + } + for await (const resp of sub) { + sub.stop(); + return resp; + } + sub.stop(); + throw new ConatError("timeout", { code: 408 }); + }; + + async *requestMany( + subject: string, + mesg: any, + { + maxMessages, + maxWait = DEFAULT_MAX_WAIT, + ...options + }: PublishOptions & { + maxWait?: number; + maxMessages?: number; + } = {}, + ) { + if (maxMessages != null && maxMessages <= 0) { + throw Error("maxMessages must be positive"); + } + if (maxWait != null && maxWait <= 0) { + throw Error("maxWait must be positive"); + } + const inboxSubject = this.temporaryInboxSubject(); + if (this.inbox == null) { + throw Error("inbox not configured"); + } + const sub = new EventIterator(this.inbox, inboxSubject, { + idle: maxWait, + limit: maxMessages, + map: (args) => args[0], + }); + const { count } = await this.publish(subject, mesg, { + headers: { ...options?.headers, [REPLY_HEADER]: inboxSubject }, + }); + if (!count) { + sub.stop(); + throw new ConatError( + `requestMany -- no subscribers matching ${subject}`, + { code: 503 }, + ); + } + let numMessages = 0; + for await (const resp of sub) { + yield resp; + numMessages += 1; + if (maxMessages && numMessages >= maxMessages) { + sub.end(); + return; + } + } + sub.end(); + throw new ConatError("timeout", { code: 408 }); + } + + // watch: this is mainly for debugging and interactive use. + watch = ( + subject: string, + cb = (x) => console.log(`${x.subject}:`, x.data, x.headers), + opts?, + ) => { + const sub = this.subscribeSync(subject, opts); + const f = async () => { + for await (const x of sub) { + cb(x); + } + }; + f(); + return sub; + }; +} + +interface PublishOptions { + headers?: Headers; + // if encoding is given, it specifies the encoding used to encode the message + encoding?: DataEncoding; + // if raw is given, then it is assumed to be the raw binary + // encoded message (using encoding) and any mesg parameter + // is *IGNORED*. + raw?; + // timeout used when publishing a message and awaiting a response. + timeout?: number; +} + +export function encode({ + encoding, + mesg, +}: { + encoding: DataEncoding; + mesg: any; +}) { + if (encoding == DataEncoding.MsgPack) { + return msgpack.encode(mesg); + } else if (encoding == DataEncoding.JsonCodec) { + return jsonEncoder(mesg); + } else { + throw Error(`unknown encoding ${encoding}`); + } +} + +export function decode({ + encoding, + data, +}: { + encoding: DataEncoding; + data; +}): any { + if (encoding == DataEncoding.MsgPack) { + return msgpack.decode(data); + } else if (encoding == DataEncoding.JsonCodec) { + return jsonDecoder(data); + } else { + throw Error(`unknown encoding ${encoding}`); + } +} + +let textEncoder: TextEncoder | undefined = undefined; +let textDecoder: TextDecoder | undefined = undefined; + +function jsonEncoder(obj: any) { + if (textEncoder === undefined) { + textEncoder = new TextEncoder(); + } + return textEncoder.encode(JSON.stringify(obj)); +} + +function jsonDecoder(data: Buffer): any { + if (textDecoder === undefined) { + textDecoder = new TextDecoder(); + } + return JSON.parse(textDecoder.decode(data)); +} + +interface Chunk { + id: string; + seq: number; + done: number; + buffer: Buffer; + headers?: any; +} + +class SubscriptionEmitter extends EventEmitter { + private incoming: { [id: string]: Partial[] } = {}; + private client: Client; + private closeWhenOffCalled?: boolean; + private subject: string; + + constructor({ client, subject, closeWhenOffCalled }) { + super(); + this.client = client; + this.subject = subject; + this.client.conn.on(subject, this.handle); + this.closeWhenOffCalled = closeWhenOffCalled; + } + + close = () => { + this.emit("close"); + this.client.conn.removeListener(this.subject, this.handle); + // @ts-ignore + delete this.incoming; + // @ts-ignore + delete this.client; + // @ts-ignore + delete this.subject; + // @ts-ignore + delete this.closeWhenOffCalled; + }; + + off(a, b) { + super.off(a, b); + if (this.closeWhenOffCalled) { + this.close(); + } + return this; + } + + private handle = ({ subject, data }) => { + if (this.client == null) { + return; + } + const [id, seq, done, encoding, buffer, headers] = data; + // console.log({ id, seq, done, encoding, buffer, headers }); + const chunk = { seq, done, encoding, buffer, headers }; + const { incoming } = this; + if (incoming[id] == null) { + if (seq != 0) { + // part of a dropped message -- by definition this should just + // silently happen and be handled via application level encodings + // elsewhere + console.log("WARNING: drop -- first message has wrong seq", { seq }); + this.emit("drop"); + return; + } + incoming[id] = []; + } else { + const prev = incoming[id].slice(-1)[0].seq ?? -100; + if (prev + 1 != seq) { + console.log("WARNING: drop -- seq mismatch", { prev, seq }); + // part of message was dropped -- discard everything + delete incoming[id]; + this.emit("drop"); + return; + } + } + incoming[id].push(chunk); + if (chunk.done) { + // console.log("assembling ", incoming[id].length, "chunks"); + const chunks = incoming[id].map((x) => x.buffer!); + const raw = concatArrayBuffers(chunks); + delete incoming[id]; + const mesg = new Message({ + encoding, + raw, + headers, + client: this.client, + subject, + }); + this.emit("message", mesg); + } + }; +} + +function concatArrayBuffers(buffers) { + if (buffers.length == 1) { + return buffers[0]; + } + if (Buffer.isBuffer(buffers[0])) { + return Buffer.concat(buffers); + } + // browser fallback + const totalLength = buffers.reduce((sum, buf) => sum + buf.byteLength, 0); + const result = new Uint8Array(totalLength); + let offset = 0; + for (const buf of buffers) { + result.set(new Uint8Array(buf), offset); + offset += buf.byteLength; + } + + return result.buffer; +} + +export type Headers = { [key: string]: JSONValue }; + +export class MessageData { + public readonly encoding: DataEncoding; + public readonly raw; + public readonly headers?: Headers; + + constructor({ encoding, raw, headers }) { + this.encoding = encoding; + this.raw = raw; + this.headers = headers; + } + + get data(): T { + return decode({ encoding: this.encoding, data: this.raw }); + } + + get length(): number { + // raw is binary data so it's the closest thing we have to the + // size of this message. It would also make sense to include + // the headers, but JSON'ing them would be expensive, so we don't. + return this.raw.length; + } +} + +export class Message extends MessageData { + private client: Client; + public readonly subject; + + constructor({ encoding, raw, headers, client, subject }) { + super({ encoding, raw, headers }); + this.client = client; + this.subject = subject; + } + + private respondSubject = () => { + const subject = this.headers?.[REPLY_HEADER]; + if (!subject) { + console.log( + `WARNING: respond -- message to '${this.subject}' is not a request`, + ); + return; + } + return `${subject}`; + }; + + respondSync = (mesg, opts?: PublishOptions): { bytes: number } => { + const subject = this.respondSubject(); + if (!subject) return { bytes: 0 }; + return this.client.publishSync(subject, mesg, opts); + }; + + respond = async ( + mesg, + opts: PublishOptions = {}, + ): Promise<{ bytes: number; count: number }> => { + const subject = this.respondSubject(); + if (!subject) { + return { bytes: 0, count: 0 }; + } + return await this.client.publish(subject, mesg, opts); + }; +} + +export function messageData( + mesg, + { headers, raw, encoding = DEFAULT_ENCODING }: PublishOptions = {}, +) { + return new MessageData({ + encoding, + raw: raw ?? encode({ encoding, mesg }), + headers, + }); +} + +export type Subscription = EventIterator; + +export class ConatError extends Error { + code: string | number; + constructor(mesg: string, { code }) { + super(mesg); + this.code = code; + } +} diff --git a/src/packages/conat/core/server.ts b/src/packages/conat/core/server.ts new file mode 100644 index 0000000000..18e1241d7a --- /dev/null +++ b/src/packages/conat/core/server.ts @@ -0,0 +1,477 @@ +/* + + +cd packages/server + + + s = await require('@cocalc/server/conat/socketio').initConatServer() + + s0 = await require('@cocalc/server/conat/socketio').initConatServer({port:3000}) + + +For clustering: + + s0 = await require('@cocalc/server/conat/socketio').initConatServer({valkey:'redis://localhost:6379', port:3000}) + + s1 = await require('@cocalc/server/conat/socketio').initConatServer({valkey:'redis://localhost:6379', port:3001}) + +Corresponding clients: + + c0 = require('@cocalc/conat/core/client').connect('http://localhost:3000') + + c1 = require('@cocalc/conat/core/client').connect('http://localhost:3001') + +--- + +Or from cocalc/src + + pnpm conat-server + +*/ + +import type { ServerInfo } from "./types"; +import { + matchesPattern, + isValidSubject, + isValidSubjectWithoutWildcards, +} from "@cocalc/conat/util"; +import { createAdapter } from "@socket.io/redis-streams-adapter"; +import Valkey from "iovalkey"; +import { delay } from "awaiting"; +import { + ConatError, + connect, + type Client, + type ConnectOptions, +} from "./client"; + +// This is just the default with socket.io, but we might want a bigger +// size, which could mean more RAM usage by the servers. +// Our client protocol automatically chunks messages, so this payload +// size ONLY impacts performance, never application level constraints. +const MB = 1e6; +const MAX_PAYLOAD = 1 * MB; + +const MAX_DISCONNECTION_DURATION = 2 * 60 * 1000; + +const MAX_SUBSCRIPTIONS_PER_CLIENT = 250; + +const DEBUG = false; + +interface InterestUpdate { + op: "add" | "delete"; + subject: string; + queue?: string; + room: string; +} + +export function init(opts) { + return new ConatServer(opts); +} + +export type UserFunction = (socket) => Promise; +export type AllowFunction = (opts: { + type: "pub" | "sub"; + user: any; + subject: string; +}) => Promise; + +export interface Options { + Server; + httpServer?; + port?: number; + id?: number; + logger?; + path?: string; + getUser?: UserFunction; + isAllowed?: AllowFunction; + valkey?: string; + maxDisconnectionDuration?: number; + maxSubscriptionsPerClient?: number; +} + +export class ConatServer { + public readonly io; + public readonly id: number; + private readonly logger: (...args) => void; + private interest: { [subject: string]: { [queue: string]: Set } } = + {}; + private subscriptions: { [socketId: string]: Set } = {}; + private getUser: UserFunction; + private isAllowed: AllowFunction; + readonly options: Partial; + private readonly valkey?: { adapter: Valkey; pub: Valkey; sub: Valkey }; + private sockets: { [id: string]: any } = {}; + // which subscriptions are ephemeral + private ephemeral: { [id: string]: Set } = {}; + + constructor(options: Options) { + const { + Server, + httpServer, + port = 3000, + id = 0, + logger, + path, + getUser, + isAllowed, + valkey, + maxDisconnectionDuration = MAX_DISCONNECTION_DURATION, + maxSubscriptionsPerClient = MAX_SUBSCRIPTIONS_PER_CLIENT, + } = options; + this.options = { + port, + id, + path, + valkey, + maxDisconnectionDuration, + maxSubscriptionsPerClient, + }; + this.getUser = getUser ?? (async () => null); + this.isAllowed = isAllowed ?? (async () => true); + this.id = id; + this.logger = logger; + if (valkey) { + this.log("Using Valkey for clustering"); + this.valkey = { + adapter: new Valkey(valkey), + pub: new Valkey(valkey), + sub: new Valkey(valkey), + }; + } + this.log("Starting Conat server...", { + id, + path, + port, + httpServer: httpServer != null, + valkey, + }); + const socketioOptions = { + maxHttpBufferSize: MAX_PAYLOAD, + path, + adapter: + this.valkey != null ? createAdapter(this.valkey.adapter) : undefined, + connectionStateRecovery: { maxDisconnectionDuration }, + // perMessageDeflate is disabled by default in socket.io due to FUD -- see https://github.com/socketio/socket.io/issues/3477#issuecomment-930503313 + perMessageDeflate: { threshold: 1024 }, + }; + this.log(socketioOptions); + if (httpServer) { + this.io = new Server(httpServer, socketioOptions); + } else { + this.io = new Server(port, socketioOptions); + this.log(`listening on port ${port}`); + } + this.init(); + } + + private init = () => { + this.io.on("connection", this.handleSocket); + if (this.valkey != null) { + this.initInterestSubscription(); + } + }; + + close = async () => { + await this.io.close(); + for (const prop of ["interest", "subscriptions", "sockets", "services"]) { + delete this[prop]; + } + }; + + private info = (): ServerInfo => { + return { + max_payload: MAX_PAYLOAD, + }; + }; + + private log = (...args) => { + this.logger?.(new Date().toISOString(), "conat", this.id, ":", ...args); + }; + + private unsubscribe = async ({ socket, subject }) => { + if (DEBUG) { + this.log("unsubscribe ", { id: socket.id, subject }); + } + const room = socketSubjectRoom({ socket, subject }); + socket.leave(room); + (this.ephemeral[socket.id] ?? new Set()).delete(subject); + await this.updateInterest({ op: "delete", subject, room }); + }; + + private initInterestSubscription = async () => { + if (this.valkey == null) { + throw Error("valkey not defined"); + } + // [ ] TODO: we need to limit the size of the stream and/or + // timeeout interest and/or reconcile it periodically with + // actual connected users to avoid the interest object + // getting too big for now reason. E.g, maybe all subscriptions + // need to be renewed periodically + let lastId = "0"; + let d = 50; + while (true) { + // console.log("waiting for interest update"); + const results = await this.valkey.sub.xread( + "block" as any, + 0, + "STREAMS", + "interest", + lastId, + ); + // console.log("got ", results); + if (results == null) { + d = Math.min(1000, d * 1.2); + await delay(d); + continue; + } else { + d = 50; + } + const [_, messages] = results[0]; + for (const message of messages) { + const update = JSON.parse(message[1][1]); + this._updateInterest(update); + } + lastId = messages[messages.length - 1][0]; + // console.log({ lastId }); + } + }; + + private updateInterest = async (update: InterestUpdate) => { + if (this.valkey != null) { + // publish interest change to valkey. + await this.valkey.pub.xadd( + "interest", + "*", + "update", + JSON.stringify(update), + ); + } + this._updateInterest(update); + }; + + private _updateInterest = async ({ + op, + subject, + queue, + room, + }: InterestUpdate) => { + if (op == "add") { + if (typeof queue != "string") { + throw Error("queue must not be null for add"); + } + if (this.interest[subject] == null) { + this.interest[subject] = { [queue]: new Set([room]) }; + } else if (this.interest[subject][queue] == null) { + this.interest[subject][queue] = new Set([room]); + } else { + this.interest[subject][queue].add(room); + } + } else if (op == "delete") { + const groups = this.interest[subject]; + if (groups != null) { + let nonempty = false; + for (const queue in groups) { + groups[queue].delete(room); + if (groups[queue].size == 0) { + delete groups[queue]; + } else { + nonempty = true; + } + } + if (!nonempty) { + // no interest anymore + delete this.interest[subject]; + } + } + } else { + throw Error(`invalid op ${op}`); + } + }; + + private subscribe = async ({ socket, subject, queue, ephemeral, user }) => { + if (DEBUG) { + this.log("subscribe ", { id: socket.id, subject, queue }); + } + if (typeof queue != "string") { + throw Error("queue must be defined"); + } + if (!isValidSubject(subject)) { + throw Error("invalid subject"); + return; + } + if (!(await this.isAllowed({ user, subject, type: "sub" }))) { + throw new ConatError(`permission denied subscribing to '${subject}'`, { + code: 403, + }); + } + const maxSubs = this.options.maxSubscriptionsPerClient ?? 0; + if (maxSubs) { + const numSubs = this.subscriptions?.[socket.id]?.size ?? 0; + if (numSubs >= maxSubs) { + // error 429 == "too many requests" + throw new ConatError( + `there is a limit of at most ${maxSubs} subscriptions and you currently have ${numSubs} subscriptions`, + { code: 429 }, + ); + } + } + const room = socketSubjectRoom({ socket, subject }); + // critical to await socket.join so we don't advertise that there is + // a subscriber before the socket is actually getting messages. + await socket.join(room); + await this.updateInterest({ op: "add", subject, room, queue }); + if (this.ephemeral[socket.id] === undefined) { + this.ephemeral[socket.id] = new Set(); + } + if (ephemeral) { + this.ephemeral[socket.id].add(subject); + } else { + this.ephemeral[socket.id].delete(subject); + } + }; + + private publish = async ({ subject, data, from }): Promise => { + if (!isValidSubjectWithoutWildcards(subject)) { + throw Error("invalid subject"); + } + if (!(await this.isAllowed({ user: from, subject, type: "pub" }))) { + throw new ConatError(`permission denied publishing to '${subject}'`, { + code: 403, + }); + } + let count = 0; + for (const pattern in this.interest) { + if (!matchesPattern({ pattern, subject })) { + continue; + } + const g = this.interest[pattern]; + if (g === undefined) { + continue; + } + if (DEBUG) { + this.log("publishing", { subject, data, g }); + } + // send to exactly one in each queue group + for (const queue in g) { + const choice = randomChoice(g[queue]); + if (choice !== undefined) { + this.io.to(choice).emit(pattern, { subject, data }); + count += 1; + } + } + } + return count; + }; + + private handleSocket = async (socket) => { + this.sockets[socket.id] = socket; + socket.on("close", () => delete this.sockets[socket.id]); + + let user: any = null; + user = await this.getUser(socket); + const id = socket.id; + this.log("got connection", { id, user }); + if (this.subscriptions[id] == null) { + this.subscriptions[id] = new Set(); + } + + socket.emit("info", { ...this.info(), user }); + + socket.on("publish", async ([subject, ...data], respond) => { + try { + const count = await this.publish({ subject, data, from: user }); + respond?.({ count }); + } catch (err) { + respond?.({ error: `${err}`, code: err.code }); + } + }); + + socket.on("subscribe", async ({ subject, queue, ephemeral }, respond) => { + try { + if (this.subscriptions[id].has(subject)) { + throw Error(`already subscribed to '${subject}'`); + } + await this.subscribe({ socket, subject, queue, user, ephemeral }); + this.subscriptions[id].add(subject); + respond?.({ status: "added" }); + } catch (err) { + respond?.({ error: `${err}`, code: err.code }); + } + }); + + socket.on("subscriptions", (_, respond) => { + if (respond == null) { + return; + } + respond(Array.from(this.subscriptions[id])); + }); + + socket.on("unsubscribe", ({ subject }, respond) => { + if (!this.subscriptions[id].has(subject)) { + return; + } + this.unsubscribe({ socket, subject }); + this.subscriptions[id].delete(subject); + respond?.(); + }); + + socket.on("disconnecting", async () => { + for (const subject of this.ephemeral[socket.id] ?? []) { + this.unsubscribe({ socket, subject }); + this.subscriptions[id].delete(subject); + } + const rooms = Array.from(socket.rooms) as string[]; + const d = this.options.maxDisconnectionDuration ?? 0; + // console.log(`will unsubscribe in ${d}ms unless client reconnects`); + await delay(d); + if (!this.io.of("/").adapter.sids.has(id)) { + // console.log("client not back"); + for (const room of rooms) { + const subject = getSubjectFromRoom(room); + this.unsubscribe({ socket, subject }); + } + delete this.subscriptions[id]; + } else { + // console.log("client is back!"); + } + }); + }; + + // create new client in the same process connected to this server. + // This is useful for unit testing and is not cached (i.e., multiple + // calls return distinct clients). + client = (options?: ConnectOptions): Client => { + return connect(`http://localhost:${this.options.port}`, { + path: this.options.path, + noCache: true, + ...options, + }); + }; +} + +function getSubjectFromRoom(room: string) { + if (room.startsWith("{")) { + return JSON.parse(room).subject; + } else { + return room; + } +} + +function socketSubjectRoom({ socket, subject }) { + return JSON.stringify({ id: socket.id, subject }); +} + +function randomChoice(v: Set): any { + if (v.size == 0) { + return undefined; + } + if (v.size == 1) { + for (const x of v) { + return x; + } + } + const w = Array.from(v); + const i = Math.floor(Math.random() * w.length); + return w[i]; +} diff --git a/src/packages/conat/core/types.ts b/src/packages/conat/core/types.ts new file mode 100644 index 0000000000..d477dd31e7 --- /dev/null +++ b/src/packages/conat/core/types.ts @@ -0,0 +1,3 @@ +export interface ServerInfo { + max_payload: number; +} diff --git a/src/packages/nats/files/read.ts b/src/packages/conat/files/read.ts similarity index 65% rename from src/packages/nats/files/read.ts rename to src/packages/conat/files/read.ts index 619fbd6a36..1370b46657 100644 --- a/src/packages/nats/files/read.ts +++ b/src/packages/conat/files/read.ts @@ -24,13 +24,13 @@ over a websocket for compute servers, so would just copy that code. DEVELOPMENT: -See src/packages/backend/nats/test/files/read.test.ts for unit tests. +See src/packages/backend/conat/test/files/read.test.ts for unit tests. ~/cocalc/src/packages/backend$ node -require('@cocalc/backend/nats'); a = require('@cocalc/nats/files/read'); a.createServer({project_id:'00847397-d6a8-4cb0-96a8-6ef64ac3e6cf',compute_server_id:0,createReadStream:require('fs').createReadStream}) +require('@cocalc/backend/conat'); a = require('@cocalc/conat/files/read'); a.createServer({project_id:'00847397-d6a8-4cb0-96a8-6ef64ac3e6cf',compute_server_id:0,createReadStream:require('fs').createReadStream}) -for await (const chunk of await a.readFile({project_id:'00847397-d6a8-4cb0-96a8-6ef64ac3e6cf',compute_server_id:0,path:'/tmp/a.py'})) { console.log({chunk}); } +for await (const chunk of await a.readFile({project_id:'00847397-d6a8-4cb0-96a8-6ef64ac3e6cf',compute_server_id:0,path:'/tmp/a'})) { console.log({chunk}); } for await (const chunk of await a.readFile({project_id:'00847397-d6a8-4cb0-96a8-6ef64ac3e6cf',compute_server_id:0,path:'/projects/6b851643-360e-435e-b87e-f9a6ab64a8b1/cocalc/.git/objects/pack/pack-771f7fe4ee855601463be070cf9fb9afc91f84ac.pack'})) { console.log({chunk}); } @@ -38,19 +38,18 @@ for await (const chunk of await a.readFile({project_id:'00847397-d6a8-4cb0-96a8- */ -import { getEnv } from "@cocalc/nats/client"; -import { projectSubject } from "@cocalc/nats/names"; -import { Empty, headers, type Subscription } from "@nats-io/nats-core"; -import { runLoop } from "./util"; +import { getEnv } from "@cocalc/conat/client"; +import { projectSubject } from "@cocalc/conat/names"; +import { type Subscription } from "@cocalc/conat/core/client"; let subs: { [name: string]: Subscription } = {}; export async function close({ project_id, compute_server_id, name = "" }) { - const key = getSubject({ project_id, compute_server_id, name }); - if (subs[key] == null) { + const subject = getSubject({ project_id, compute_server_id, name }); + if (subs[subject] == null) { return; } - const sub = subs[key]; - delete subs[key]; + const sub = subs[subject]; + delete subs[subject]; await sub.drain(); } @@ -73,17 +72,10 @@ export async function createServer({ compute_server_id, name, }); - if (subs[subject] != null) { - return; - } - const { nc } = await getEnv(); - runLoop({ - listen, - subs, - subject, - nc, - opts: { createReadStream }, - }); + const { cn } = await getEnv(); + const sub = await cn.subscribe(subject); + subs[subject] = sub; + listen({ sub, createReadStream }); } async function listen({ sub, createReadStream }) { @@ -99,28 +91,21 @@ async function listen({ sub, createReadStream }) { async function handleMessage(mesg, createReadStream) { try { await sendData(mesg, createReadStream); - const h = headers(); - h.append("done", ""); - mesg.respond(Empty, { headers: h }); + mesg.respond(null, { headers: { done: true } }); } catch (err) { - const h = headers(); - h.append("error", `${err}`); // console.log("sending ERROR", err); - mesg.respond(Empty, { headers: h }); + mesg.respond(null, { headers: { error: `${err}` } }); } } const MAX_NATS_CHUNK_SIZE = 16384 * 16 * 3; function getSeqHeader(seq) { - const h = headers(); - h.append("seq", `${seq}`); - return { headers: h }; + return { headers: { seq } }; } async function sendData(mesg, createReadStream) { - const { jc } = await getEnv(); - const { path } = jc.decode(mesg.data); + const { path } = mesg.data; let seq = 0; for await (let chunk of createReadStream(path, { highWaterMark: 16384 * 16 * 3, @@ -152,7 +137,7 @@ export async function* readFile({ name = "", maxWait = 1000 * 60 * 10, // 10 minutes }: ReadFileOptions) { - const { nc, jc } = await getEnv(); + const { cn } = await getEnv(); const subject = getSubject({ project_id, compute_server_id, @@ -161,23 +146,30 @@ export async function* readFile({ const v: any = []; let seq = 0; let bytes = 0; - for await (const resp of await nc.requestMany(subject, jc.encode({ path }), { - maxWait, - })) { - for (const [key, value] of resp.headers ?? []) { - if (key == "error") { - throw Error(value[0] ?? "bug"); - } else if (key == "done") { - return; - } else if (key == "seq") { - const next = parseInt(value[0]); - bytes = resp.data.length; - // console.log("received seq", { seq: next, bytes }); - if (next != seq + 1) { - throw Error(`lost data: seq=${seq}, next=${next}`); - } - seq = next; + for await (const resp of await cn.requestMany( + subject, + { path }, + { + maxWait, + }, + )) { + if (resp.headers == null) { + continue; + } + if (resp.headers.error) { + throw Error(`${resp.headers.error}`); + } + if (resp.headers.done) { + return; + } + if (resp.headers.seq) { + const next = resp.headers.seq as number; + bytes = resp.data.length; + // console.log("received seq", { seq: next, bytes }); + if (next != seq + 1) { + throw Error(`lost data: seq=${seq}, next=${next}`); } + seq = next; } yield resp.data; } diff --git a/src/packages/nats/files/write.ts b/src/packages/conat/files/write.ts similarity index 81% rename from src/packages/nats/files/write.ts rename to src/packages/conat/files/write.ts index f3109d1774..961235476a 100644 --- a/src/packages/nats/files/write.ts +++ b/src/packages/conat/files/write.ts @@ -10,7 +10,7 @@ INSTRUCTIONS: Import writeFile: - import { writeFile } from "@cocalc/nats/files/write"; + import { writeFile } from "@cocalc/conat/files/write"; Now you can write a given path to a project (or compute_server) as simply as this: @@ -50,11 +50,11 @@ Here's how it works from the side of whoever is sending the file: DEVELOPMENT: -See src/packages/backend/nats/test/files/write.test.ts for unit tests. +See src/packages/backend/conat/test/files/write.test.ts for unit tests. ~/cocalc/src/packages/backend$ node -require('@cocalc/backend/nats'); a = require('@cocalc/nats/files/write'); +require('@cocalc/backend/conat'); a = require('@cocalc/conat/files/write'); project_id = '00847397-d6a8-4cb0-96a8-6ef64ac3e6cf'; compute_server_id = 0; await a.createServer({project_id,compute_server_id,createWriteStream:require('fs').createWriteStream}); @@ -63,17 +63,16 @@ await a.writeFile({stream, project_id, compute_server_id, path:'/tmp/a.ts'}) */ -import { getEnv } from "@cocalc/nats/client"; -import { readFile } from "./read"; -import { randomId } from "@cocalc/nats/names"; +import { getEnv } from "@cocalc/conat/client"; +import { randomId } from "@cocalc/conat/names"; import { close as closeReadService, createServer as createReadServer, + readFile, } from "./read"; -import { projectSubject } from "@cocalc/nats/names"; -import { type Subscription } from "@nats-io/nats-core"; +import { projectSubject } from "@cocalc/conat/names"; +import { type Subscription } from "@cocalc/conat/core/client"; import { type Readable } from "node:stream"; -import { runLoop } from "./util"; function getWriteSubject({ project_id, compute_server_id }) { return projectSubject({ @@ -85,12 +84,12 @@ function getWriteSubject({ project_id, compute_server_id }) { let subs: { [name: string]: Subscription } = {}; export async function close({ project_id, compute_server_id }) { - const key = getWriteSubject({ project_id, compute_server_id }); - if (subs[key] == null) { + const subject = getWriteSubject({ project_id, compute_server_id }); + if (subs[subject] == null) { return; } - const sub = subs[key]; - delete subs[key]; + const sub = subs[subject]; + delete subs[subject]; await sub.drain(); } @@ -111,14 +110,10 @@ export async function createServer({ if (sub != null) { return; } - const { nc } = await getEnv(); - runLoop({ - listen, - subs, - subject, - nc, - opts: { createWriteStream, project_id, compute_server_id }, - }); + const { cn } = await getEnv(); + sub = await cn.subscribe(subject); + subs[subject] = sub; + listen({ sub, createWriteStream, project_id, compute_server_id }); } async function listen({ @@ -143,15 +138,14 @@ async function handleMessage({ compute_server_id, }) { let error = ""; - const { jc } = await getEnv(); let writeStream: null | Awaited> = null; try { - const { path, name, maxWait } = jc.decode(mesg.data); + const { path, name, maxWait } = mesg.data; writeStream = await createWriteStream(path); // console.log("created writeStream"); writeStream.on("error", (err) => { error = `${err}`; - mesg.respond(jc.encode({ error, status: "error" })); + mesg.respond({ error, status: "error" }); console.warn(`error writing ${path}: ${error}`); writeStream.emit("remove"); }); @@ -176,10 +170,10 @@ async function handleMessage({ } writeStream.end(); writeStream.emit("rename"); - mesg.respond(jc.encode({ status: "success", bytes, chunks })); + mesg.respond({ status: "success", bytes, chunks }); } catch (err) { if (!error) { - mesg.respond(jc.encode({ error: `${err}`, status: "error" })); + mesg.respond({ error: `${err}`, status: "error" }); writeStream?.emit("remove"); } } @@ -213,13 +207,13 @@ export async function writeFile({ name, }); // tell compute server to start reading our file. - const { nc, jc } = await getEnv(); - const resp = await nc.request( + const { cn } = await getEnv(); + const resp = await cn.request( getWriteSubject({ project_id, compute_server_id }), - jc.encode({ name, path, maxWait }), + { name, path, maxWait }, { timeout: maxWait }, ); - const { error, bytes, chunks } = jc.decode(resp.data); + const { error, bytes, chunks } = resp.data; if (error) { throw Error(error); } diff --git a/src/packages/nats/hub-api/db.ts b/src/packages/conat/hub-api/db.ts similarity index 93% rename from src/packages/nats/hub-api/db.ts rename to src/packages/conat/hub-api/db.ts index b3e94ceb19..850eeffd59 100644 --- a/src/packages/nats/hub-api/db.ts +++ b/src/packages/conat/hub-api/db.ts @@ -33,8 +33,6 @@ export interface DB { getLegacyTimeTravelPatches: (opts: { account_id?: string; uuid: string; - // you should set this to true to enable potentially very large response support - requestMany?: boolean; // also, make this bigger: timeout?: number; }) => Promise; diff --git a/src/packages/nats/hub-api/index.ts b/src/packages/conat/hub-api/index.ts similarity index 79% rename from src/packages/nats/hub-api/index.ts rename to src/packages/conat/hub-api/index.ts index 323846b9f7..720f16c064 100644 --- a/src/packages/nats/hub-api/index.ts +++ b/src/packages/conat/hub-api/index.ts @@ -1,17 +1,9 @@ -/* -NOTE: If you need to send *very large responses* to a message or increase timeouts, -see getLegacyTimeTravelPatches in db.ts. You just have to allow the keys requestMany -and timeout to the *first* argument of the function (which must be an object). -The framework will then automatically allow large responses when the user sets -requestMany:true. -*/ - import { isValidUUID } from "@cocalc/util/misc"; import { type Purchases, purchases } from "./purchases"; import { type System, system } from "./system"; import { type Projects, projects } from "./projects"; import { type DB, db } from "./db"; -import { handleErrorMessage } from "@cocalc/nats/util"; +import { handleErrorMessage } from "@cocalc/conat/util"; export interface HubApi { system: System; @@ -47,7 +39,6 @@ export function initHubApi(callHubApi): HubApi { const resp = await callHubApi({ name: `${group}.${functionName}`, args, - requestMany: args[0]?.requestMany, timeout: args[0]?.timeout, }); return handleErrorMessage(resp); diff --git a/src/packages/nats/hub-api/projects.ts b/src/packages/conat/hub-api/projects.ts similarity index 100% rename from src/packages/nats/hub-api/projects.ts rename to src/packages/conat/hub-api/projects.ts diff --git a/src/packages/nats/hub-api/purchases.ts b/src/packages/conat/hub-api/purchases.ts similarity index 100% rename from src/packages/nats/hub-api/purchases.ts rename to src/packages/conat/hub-api/purchases.ts diff --git a/src/packages/nats/hub-api/system.ts b/src/packages/conat/hub-api/system.ts similarity index 100% rename from src/packages/nats/hub-api/system.ts rename to src/packages/conat/hub-api/system.ts diff --git a/src/packages/nats/hub-api/util.ts b/src/packages/conat/hub-api/util.ts similarity index 100% rename from src/packages/nats/hub-api/util.ts rename to src/packages/conat/hub-api/util.ts diff --git a/src/packages/nats/llm/client.ts b/src/packages/conat/llm/client.ts similarity index 76% rename from src/packages/nats/llm/client.ts rename to src/packages/conat/llm/client.ts index 4613b18d8f..02857fb647 100644 --- a/src/packages/nats/llm/client.ts +++ b/src/packages/conat/llm/client.ts @@ -2,11 +2,10 @@ Client for the nats server in server.ts. */ -import { getEnv } from "@cocalc/nats/client"; +import { getEnv } from "@cocalc/conat/client"; import type { ChatOptions } from "@cocalc/util/types/llm"; import { isValidUUID } from "@cocalc/util/misc"; import { llmSubject } from "./server"; -import { waitUntilConnected } from "@cocalc/nats/util"; export async function llm(options: ChatOptions): Promise { if (!options.system?.trim()) { @@ -20,18 +19,17 @@ export async function llm(options: ChatOptions): Promise { let all = ""; let lastSeq = -1; - const { nc, jc } = await getEnv(); + const { cn } = await getEnv(); let { stream, ...opts } = options; - await waitUntilConnected(); - for await (const resp of await nc.requestMany(subject, jc.encode(opts), { + for await (const resp of await cn.requestMany(subject, opts, { maxWait: opts.timeout ?? 1000 * 60 * 10, })) { - if (resp.data.length == 0) { + if (resp.data == null) { // client code also expects null token to know when stream is done. stream?.(null); break; } - const { error, text, seq } = jc.decode(resp.data); + const { error, text, seq } = resp.data; if (error) { throw Error(error); } diff --git a/src/packages/nats/llm/server.ts b/src/packages/conat/llm/server.ts similarity index 86% rename from src/packages/nats/llm/server.ts rename to src/packages/conat/llm/server.ts index 58fbcbe519..ec7eb7a989 100644 --- a/src/packages/nats/llm/server.ts +++ b/src/packages/conat/llm/server.ts @@ -11,9 +11,9 @@ it so projects can directly use llm's... but first we need to figure out how paying for that would work. */ -import { getEnv } from "@cocalc/nats/client"; -import { type Subscription, Empty } from "@nats-io/nats-core"; +import { getEnv } from "@cocalc/conat/client"; import { isValidUUID } from "@cocalc/util/misc"; +import type { Subscription } from "@cocalc/conat/core/client"; export const SUBJECT = process.env.COCALC_TEST_MODE ? "llm-test" : "llm"; @@ -52,8 +52,8 @@ function getUserId(subject: string): string { let sub: Subscription | null = null; export async function init(evaluate) { - const { nc } = await getEnv(); - sub = nc.subscribe(`${SUBJECT}.*.api`, { queue: "q" }); + const { cn } = await getEnv(); + sub = await cn.subscribe(`${SUBJECT}.*.api`, { queue: "q" }); listen(evaluate); } @@ -75,12 +75,11 @@ async function listen(evaluate) { } async function handleMessage(mesg, evaluate) { - const { jc } = await getEnv(); - const options = jc.decode(mesg.data); + const options = mesg.data; let seq = 0; const respond = ({ text, error }: { text?: string; error?: string }) => { - mesg.respond(jc.encode({ text, error, seq })); + mesg.respond({ text, error, seq }); seq += 1; }; @@ -88,8 +87,8 @@ async function handleMessage(mesg, evaluate) { const end = () => { if (done) return; done = true; - // end response stream with empty payload. - mesg.respond(Empty); + // end response stream with null payload. + mesg.respond(null); }; const stream = (text?) => { diff --git a/src/packages/nats/names.ts b/src/packages/conat/names.ts similarity index 98% rename from src/packages/nats/names.ts rename to src/packages/conat/names.ts index b01d071dfc..1caf0d0e4c 100644 --- a/src/packages/nats/names.ts +++ b/src/packages/conat/names.ts @@ -13,7 +13,7 @@ For Subjects: import generateVouchers from "@cocalc/util/vouchers"; import type { Location } from "./types"; -import { encodeBase64 } from "@cocalc/nats/util"; +import { encodeBase64 } from "@cocalc/conat/util"; // nice alphanumeric string that can be used as nats subject, and very // unlikely to randomly collide with another browser tab from this account. diff --git a/src/packages/nats/package.json b/src/packages/conat/package.json similarity index 73% rename from src/packages/nats/package.json rename to src/packages/conat/package.json index acc34d9d60..81f2fe1b5b 100644 --- a/src/packages/nats/package.json +++ b/src/packages/conat/package.json @@ -1,7 +1,7 @@ { - "name": "@cocalc/nats", + "name": "@cocalc/conat", "version": "1.0.0", - "description": "CoCalc NATS integration code. Usable by both nodejs and browser.", + "description": "Conat -- pub/sub framework. Usable by both nodejs and browser.", "exports": { "./sync/*": "./dist/sync/*.js", "./llm/*": "./dist/llm/*.js", @@ -18,7 +18,7 @@ "build": "pnpm exec tsc --build", "tsc": "pnpm exec tsc --watch --pretty --preserveWatchOutput", "prepublishOnly": "pnpm test", - "test": "echo 'see packages/backend/nats/tests instead'" + "test": "echo 'see packages/backend/conat/tests instead'" }, "files": [ "dist/**", @@ -28,26 +28,28 @@ "author": "SageMath, Inc.", "keywords": [ "utilities", - "nats", + "conat", "cocalc" ], "license": "SEE LICENSE.md", "dependencies": { "@cocalc/comm": "workspace:*", - "@cocalc/nats": "workspace:*", + "@cocalc/conat": "workspace:*", "@cocalc/util": "workspace:*", - "@nats-io/jetstream": "3.0.0", - "@nats-io/kv": "3.0.0", - "@nats-io/nats-core": "3.0.0", - "@nats-io/services": "3.0.0", + "@isaacs/ttlcache": "^1.4.1", + "@msgpack/msgpack": "^3.1.1", + "@socket.io/redis-streams-adapter": "^0.2.2", "awaiting": "^3.0.0", "events": "3.3.0", "immutable": "^4.3.0", + "iovalkey": "^0.3.1", "js-base64": "^3.7.7", "json-stable-stringify": "^1.0.1", - "lodash": "^4.17.21" + "lodash": "^4.17.21", + "socket.io-client": "^4.8.1" }, "devDependencies": { + "@types/better-sqlite3": "^7.6.13", "@types/json-stable-stringify": "^1.0.32", "@types/lodash": "^4.14.202", "@types/node": "^18.16.14" @@ -56,5 +58,5 @@ "type": "git", "url": "https://github.com/sagemathinc/cocalc" }, - "homepage": "https://github.com/sagemathinc/cocalc/tree/master/src/packages/nats" + "homepage": "https://github.com/sagemathinc/cocalc/tree/master/src/packages/conat" } diff --git a/src/packages/conat/persist/README.md b/src/packages/conat/persist/README.md new file mode 100644 index 0000000000..5677e89586 --- /dev/null +++ b/src/packages/conat/persist/README.md @@ -0,0 +1,59 @@ +# Persistence Service + +The goal is to provide a tiered, memory efficient, scalable way to persist +streams and kv stores, without using Jetstream. This should use only the core +pub/sub functionality of NATS, so we can switch to other pub/sub systems later. + +## API + +Given a subject that the requesting user has access to, this service can do the following. + +Message = {value?:Buffer, headers?:Headers, timestamp?:number} + +- set: \(subject, seq:number, message}\) +- get: \({subject, seq:number}\) => Message +- delete: \({subject, seq:number}\) +- getAll: \({subject, start\_seq?:number}\) => Message\[\], as sequence of messages + - if start\_seq given, gets only messages >= start\_seq +- deleteAll:\({subject, end\_seq?:number}\) + - if end\_seq given, deletes only messages <= end\_seq + +Moreover, every time one client makes a change, a corresponding message gets +published so all other clients can update their state. This will use exactly +the protocol implemented in core-stream.ts right now. + +Notes: + +- We use chunking so there are no limits on message size. +- There is no history for kv, i.e., only the last value is saved. (kv is **not** implemented + on top of streams like in NATS; it is its own thing) +- Messages can be deleted in a stream. + +## Architecture: + +- many persistence servers + +- The persistence servers have local persistent disk storage and access to a common cloud + storage bucket (or common NFS mount) for longterm cheap high-latency tiered storage. + +- One coordinator, which knows state of persistence servers. It has persistent disk + storage to maintain state, even if it is restarted. + +## Protocol: + +- When any client wants to use a subject, it makes a request to the coordinator asking which + persistence server it should use. The coordinator selects from active persistence servers + and it makes a consistent assignment. If a persistence servers stops working or vanishes, + clients will again make a request, and the coordinator will answer, possibly with a + different server. + - A persistence server is the analogue of a NATS jetstream node. We use + a coordinator so there is no need for RAFT. Using cloud storage provides + tiered storage. Only accessing the sqlite file when there's a request lets + us scale to an unlimited number of subjects but maintain very fast + startup time. +- Client makes requests as mentioned above to a specific named persistence server. + +- When server gets such a request, it opens the subject by copying the sqlite3 file from + cloud storage to a local disk if necessary, then queries it and responds. +- Periodically the server copies the sqlite3 file from local disk to cloud storage. + diff --git a/src/packages/conat/persist/client.ts b/src/packages/conat/persist/client.ts new file mode 100644 index 0000000000..1bc63940c5 --- /dev/null +++ b/src/packages/conat/persist/client.ts @@ -0,0 +1,318 @@ +import { getEnv } from "@cocalc/conat/client"; +import { persistSubject, renewSubject, type User } from "./server"; +export { DEFAULT_LIFETIME } from "./server"; +import type { + Options as Storage, + SetOperation, + DeleteOperation, + Configuration, +} from "./storage"; +export type { Storage, SetOperation, DeleteOperation, Configuration }; +import { + Message as ConatMessage, + MessageData, + ConatError, +} from "@cocalc/conat/core/client"; + +interface ConnectionOptions { + // maximum amount of time the persist can possibly stay alive, even with + // many calls to extend it. + maxActualLifetime?: number; + // server will send resp='' to ensure there is at least one message every this many ms. + heartbeat?: number; + // persist will live at most this long, then definitely die unless renewed. + lifetime?: number; +} + +export async function getAll({ + user, + storage, + start_seq, + end_seq, + options, +}: { + user: User; + storage: Storage; + start_seq?: number; + end_seq?: number; + options?: ConnectionOptions; +}): Promise<{ id?: string; lifetime?: number; stream }> { + const stream = await callApiGetAll({ + user, + storage, + options, + start_seq, + end_seq, + }); + if (end_seq) { + return { stream }; + } + // the first element of the stream has the id, and the rest is the + // stream user will consume + const { value, done } = await stream.next(); + if (done) { + throw Error("got no response"); + } + + const x = value?.headers?.content as any; + if (typeof x?.id != "string" || typeof x?.lifetime != "number") { + throw Error("invalid data from server"); + } + return { ...x, stream }; +} + +export async function set({ + user, + storage, + key, + ttl, + previousSeq, + msgID, + messageData, + timeout, +}: { + user: User; + storage: Storage; + key?: string; + ttl?: number; + previousSeq?: number; + msgID?: string; + messageData: MessageData; + timeout?: number; +}): Promise<{ seq: number; time: number }> { + const subject = persistSubject(user); + const { cn } = await getEnv(); + + const reply = await cn.request(subject, null, { + raw: messageData.raw, + encoding: messageData.encoding, + headers: { + headers: messageData.headers, + cmd: "set", + key, + ttl, + previousSeq, + msgID, + storage, + } as any, + timeout, + }); + const { error, code, resp } = reply.data; + if (error) { + throw new ConatError(error, { code }); + } + return resp; +} + +export async function deleteMessages({ + user, + storage, + timeout, + seq, + last_seq, + all, +}: { + user: User; + storage: Storage; + timeout?: number; + seq?: number; + last_seq?: number; + all?: boolean; +}): Promise<{ seqs: number[] }> { + const subject = persistSubject(user); + const { cn } = await getEnv(); + + const reply = await cn.request(subject, null, { + headers: { + storage: storage as any, + cmd: "delete", + seq, + last_seq, + all, + } as any, + timeout, + }); + const { error, resp } = reply.data; + if (error) { + throw Error(error); + } + return resp; +} + +export async function config({ + user, + storage, + config, + timeout, +}: { + user: User; + storage: Storage; + config?: Partial; + timeout?: number; +}): Promise { + const subject = persistSubject(user); + const { cn } = await getEnv(); + + const reply = await cn.request(subject, null, { + headers: { + storage: storage as any, + cmd: "config", + config, + } as any, + timeout, + }); + const { error, resp } = reply.data; + if (error) { + throw Error(error); + } + return resp; +} + +export async function get({ + user, + storage, + seq, + key, + timeout, +}: { + user; + storage; + timeout?: number; +} & ( + | { seq: number; key?: undefined } + | { key: string; seq?: undefined } +)): Promise { + const subject = persistSubject(user); + const { cn } = await getEnv(); + + const resp = await cn.request(subject, null, { + headers: { cmd: "get", storage, seq, key } as any, + timeout, + }); + if (resp.headers == null) { + return undefined; + } + return resp; +} + +export async function keys({ + user, + storage, + timeout, +}: { + user; + storage; + timeout?: number; +}): Promise { + const subject = persistSubject(user); + const { cn } = await getEnv(); + + const reply = await cn.request(subject, null, { + headers: { cmd: "keys", storage } as any, + timeout, + }); + const { error, resp } = reply.data; + if (error) { + throw Error(error); + } + return resp; +} + +export async function sqlite({ + user, + storage, + timeout, + statement, + params, +}: { + user; + storage; + timeout?: number; + statement: string; + params?: any[]; +}): Promise { + const subject = persistSubject(user); + const { cn } = await getEnv(); + + const reply = await cn.request(subject, null, { + headers: { cmd: "sqlite", storage, statement, params } as any, + timeout, + }); + const { error, resp } = reply.data; + if (error) { + throw Error(error); + } + return resp; +} + +async function* callApiGetAll({ + start_seq, + end_seq, + // who is accessing persistent storage + user, + // what storage they are accessing + storage, + // options for persistent connection + options, +}: { + start_seq?: number; + end_seq?: number; + user: User; + storage: Storage; + options?: ConnectionOptions; +}) { + const subject = persistSubject(user); + const { cn } = await getEnv(); + + const { + heartbeat, + lifetime, + maxActualLifetime = 1000 * 60 * 60 * 2, + } = options ?? {}; + + let lastSeq = -1; + for await (const resp of await cn.requestMany(subject, null, { + maxWait: maxActualLifetime, + headers: { + cmd: "getAll", + start_seq, + end_seq, + heartbeat, + lifetime, + storage, + } as any, + })) { + if (resp.headers == null) { + // terminate requestMany + return; + } + + const { error, seq } = resp.headers; + if (error) { + throw Error(`${error}`); + } + if (typeof seq != "number") { + throw Error("seq must be a number"); + } + if (lastSeq + 1 != seq) { + throw Error("missed response"); + } + lastSeq = seq; + yield resp; + } +} + +export async function renew({ + user, + id, + lifetime, +}: { + user: User; + id: string; + lifetime?: number; +} & User) { + const subject = renewSubject(user); + const { cn } = await getEnv(); + const resp = await cn.request(subject, { id, lifetime }); + return resp.data; +} diff --git a/src/packages/conat/persist/context.ts b/src/packages/conat/persist/context.ts new file mode 100644 index 0000000000..85e845bb9a --- /dev/null +++ b/src/packages/conat/persist/context.ts @@ -0,0 +1,51 @@ +/* +Define functions for using sqlite, the filesystem, compression, etc. +These are functions that typically get set via nodejs on the backend, +not from a browser. Making this explicit helps clarify the dependence +on the backend and make the code more unit testable. +*/ + +import type BetterSqlite3 from "better-sqlite3"; +type Database = BetterSqlite3.Database; +export { type Database }; + +let betterSqlite3: any = null; + +export let compress: (data: Buffer) => Buffer = () => { + throw Error("must initialize persist.sqlite"); +}; + +export let decompress: (data: Buffer) => Buffer = () => { + throw Error("must initialize persist.sqlite"); +}; + +export let syncFiles = { local: "", archive: "" }; + +export let ensureContainingDirectoryExists: (path: string) => Promise = ( + _path, +) => { + throw Error("must initialize persiste.sqlite"); +}; + +export function initContext(opts: { + betterSqlite3; + compress: (Buffer) => Buffer; + decompress: (Buffer) => Buffer; + syncFiles: { local: string; archive: string }; + ensureContainingDirectoryExists: (path: string) => Promise; +}) { + betterSqlite3 = opts.betterSqlite3; + compress = opts.compress; + decompress = opts.decompress; + syncFiles = opts.syncFiles; + ensureContainingDirectoryExists = opts.ensureContainingDirectoryExists; +} + +export function createDatabase(...args): Database { + if (betterSqlite3 == null) { + throw Error( + "conat/persist must be initialized with the better-sqlite3 module -- import from backend/conat/persist instead", + ); + } + return new betterSqlite3(...args); +} diff --git a/src/packages/conat/persist/server.ts b/src/packages/conat/persist/server.ts new file mode 100644 index 0000000000..f72a3150f9 --- /dev/null +++ b/src/packages/conat/persist/server.ts @@ -0,0 +1,510 @@ +/* + +Maybe storage available as a service. + +This code is similar to the changefeed server, because +it provides a changefeed on a given persist storage, +and a way to see values. + +DEVELOPMENT: + +Change to the packages/backend directory and run node. + +TERMINAL 1: This sets up the environment and starts the server running: + + require('@cocalc/backend/conat/persist').initServer() + + +TERMINAL 2: In another node session, create a client: + + user = {account_id:'00000000-0000-4000-8000-000000000000'}; storage = {path:'a.db'}; const {id, lifetime, stream} = await require('@cocalc/backend/conat/persist').getAll({user, storage, options:{lifetime:1000*60}}); console.log({id}); for await(const x of stream) { console.log(x.data) }; console.log("DONE") + +// client also does this periodically to keep subscription alive: + + await renew({user, id }) + +TERMINAL 3: + +user = {account_id:'00000000-0000-4000-8000-000000000000'}; storage = {path:'a.db'}; const {set,get} = require('@cocalc/backend/conat/persist'); const { messageData } =require("@cocalc/conat/core/client"); 0; + + await set({user, storage, messageData:messageData('hi')}) + + await get({user, storage, seq:1}) + + await set({user, storage, key:'bella', messageData:messageData('hi', {headers:{x:10}})}) + + await get({user, storage, key:'bella'}) + +Also getAll using start_seq: + + cf = const {id, lifetime, stream} = await require('@cocalc/backend/conat/persist').getAll({user, storage, start_seq:10, options:{lifetime:1000*60}}); for await(const x of stream) { console.log(x) }; +*/ + +import { + pstream, + type Message as StoredMessage, + PersistentStream, +} from "./storage"; +import { getEnv } from "@cocalc/conat/client"; +import { type Client, type Subscription } from "@cocalc/conat/core/client"; +import { uuid } from "@cocalc/util/misc"; +import { getLogger } from "@cocalc/conat/client"; +import { delay } from "awaiting"; +import { reuseInFlight } from "@cocalc/util/reuse-in-flight"; +import { join } from "path"; +import { syncFiles, ensureContainingDirectoryExists } from "./context"; +import { is_array } from "@cocalc/util/misc"; + +// I added an experimental way to run any sqlite query... but it is disabled +// since of course there are major DOS and security concerns. +const ENABLE_SQLITE_GENERAL_QUERIES = false; + +// When sending a large number of message for +// getAll or change updates, we combine together messages +// until hitting this size, then send them all at once. +// This bound is to avoid potentially using a huge amount of RAM +// when streaming a large saved database to the client. +// Note: if a single message is larger than this, it still +// gets sent, just individually. +const DEFAULT_MESSAGES_THRESH = 20 * 1e6; +//const DEFAULT_MESSAGES_THRESH = 1e5; + +export const DEFAULT_LIFETIME = 5 * 1000 * 60; +export const MAX_LIFETIME = 15 * 1000 * 60; +export const MIN_LIFETIME = 30 * 1000; +export const MIN_HEARTBEAT = 5000; +export const MAX_HEARTBEAT = 120000; +export const MAX_PERSISTS_PER_USER = parseInt( + process.env.MAX_PERSISTS_PER_USER ?? "100", +); + +export const MAX_PERSISTS_PER_SERVER = parseInt( + process.env.MAX_PERSISTS_PER_SERVER ?? "5000", +); + +export const LAST_CHUNK = "last-chunk"; + +const logger = getLogger("persist:server"); + +export const SUBJECT = "persist"; + +export type User = { account_id?: string; project_id?: string }; +export function persistSubject({ account_id, project_id }: User) { + if (account_id) { + return `${SUBJECT}.account-${account_id}.api`; + } else if (project_id) { + return `${SUBJECT}.project-${project_id}.api`; + } else { + return `${SUBJECT}.hub.api`; + } +} + +export function renewSubject({ account_id, project_id }: User) { + if (account_id) { + return `${SUBJECT}.account-${account_id}.renew`; + } else if (project_id) { + return `${SUBJECT}.project-${project_id}.renew`; + } else { + return `${SUBJECT}.hub.renew`; + } +} + +function getUserId(subject: string): string { + if ( + subject.startsWith(`${SUBJECT}.account-`) || + subject.startsWith(`${SUBJECT}.project-`) + ) { + // note that project and account have the same number of letters + return subject.slice( + `${SUBJECT}.account-`.length, + `${SUBJECT}.account-`.length + 36, + ); + } + return ""; +} + +let terminated = false; +let sub: Subscription | null = null; + +interface Options { + messagesThresh?: number; + client?: Client; +} + +export async function init({ + client, + messagesThresh = DEFAULT_MESSAGES_THRESH, +}: Options = {}) { + logger.debug("starting persist server"); + logger.debug({ + DEFAULT_LIFETIME, + MAX_LIFETIME, + MIN_LIFETIME, + MIN_HEARTBEAT, + MAX_HEARTBEAT, + MAX_PERSISTS_PER_USER, + MAX_PERSISTS_PER_SERVER, + SUBJECT, + }); + client = client ?? (await getEnv()).cn; + // this returns one the service is listening + await persistService({ client, messagesThresh }); + await renewService({ client }); +} + +async function noThrow(f) { + try { + await f(); + } catch (err) { + logger.debug(`WARNING -- ${err}`); + } +} + +async function persistService({ client, messagesThresh }) { + sub = await client.subscribe(`${SUBJECT}.*.api`, { + queue: "q", + ephemeral: true, + }); + listenPersist({ messagesThresh }); +} + +let renew: Subscription | null = null; +async function renewService({ client }) { + renew = await client.subscribe(`${SUBJECT}.*.renew`); + listenRenew(); +} + +async function listenRenew() { + if (renew == null) { + throw Error("must call init first"); + } + for await (const mesg of renew) { + if (terminated) { + return; + } + noThrow(async () => await handleRenew(mesg)); + } +} + +const endOfLife: { [id: string]: number } = {}; +function getLifetime({ lifetime }): number { + if (lifetime === -1) { + // special case of -1 used for cancel + return lifetime; + } + if (!lifetime) { + return DEFAULT_LIFETIME; + } + lifetime = parseFloat(lifetime); + if (lifetime > MAX_LIFETIME) { + return MAX_LIFETIME; + } + if (lifetime < MIN_LIFETIME) { + return MIN_LIFETIME; + } + return lifetime; +} + +async function handleRenew(mesg) { + const request = mesg.data; + if (!request) { + return; + } + let { id } = request; + if (endOfLife[id]) { + // it's ours so we respond + const lifetime = getLifetime(request); + endOfLife[id] = Date.now() + lifetime; + mesg.respond({ status: "ok" }); + } +} + +export async function terminate() { + terminated = true; + if (sub != null) { + sub.drain(); + sub = null; + } + if (renew != null) { + renew.drain(); + renew = null; + } +} + +async function listenPersist({ messagesThresh }) { + if (sub == null) { + throw Error("must call init first"); + } + for await (const mesg of sub) { + //console.log("got mesg = ", { data: mesg.data, headers: mesg.headers }); + if (terminated) { + return; + } + noThrow(async () => await handleMessage({ mesg, messagesThresh })); + } +} + +let numPersists = 0; +const numPersistsPerUser: { [user_id: string]: number } = {}; + +function metrics() { + logger.debug("persist", { numPersists }); +} + +async function handleMessage({ mesg, messagesThresh }) { + const request = mesg.headers; + //console.log("handleMessage", { data: mesg.data, headers: mesg.headers }); + const user_id = getUserId(mesg.subject); + + // [ ] TODO: more permissions and other sanity checks! + + const path = join(syncFiles.local, request.storage.path); + await ensureContainingDirectoryExists(path); + let stream: undefined | PersistentStream = undefined; + const respond = (...args) => { + stream?.close(); + mesg.respond(...args); + }; + + stream = pstream({ ...request.storage, path }); + // get and set using normal request/respond + try { + if (request.cmd == "set") { + const resp = stream.set({ + key: request.key, + previousSeq: request.previousSeq, + raw: mesg.raw, + ttl: request.ttl, + encoding: mesg.encoding, + headers: request.headers, + msgID: request.msgID, + }); + respond({ resp }); + } else if (request.cmd == "delete") { + respond({ resp: stream.delete(request) }); + } else if (request.cmd == "config") { + respond({ resp: stream.config(request.config) }); + } else if (request.cmd == "get") { + const resp = stream.get({ key: request.key, seq: request.seq }); + //console.log("got resp = ", resp); + if (resp == null) { + respond(null); + } else { + const { raw, encoding, headers, seq, time, key } = resp; + respond(null, { + raw, + encoding, + headers: { ...headers, seq, time, key }, + }); + } + } else if (request.cmd == "keys") { + const resp = stream.keys(); + respond({ resp }); + } else if (request.cmd == "sqlite") { + if (!ENABLE_SQLITE_GENERAL_QUERIES) { + throw Error("sqlite command not currently supported"); + } + const resp = stream.sqlite(request.statement, request.params); + respond({ resp }); + } else if (request.cmd == "getAll") { + // getAll will free reference to stream when it is done: + getAll({ mesg, request, user_id, stream, messagesThresh }); + } else { + respond({ error: `unknown command ${request.cmd}` }); + } + } catch (err) { + respond({ error: `${err}`, code: err.code }); + } +} + +async function getAll({ mesg, request, user_id, stream, messagesThresh }) { + //console.log("getAll", request); + // getAll sends multiple responses + let seq = 0; + let lastSend = 0; + + const respond = async ( + error, + content?: + | "" + | { id: string; lifetime: number } + | { state: "watch" } + | StoredMessage[], + ) => { + if (terminated) { + end(); + } + lastSend = Date.now(); + if (!error && is_array(content)) { + // console.log("content = ", content); + // StoredMessage + const messages = content as StoredMessage[]; + await mesg.respond(messages, { headers: { seq } }); + } else { + await mesg.respond(null, { headers: { error, seq, content } }); + } + if (error) { + end(); + return; + } + + seq += 1; + }; + + const id = uuid(); + numPersists += 1; + metrics(); + let done = false; + const end = () => { + if (done) { + return; + } + done = true; + delete endOfLife[id]; + numPersists -= 1; + metrics(); + stream.close(); + // end response stream with empty payload and no headers. + mesg.respond(null); + }; + + if (numPersistsPerUser[user_id] > MAX_PERSISTS_PER_USER) { + logger.debug(`numPersistsPerUser[${user_id}] >= MAX_PERSISTS_PER_USER`, { + numPersistsPerUserThis: numPersistsPerUser[user_id], + MAX_PERSISTS_PER_USER, + }); + respond( + `This server has a limit of ${MAX_PERSISTS_PER_USER} persists per account`, + ); + return; + } + if (numPersists >= MAX_PERSISTS_PER_SERVER) { + logger.debug("numPersists >= MAX_PERSISTS_PER_SERVER", { + numPersists, + MAX_PERSISTS_PER_SERVER, + }); + // this will just cause the client to make another attempt, hopefully + // to another server + respond(`This server has a limit of ${MAX_PERSISTS_PER_SERVER} persists`); + return; + } + + let { heartbeat } = request; + const lifetime = getLifetime(request); + delete request.lifetime; + delete request.heartbeat; + + endOfLife[id] = Date.now() + lifetime; + + async function lifetimeLoop() { + while (!done) { + await delay(7500); + if (!endOfLife[id] || endOfLife[id] <= Date.now()) { + end(); + return; + } + } + } + lifetimeLoop(); + + async function heartbeatLoop() { + let hb = parseFloat(heartbeat); + if (hb < MIN_HEARTBEAT) { + hb = MIN_HEARTBEAT; + } else if (hb > MAX_HEARTBEAT) { + hb = MAX_HEARTBEAT; + } + await delay(hb); + while (!done) { + const timeSinceLast = Date.now() - lastSend; + if (timeSinceLast < hb) { + // no neeed to send heartbeat yet + await delay(hb - timeSinceLast); + continue; + } + respond(undefined, ""); + await delay(hb); + } + } + + try { + if (!request.end_seq) { + await respond(undefined, { id, lifetime }); + } + if (done) { + return; + } + + // send the current data + const messages: any[] = []; + let size = 0; + // [ ] TODO: limit the size + for (const message of stream.getAll({ + start_seq: request.start_seq, + end_seq: request.end_seq, + })) { + messages.push(message); + size += message.raw.length; + if (size >= messagesThresh) { + await respond(undefined, messages); + messages.length = 0; + size = 0; + } + if (done) return; + } + + if (messages.length > 0) { + await respond(undefined, messages); + } + if (done) return; + + if (request.end_seq) { + end(); + return; + } + + // send state change message + await respond(undefined, { state: "watch" }); + + const unsentMessages: StoredMessage[] = []; + const sendAllUnsentMessages = reuseInFlight(async () => { + while (!done && unsentMessages.length > 0) { + if (done) return; + const messages: StoredMessage[] = []; + let size = 0; + while (unsentMessages.length > 0 && !done) { + const message = unsentMessages.shift(); + size += message?.raw?.length ?? 0; // e.g. op:'delete' messages have length 0 and now raw field + messages.push(message!); + if (size >= messagesThresh) { + await respond(undefined, messages); + if (done) return; + size = 0; + messages.length = 0; + } + } + if (done) return; + if (messages.length > 0) { + await respond(undefined, messages); + } + } + }); + + stream.on("change", async (message) => { + if (done) { + return; + } + //console.log("stream change event", message); + unsentMessages.push(message); + sendAllUnsentMessages(); + }); + + if (heartbeat) { + heartbeatLoop(); + } + } catch (err) { + if (!done) { + respond(`${err}`); + } + } +} diff --git a/src/packages/conat/persist/storage.ts b/src/packages/conat/persist/storage.ts new file mode 100644 index 0000000000..75de350bdd --- /dev/null +++ b/src/packages/conat/persist/storage.ts @@ -0,0 +1,650 @@ +/* +Persistent storage of a specific stream or kv store. + +You can set a message by providing optionally a key, buffer and/or json value. +A sequence number and time (in ms since epoch) is assigned and returned. +If the key is provided, it is an arbitrary string and all older messages +with that same key are deleted. You can efficiently retrieve a message +by its key. The message content itself is given by the buffer and/or json +value. The buffer is like the "payload" in NATS, and the json is like +the headers in NATS. + +This module is: + + - efficient -- buffer is automatically compressed using zstandard + - synchronous -- fast enough to meet our requirements even with blocking + - memory efficient -- nothing in memory beyond whatever key you request + +We care about memory efficiency here since it's likely we'll want to have +possibly thousands of these in a single nodejs process at once, but with +less than 1 read/write per second for each. Thus memory is critical, and +supporting at least 1000 writes/second is what we need. +Fortunately, this implementation can do ~50,000+ writes per second and read +over 500,000 per second. Yes, it blocks the main thread, but by using +better-sqlite3 and zstd-napi, we get 10x speed increases over async code, +so this is worth it. + + +COMPRESSION: + +I implemented *sync* lz4-napi compression here and it's very fast, +but it LEAKS MEMORY HORRIBLY. The async functions in lz4-napi seem fine. +Upstream report (by me): https://github.com/antoniomuso/lz4-napi/issues/678 +I also tried the rust sync snappy and it had a similar memory leak. Finally, +I tried zstd-napi and it has a very fast sync implementation that does *not* +leak memory. So zstd-napi it is. And I like zstandard anyways. + +NOTE: + +We use seconds instead of ms in sqlite since that is the standard +convention for times in sqlite. + +DEVELOPMENT: + + + s = require('@cocalc/backend/conat/persist').pstream({path:'/tmp/a.db'}) + +*/ + +import { refCacheSync } from "@cocalc/util/refcache"; +import { createDatabase, type Database, compress, decompress } from "./context"; +import type { JSONValue } from "@cocalc/util/types"; +import { EventEmitter } from "events"; +import { + DataEncoding, + type Headers, + ConatError, +} from "@cocalc/conat/core/client"; +import TTL from "@isaacs/ttlcache"; + +export interface Configuration { + // How many messages may be in a Stream, oldest messages will be removed + // if the Stream exceeds this size. -1 for unlimited. + max_msgs: number; + + // Maximum age of any message in the stream, + // expressed in milliseconds. 0 for unlimited. + // **Note that max_age is in milliseconds.** + max_age: number; + + // How big the Stream may be. When the stream size + // exceeds this, old messages are removed. -1 for unlimited. + // The size of a message is the sum of the raw uncompressed blob + // size, the headers json and the key length. + max_bytes: number; + + // The largest message that will be accepted. -1 for unlimited. + max_msg_size: number; + + // Attempting to publish a message that causes either of the following + // two rate limits to be exceeded throws an exception. + // For dstream, the messages are explicitly rejected and the client + // gets a "reject" event emitted. E.g., the terminal running in the project + // writes [...] when it gets these rejects, indicating that data was dropped. + // -1 for unlimited + max_bytes_per_second: number; + + // -1 for unlimited + max_msgs_per_second: number; + + // old = delete old messages to make room for nw + // new = refuse writes if they exceed the limits + discard_policy: "old" | "new"; + + // If true (default: false), messages will be automatically deleted after their ttl + // Use the option {ttl:number of MILLISECONDS} when publishing to set a ttl. + allow_msg_ttl: boolean; +} + +const CONFIGURATION = { + max_msgs: { def: -1, fromDb: parseInt, toDb: (x) => `${parseInt(x)}` }, + max_age: { def: 0, fromDb: parseInt, toDb: (x) => `${parseInt(x)}` }, + max_bytes: { def: -1, fromDb: parseInt, toDb: (x) => `${parseInt(x)}` }, + max_msg_size: { def: -1, fromDb: parseInt, toDb: (x) => `${parseInt(x)}` }, + max_bytes_per_second: { + def: -1, + fromDb: parseInt, + toDb: (x) => `${parseInt(x)}`, + }, + max_msgs_per_second: { + def: -1, + fromDb: parseInt, + toDb: (x) => `${parseInt(x)}`, + }, + discard_policy: { + def: "old", + fromDb: (x) => `${x}`, + toDb: (x) => (x == "new" ? "new" : "old"), + }, + allow_msg_ttl: { + def: false, + fromDb: (x) => x == "true", + toDb: (x) => `${!!x}`, + }, +}; + +enum CompressionAlgorithm { + None = 0, + Zstd = 1, +} + +interface Compression { + // compression algorithm to use + algorithm: CompressionAlgorithm; + // only compress data above this size + threshold: number; +} + +const DEFAULT_COMPRESSION = { + algorithm: CompressionAlgorithm.Zstd, + threshold: 1024, +}; + +export interface Message { + // server assigned positive increasing integer number + seq: number; + // server assigned time in ms since epoch + time: number; + // user assigned key -- when set all previous messages with that key are deleted. + key?: string; + // the encoding used to encode the raw data + encoding: DataEncoding; + // arbitrary binary data + raw: Buffer; + // arbitrary JSON-able object -- analogue of NATS headers, but anything JSON-able + headers?: Headers; +} + +export interface SetOperation extends Message { + op: undefined; + msgID?: string; +} + +export interface DeleteOperation { + op: "delete"; + // sequence numbers of deleted messages + seqs: number[]; +} + +export interface Options { + // absolute path to sqlite database file. This needs to be a valid filename + // path, and must also be kept under 1K so it can be stored in cloud storage. + path: string; + // if false (the default) do not require sync writes to disk on every set + sync?: boolean; + // if set, then data is never saved to disk at all. This is very dangerous + // for production, since it could use a LOT of RAM -- but could be very useful + // for unit testing. + ephemeral?: boolean; + // compression configuration + compression?: Compression; +} + +// persistence for stream of messages with subject +export class PersistentStream extends EventEmitter { + private readonly options: Options; + private readonly db: Database; + private readonly msgIDs = new TTL({ ttl: 2 * 60 * 1000 }); + private conf: Configuration; + + constructor(options: Options) { + super(); + options = { compression: DEFAULT_COMPRESSION, ...options }; + this.options = options; + this.db = createDatabase( + this.options.ephemeral ? ":memory:" : this.options.path, + ); + this.init(); + } + + init = () => { + if (!this.options.sync && !this.options.ephemeral) { + // Unless sync is set, we do not require that the filesystem has commited changes + // to disk after every insert. This can easily make things 10x faster. sets are + // typically going to come in one-by-one as users edit files, so this works well + // for our application. Also, loss of a few seconds persistence is acceptable + // in a lot of applications, e.g., if it is just edit history for a file. + this.db.prepare("PRAGMA synchronous=OFF").run(); + } + // time is in *seconds* since the epoch, since that is standard for sqlite. + // ttl is in milliseconds. + this.db + .prepare( + `CREATE TABLE IF NOT EXISTS messages ( + seq INTEGER PRIMARY KEY AUTOINCREMENT, key TEXT UNIQUE, time INTEGER NOT NULL, headers TEXT, compress NUMBER NOT NULL, encoding NUMBER NOT NULL, raw BLOB NOT NULL, size NUMBER NOT NULL, ttl NUMBER + ) + `, + ) + .run(); + this.db + .prepare( + ` + CREATE TABLE IF NOT EXISTS config ( + field TEXT PRIMARY KEY, value TEXT NOT NULL + )`, + ) + .run(); + this.db + .prepare("CREATE INDEX IF NOT EXISTS idx_messages_key ON messages(key)") + .run(); + + this.conf = this.config(); + }; + + close = () => { + this.vacuum(); + this.db?.close(); + // @ts-ignore + delete this.options; + // @ts-ignore + delete this.db; + this.msgIDs?.clear(); + // @ts-ignore + delete this.msgIDs; + }; + + private compress = ( + raw: Buffer, + ): { raw: Buffer; compress: CompressionAlgorithm } => { + if ( + this.options.compression!.algorithm == CompressionAlgorithm.None || + raw.length <= this.options.compression!.threshold + ) { + return { raw, compress: CompressionAlgorithm.None }; + } + if (this.options.compression!.algorithm == CompressionAlgorithm.Zstd) { + return { raw: compress(raw), compress: CompressionAlgorithm.Zstd }; + } + throw Error( + `unknown compression algorithm: ${this.options.compression!.algorithm}`, + ); + }; + + set = ({ + encoding, + raw, + headers, + key, + ttl, + previousSeq, + msgID, + }: { + encoding: DataEncoding; + raw: Buffer; + headers?: JSONValue; + key?: string; + ttl?: number; + previousSeq?: number; + // if given, any attempt to publish something again with the same msgID + // is deduplicated. Use this to prevent accidentally writing twice, e.g., + // due to not getting a response back from the server. + msgID?: string; + }): { seq: number; time: number } => { + if (msgID !== undefined && this.msgIDs?.has(msgID)) { + return this.msgIDs.get(msgID)!; + } + if (key !== undefined && previousSeq !== undefined) { + // throw error if current seq number for the row + // with this key is not previousSeq. + const { seq } = this.db // there is an index on the key so this is fast + .prepare("SELECT seq FROM messages WHERE key=?") + .get(key) as any; + if (seq != previousSeq) { + throw new ConatError("wrong last sequence", { + code: "wrong-last-sequence", + }); + } + } + const time = Date.now(); + const compressedRaw = this.compress(raw); + const serializedHeaders = JSON.stringify(headers); + const size = + (serializedHeaders?.length ?? 0) + + (raw?.length ?? 0) + + (key?.length ?? 0); + + this.enforceLimits(size); + + const tx = this.db.transaction( + (time, compress, encoding, raw, headers, key, size, ttl) => { + if (key !== undefined) { + // insert with key -- delete all previous messages, as they will + // never be needed again and waste space. + this.db.prepare("DELETE FROM messages WHERE key = ?").run(key); + } + return this.db + .prepare( + "INSERT INTO messages(time, compress, encoding, raw, headers, key, size, ttl) VALUES (?, ?, ?, ?, ?, ?, ?, ?) RETURNING seq", + ) + .get(time / 1000, compress, encoding, raw, headers, key, size, ttl); + }, + ); + const row = tx( + time, + compressedRaw.compress, + encoding, + compressedRaw.raw, + serializedHeaders, + key, + size, + ttl, + ); + const seq = Number((row as any).seq); + // lastInsertRowid - is a bigint from sqlite, but we won't hit that limit + this.emit("change", { + op: "set", + seq, + time, + key, + encoding, + raw, + headers, + msgID, + }); + if (msgID !== undefined) { + this.msgIDs.set(msgID, { time, seq }); + } + return { time, seq }; + }; + + get = ({ + seq, + key, + }: { seq: number; key: undefined } | { seq: undefined; key: string }): + | Message + | undefined => { + let x; + if (seq) { + x = this.db + .prepare( + "SELECT seq, key, time, compress, encoding, raw, headers FROM messages WHERE seq=?", + ) + .get(seq); + } else if (key != null) { + // NOTE: we guarantee when doing set above that there is at most one + // row with a given key. Also there's a unique constraint. + x = this.db + .prepare( + "SELECT seq, key, time, compress, encoding, raw, headers FROM messages WHERE key=?", + ) + .get(key); + } else { + x = undefined; + } + return dbToMessage(x as any); + }; + + *getAll({ + start_seq, + end_seq, + }: { end_seq?: number; start_seq?: number } = {}): IterableIterator { + let query: string, stmt; + + const where: string[] = []; + const v: number[] = []; + if (start_seq != null) { + where.push("seq>=?"); + v.push(start_seq); + } + if (end_seq != null) { + where.push("seq<=?"); + v.push(end_seq); + } + query = `SELECT seq, key, time, compress, encoding, raw, headers FROM messages ${where.length == 0 ? "" : " where " + where.join(" AND ")} ORDER BY seq`; + stmt = this.db.prepare(query); + for (const row of stmt.iterate(...v)) { + yield dbToMessage(row)!; + } + } + + delete = ({ + seq, + last_seq, + all, + }: { + seq?: number; + last_seq?: number; + all?: boolean; + }): { seqs: number[] } => { + let seqs: number[] = []; + if (all) { + seqs = this.db + .prepare("SELECT seq FROM messages") + .all() + .map((row: any) => row.seq); + this.db.prepare("DELETE FROM messages").run(); + this.vacuum(); + } else if (last_seq) { + seqs = this.db + .prepare("SELECT seq FROM messages WHERE seq<=?") + .all(last_seq) + .map((row: any) => row.seq); + this.db.prepare("DELETE FROM messages WHERE seq<=?").run(last_seq); + this.vacuum(); + } else if (seq) { + seqs = this.db + .prepare("SELECT seq FROM messages WHERE seq=?") + .all(seq) + .map((row: any) => row.seq); + this.db.prepare("DELETE FROM messages WHERE seq=?").run(seq); + } + this.emit("change", { op: "delete", seqs }); + return { seqs }; + }; + + vacuum = () => { + this.db.prepare("VACUUM").run(); + }; + + get length(): number { + const { length } = this.db + .prepare("SELECT COUNT(*) AS length FROM messages") + .get() as { length: number }; + return length; + } + + keys = (): string[] => { + const v = this.db + .prepare("SELECT key FROM messages WHERE key IS NOT NULL") + .all() as { + key: string; + }[]; + return v.map(({ key }) => key); + }; + + sqlite = (statement: string, params: any[] = []): any[] => { + // Matches "attach database" (case-insensitive, ignores whitespace) + if (/\battach\s+database\b/i.test(statement)) { + throw Error("ATTACH DATABASE not allowed"); + } + const stmt = this.db.prepare(statement); + try { + return stmt.all(...params); + } catch (err) { + if (err.message.includes("run() instead")) { + stmt.run(...params); + return []; + } else { + throw err; + } + } + }; + + config = (config?: Partial): Configuration => { + const cur: any = {}; + for (const { field, value } of this.db + .prepare("SELECT * FROM config") + .all() as any) { + cur[field] = value; + } + const full: Partial = {}; + for (const key in CONFIGURATION) { + const { def, fromDb, toDb } = CONFIGURATION[key]; + full[key] = + config?.[key] ?? (cur[key] !== undefined ? fromDb(cur[key]) : def); + const x = toDb(full[key]); + if (config?.[key] != null && full[key] != (cur[key] ?? def)) { + // making a change + this.db + .prepare( + `INSERT INTO config (field, value) VALUES(?, ?) ON CONFLICT(field) DO UPDATE SET value=excluded.value`, + ) + .run(key, x); + } + full[key] = fromDb(x); + } + this.conf = full as Configuration; + // ensure any new limits are enforced + this.enforceLimits(0); + return full as Configuration; + }; + + private emitDelete = (rows) => { + if (rows.length > 0) { + const seqs = rows.map((row: { seq: number }) => row.seq); + this.emit("change", { op: "delete", seqs }); + } + }; + + // do whatever limit enforcement and throttling is needed when inserting one new message + // with the given size; if size=0 assume not actually inserting a new message, and just + // enforcingt current limits + private enforceLimits = (size: number = 0) => { + if (this.conf.max_msgs > -1) { + const length = this.length + (size > 0 ? 1 : 0); + if (length > this.conf.max_msgs) { + // delete earliest messages to make room + const rows = this.db + .prepare( + `DELETE FROM messages WHERE seq IN (SELECT seq FROM messages ORDER BY seq ASC LIMIT ?) RETURNING seq`, + ) + .all(length - this.conf.max_msgs); + this.emitDelete(rows); + } + } + + if (this.conf.max_age > 0) { + const rows = this.db + .prepare( + `DELETE FROM messages WHERE seq IN (SELECT seq FROM messages WHERE time <= ?) RETURNING seq`, + ) + .all((Date.now() - this.conf.max_age) / 1000); + this.emitDelete(rows); + } + + if (this.conf.max_bytes > -1) { + if (size > this.conf.max_bytes) { + // new message exceeds total, so this is the same as adding in the new message, + // then deleting everything. + this.delete({ all: true }); + } else { + // delete all the earliest (in terms of seq number) messages so that the sum of the remaining + // sizes along with the new size is <= max_bytes. + // Only enforce if actually inserting, or if current sum is over + const totalSize = + ( + this.db + .prepare(`SELECT SUM(size) AS sum FROM messages`) + .get() as any + ).sum ?? 0; + const newTotal = totalSize + size; + if (newTotal > this.conf.max_bytes) { + const bytesToFree = newTotal - this.conf.max_bytes; + let freed = 0; + let lastSeqToDelete: number | null = null; + + for (const { seq, size: msgSize } of this.db + .prepare(`SELECT seq, size FROM messages ORDER BY seq ASC`) + .iterate() as any) { + if (freed >= bytesToFree) break; + freed += msgSize; + lastSeqToDelete = seq; + } + + if (lastSeqToDelete !== null) { + const rows = this.db + .prepare(`DELETE FROM messages WHERE seq <= ? RETURNING seq`) + .all(lastSeqToDelete); + this.emitDelete(rows); + } + } + } + } + + if (this.conf.allow_msg_ttl) { + const rows = this.db + .prepare( + `DELETE FROM messages WHERE ttl IS NOT null AND time + ttl/1000 < ? RETURNING seq`, + ) + .all(Date.now() / 1000); + this.emitDelete(rows); + } + + if (this.conf.max_msg_size > -1 && size > this.conf.max_msg_size) { + throw new ConatError( + `max_msg_size of ${this.conf.max_msg_size} bytes exceeded`, + { code: "reject" }, + ); + } + }; +} + +function dbToMessage( + x: + | { + seq: number; + key?: string; + time: number; + compress: CompressionAlgorithm; + encoding: DataEncoding; + raw: Buffer; + headers?: string; + } + | undefined, +): Message | undefined { + if (x === undefined) { + return x; + } + return { + seq: x.seq, + time: x.time * 1000, + key: x.key != null ? x.key : undefined, + encoding: x.encoding, + raw: handleDecompress(x), + headers: x.headers ? JSON.parse(x.headers) : undefined, + }; +} + +function handleDecompress({ + raw, + compress, +}: { + raw: Buffer; + compress: CompressionAlgorithm; +}) { + if (compress == CompressionAlgorithm.None) { + return raw; + } else if (compress == CompressionAlgorithm.Zstd) { + return decompress(raw); + } else { + throw Error(`unknown compression ${compress}`); + } +} + +export const cache = refCacheSync< + Options & { noCache?: boolean }, + PersistentStream +>({ + name: "persistent-stream", + createObject: (options: Options & { noCache?: boolean }) => { + const pstream = new PersistentStream(options); + pstream.init(); + return pstream; + }, +}); + +export function pstream( + options: Options & { noCache?: boolean }, +): PersistentStream { + return cache(options); +} diff --git a/src/packages/nats/primus.ts b/src/packages/conat/primus.ts similarity index 80% rename from src/packages/nats/primus.ts rename to src/packages/conat/primus.ts index 2bbe5b8997..101584a02b 100644 --- a/src/packages/nats/primus.ts +++ b/src/packages/conat/primus.ts @@ -1,25 +1,34 @@ /* -Implement something that acts like a websocket as exposed in Primus, but using NATS. +Implement something that acts like a project-specific websocket from +**Primus**, but using Conats (which is really socket-io through a central +message broker). Development: -1. Change to a directly like packages/project that imports nats and backend +1. Change to a directory such as packages/project 2. Example session: ~/cocalc/src/packages/project$ node ... -Primus = require('@cocalc/nats/primus').Primus; -env = await require('@cocalc/backend/nats').getEnv(); +# non-channel full communication + +Primus = require('@cocalc/conat/primus').Primus; +env = await require('@cocalc/backend/conat').getEnv(); server = new Primus({subject:'test',env,role:'server',id:'s'}); -sparks = []; server.on("connection", (spark) => sparks.push(spark)) +sparks = []; server.on("connection", (spark) => sparks.push(spark)); + client = new Primus({subject:'test',env,role:'client',id:'c0'}); -sparks[0] client.on('data',(data)=>console.log('client got', data));0 sparks[0].write("foo") +sparks[0].on('data', (data)=>console.log("server got", data));0 +client.write('bar') + +# communication via a channel. + s9 = server.channel('9') c9 = client.channel('9') c9.on("data", (data)=>console.log('c9 got', data));0 @@ -28,6 +37,8 @@ s9.on("data", (data)=>console.log('s9 got', data));0 c9.write("from client 9") s9.write("from the server 9") + + client_b = new Primus({subject:'test',env,role:'client',id:'cb'}); c9b = client_b.channel('9') c9b.on("data", (data)=>console.log('c9b got', data));0 @@ -37,17 +48,15 @@ s9.sparks['cb'].write('blah') */ import { EventEmitter } from "events"; -import { type NatsEnv } from "@cocalc/nats/types"; +import { type NatsEnv } from "@cocalc/conat/types"; import { delay } from "awaiting"; -import { encodeBase64 } from "@cocalc/nats/util"; +import { encodeBase64 } from "@cocalc/conat/util"; +import { type Subscription } from "@cocalc/conat/core/client"; export type Role = "client" | "server"; -const PING_INTERVAL = 10000; +const PING_INTERVAL = 30000; -// function otherRole(role: Role): Role { -// return role == "client" ? "server" : "client"; -// } interface PrimusOptions { subject: string; channelName?: string; @@ -115,7 +124,7 @@ export class Primus extends EventEmitter { // this is just for compat with primus api: address = { ip: "" }; conn: { id: string }; - subs: any[] = []; + subs: Subscription[] = []; OPEN = 1; CLOSE = 0; readyState: 0; @@ -184,10 +193,14 @@ export class Primus extends EventEmitter { throw Error("only server can serve"); } this.deleteSparks(); - const sub = this.env.nc.subscribe(this.subjects.control); + const sub = await this.env.cn.subscribe(this.subjects.control); this.subs.push(sub); for await (const mesg of sub) { - const data = this.env.jc.decode(mesg.data) ?? ({} as any); + console.log("got ", { data: mesg.data }); + const data = mesg.data; + if (data == null) { + continue; + } if (data.cmd == "ping") { const spark = this.sparks[data.id]; if (spark != null) { @@ -200,7 +213,7 @@ export class Primus extends EventEmitter { }); this.sparks[data.id] = spark; this.emit("connection", spark); - mesg.respond(this.env.jc.encode({ status: "ok" })); + mesg.respond({ status: "ok" }); } } }; @@ -221,32 +234,25 @@ export class Primus extends EventEmitter { if (this.role != "client") { throw Error("only client can connect"); } - const mesg = this.env.jc.encode({ - cmd: "connect", - id: this.id, - }); + const mesg = { cmd: "connect", id: this.id }; console.log("Nats Primus: connecting..."); - await this.env.nc.publish(this.subjects.control, mesg); + await this.env.cn.request(this.subjects.control, mesg); this.clientPing(); console.log("Nats Primus: connected:"); - const sub = this.env.nc.subscribe(this.subjects.client); + const sub = await this.env.cn.subscribe(this.subjects.client); this.subs.push(sub); for await (const mesg of sub) { - const data = this.env.jc.decode(mesg.data) ?? ({} as any); - this.emit("data", data); + this.emit("data", mesg.data); } }; private clientPing = async () => { while (this.state != "closed") { try { - await this.env.nc.publish( - this.subjects.control, - this.env.jc.encode({ - cmd: "ping", - id: this.id, - }), - ); + await this.env.cn.publish(this.subjects.control, { + cmd: "ping", + id: this.id, + }); } catch { // if ping fails, connection is not working, so die. this.destroy(); @@ -261,11 +267,10 @@ export class Primus extends EventEmitter { this.role == "client" ? this.subjects.clientChannel : this.subjects.serverChannel; - const sub = this.env.nc.subscribe(subject); + const sub = await this.env.cn.subscribe(subject); this.subs.push(sub); for await (const mesg of sub) { - const data = this.env.jc.decode(mesg.data) ?? ({} as any); - this.emit("data", data); + this.emit("data", mesg.data); } }; @@ -277,11 +282,15 @@ export class Primus extends EventEmitter { if (!this.channel) { throw Error("broadcast write not implemented when not in channel mode"); } + // we are the server, so write to all clients in channel subject = this.subjects.clientChannel; } else { - subject = this.subjects.server; + // we are the client, so write to server (possibly a channel) + subject = this.channelName + ? this.subjects.serverChannel + : this.subjects.server; } - this.env.nc.publish(subject, this.env.jc.encode(data)); + this.env.cn.publish(subject, data); return true; }; @@ -337,19 +346,15 @@ export class Spark extends EventEmitter { end = () => this.destroy(); private init = async () => { - const sub = this.primus.env.nc.subscribe(this.subjects.server); + const sub = await this.primus.env.cn.subscribe(this.subjects.server); this.subs.push(sub); for await (const mesg of sub) { - const data = this.primus.env.jc.decode(mesg.data); - this.emit("data", data); + this.emit("data", mesg.data); } }; write = (data) => { - this.primus.env.nc.publish( - this.subjects.client, - this.primus.env.jc.encode(data), - ); + this.primus.env.cn.publish(this.subjects.client, data); return true; }; } diff --git a/src/packages/nats/project-api/editor.ts b/src/packages/conat/project-api/editor.ts similarity index 100% rename from src/packages/nats/project-api/editor.ts rename to src/packages/conat/project-api/editor.ts diff --git a/src/packages/nats/project-api/index.ts b/src/packages/conat/project-api/index.ts similarity index 93% rename from src/packages/nats/project-api/index.ts rename to src/packages/conat/project-api/index.ts index 2bc684cc0f..694229b2bc 100644 --- a/src/packages/nats/project-api/index.ts +++ b/src/packages/conat/project-api/index.ts @@ -1,7 +1,7 @@ import { type System, system } from "./system"; import { type Editor, editor } from "./editor"; import { type Sync, sync } from "./sync"; -import { handleErrorMessage } from "@cocalc/nats/util"; +import { handleErrorMessage } from "@cocalc/conat/util"; export interface ProjectApi { system: System; diff --git a/src/packages/nats/project-api/sync.ts b/src/packages/conat/project-api/sync.ts similarity index 100% rename from src/packages/nats/project-api/sync.ts rename to src/packages/conat/project-api/sync.ts diff --git a/src/packages/nats/project-api/system.ts b/src/packages/conat/project-api/system.ts similarity index 100% rename from src/packages/nats/project-api/system.ts rename to src/packages/conat/project-api/system.ts diff --git a/src/packages/nats/service/formatter.ts b/src/packages/conat/service/formatter.ts similarity index 100% rename from src/packages/nats/service/formatter.ts rename to src/packages/conat/service/formatter.ts diff --git a/src/packages/conat/service/index.ts b/src/packages/conat/service/index.ts new file mode 100644 index 0000000000..fd6e64b000 --- /dev/null +++ b/src/packages/conat/service/index.ts @@ -0,0 +1,8 @@ +export type { + ServiceDescription, + CallConatServiceFunction, + ServiceCall, + CreateConatServiceFunction, + ConatService, +} from "./service"; +export { callConatService, createConatService } from "./service"; diff --git a/src/packages/nats/service/jupyter.ts b/src/packages/conat/service/jupyter.ts similarity index 84% rename from src/packages/nats/service/jupyter.ts rename to src/packages/conat/service/jupyter.ts index 5ea9093395..7f96623606 100644 --- a/src/packages/nats/service/jupyter.ts +++ b/src/packages/conat/service/jupyter.ts @@ -46,12 +46,6 @@ export interface JupyterApi { export type JupyterApiEndpoint = keyof JupyterApi; -// we use request many for all calls to the Jupyter server, because -// at least one call -- more_output -- is very likely to return -// very large results (it's kind of the point), and this makes -// handling this very easy. -const REQUEST_MANY = true; - export function jupyterApiClient({ project_id, path, @@ -66,7 +60,6 @@ export function jupyterApiClient({ path, service, timeout, - many: REQUEST_MANY, }); } @@ -85,6 +78,5 @@ export async function createNatsJupyterService({ service, impl, description: "Jupyter notebook compute API", - many: REQUEST_MANY, }); } diff --git a/src/packages/nats/service/listings.ts b/src/packages/conat/service/listings.ts similarity index 98% rename from src/packages/nats/service/listings.ts rename to src/packages/conat/service/listings.ts index 5213db6975..0d194c5ae9 100644 --- a/src/packages/nats/service/listings.ts +++ b/src/packages/conat/service/listings.ts @@ -4,7 +4,7 @@ Service for watching directory listings in a project or compute server. import { createServiceClient, createServiceHandler } from "./typed"; import type { DirectoryListingEntry } from "@cocalc/util/types"; -import { dkv, type DKV } from "@cocalc/nats/sync/dkv"; +import { dkv, type DKV } from "@cocalc/conat/sync/dkv"; import { EventEmitter } from "events"; import refCache from "@cocalc/util/refcache"; @@ -66,7 +66,7 @@ export async function createListingsService({ }); } -const limits = { +const config = { max_msgs: MAX_DIRECTORIES, }; @@ -84,7 +84,7 @@ export async function getListingsKV( ): Promise> { return await dkv({ name: "listings", - limits, + config, ...opts, }); } @@ -101,7 +101,7 @@ export async function getListingsTimesKV( ): Promise> { return await dkv({ name: "listings-times", - limits, + config, ...opts, }); } diff --git a/src/packages/conat/service/service.ts b/src/packages/conat/service/service.ts new file mode 100644 index 0000000000..8a9cbc41c9 --- /dev/null +++ b/src/packages/conat/service/service.ts @@ -0,0 +1,328 @@ +/* +Simple to use UI to connect anything in cocalc via request/reply services. + +- callConatService +- createConatService + +The input is basically where the service is (account, project, public), +and either what message to send or how to handle messages. +Also if the handler throws an error, the caller will throw +an error too. +*/ + +import { type Location } from "@cocalc/conat/types"; +import { trunc_middle } from "@cocalc/util/misc"; +import { getEnv, getLogger } from "@cocalc/conat/client"; +import { randomId } from "@cocalc/conat/names"; +import { delay } from "awaiting"; +import { EventEmitter } from "events"; +import { encodeBase64 } from "@cocalc/conat/util"; +import { type Client } from "@cocalc/conat/core/client"; + +const DEFAULT_TIMEOUT = 10 * 1000; + +const logger = getLogger("nats:service"); + +export interface ServiceDescription extends Location { + service: string; + + description?: string; + + // if true and multiple servers are setup in same "location", then they ALL get to respond (sender gets first response). + all?: boolean; + + // DEFAULT: ENABLE_SERVICE_FRAMEWORK + enableServiceFramework?: boolean; + + subject?: string; +} + +export interface ServiceCall extends ServiceDescription { + mesg: any; + timeout?: number; + + // if it fails with error.code 503, we wait for service to be ready and try again, + // unless this is set -- e.g., when waiting for the service in the first + // place we set this to avoid an infinite loop. + noRetry?: boolean; + + client?: Client; +} + +export async function callConatService(opts: ServiceCall): Promise { + // console.log("callConatService", opts); + const cn = opts.client ?? (await getEnv()).cn; + const subject = serviceSubject(opts); + let resp; + const timeout = opts.timeout ?? DEFAULT_TIMEOUT; + // ensure not undefined, since undefined can't be published. + const data = opts.mesg ?? null; + + const doRequest = async () => { + resp = await cn.request(subject, data, { + timeout, + }); + const result = resp.data; + if (result?.error) { + throw Error(result.error); + } + return result; + }; + + try { + // try to call the service: + return await doRequest(); + } catch (err) { + // console.log(`request to '${subject}' failed -- ${err}`); + // it failed. + if (opts.noRetry) { + throw err; + } + const p = opts.path ? `${trunc_middle(opts.path, 64)}:` : ""; + if (err.code == 503) { + // it's actually just not ready, so + // wait for the service to be ready, then try again + await waitForConatService({ options: opts, maxWait: timeout }); + try { + return await doRequest(); + } catch (err) { + if (err.code == 503) { + err.message = `Not Available: service ${p}${opts.service} is not available`; + } + throw err; + } + } else if (err.code == 408) { + throw Error( + `Timeout: service '${p}${opts.service}' did not respond for ${Math.round(timeout / 1000)} seconds`, + ); + } else { + throw err; + } + } +} + +export type CallConatServiceFunction = typeof callConatService; + +export interface Options extends ServiceDescription { + description?: string; + version?: string; + handler: (mesg) => Promise; + client?: Client; + // NOTE: Unlike subscriptions, ephemeral defaults to true for services, because + // usually users want a quick *response*, rather than queuing up requests to a + // server that might never reconnect. + ephemeral?: boolean; +} + +export function createConatService(options: Options) { + return new ConatService(options); +} + +export type CreateConatServiceFunction = typeof createConatService; + +export function serviceSubject({ + service, + + account_id, + browser_id, + + project_id, + compute_server_id, + + path, + + subject, +}: ServiceDescription): string { + if (subject) { + return subject; + } + let segments; + path = path ? encodeBase64(path) : "_"; + if (!project_id && !account_id) { + segments = ["public", service]; + } else if (account_id) { + segments = [ + "services", + `account-${account_id}`, + browser_id ?? "_", + project_id ?? "_", + path ?? "_", + service, + ]; + } else if (project_id) { + segments = [ + "services", + `project-${project_id}`, + compute_server_id ?? "_", + service, + path, + ]; + } + return segments.join("."); +} + +export function serviceName({ + service, + + account_id, + browser_id, + + project_id, + compute_server_id, +}: ServiceDescription): string { + let segments; + if (!project_id && !account_id) { + segments = [service]; + } else if (account_id) { + segments = [`account-${account_id}`, browser_id ?? "-", service]; + } else if (project_id) { + segments = [`project-${project_id}`, compute_server_id ?? "-", service]; + } + return segments.join("-"); +} + +export function serviceDescription({ + description, + path, +}: ServiceDescription): string { + return [description, path ? `\nPath: ${path}` : ""].join(""); +} + +export class ConatService extends EventEmitter { + private options: Options; + public readonly subject: string; + public readonly name: string; + private sub?; + + constructor(options: Options) { + super(); + this.options = { ephemeral: true, ...options }; + this.name = serviceName(this.options); + this.subject = serviceSubject(options); + this.runService(); + } + + private log = (...args) => { + logger.debug(`service:subject='${this.subject}' -- `, ...args); + }; + + // create and run the service until something goes wrong, when this + // willl return. It does not throw an error. + private runService = async () => { + this.emit("starting"); + this.log("starting service", { + name: this.name, + description: this.options.description, + version: this.options.version, + }); + const cn = this.options.client ?? (await getEnv()).cn; + const queue = this.options.all ? randomId() : "0"; + // service=true so upon disconnect the socketio backend server + // immediately stops routing traffic to this. + this.sub = await cn.subscribe(this.subject, { + queue, + ephemeral: this.options.ephemeral, + }); + this.emit("running"); + await this.listen(); + }; + + private listen = async () => { + for await (const mesg of this.sub) { + const request = mesg.data ?? {}; + + // console.logger.debug("handle nats service call", request); + let resp; + if (request == "ping") { + resp = "pong"; + } else { + try { + resp = await this.options.handler(request); + } catch (err) { + resp = { error: `${err}` }; + } + } + try { + await mesg.respond(resp); + } catch (err) { + // If, e.g., resp is too big, then the error would be + // "NatsError: MAX_PAYLOAD_EXCEEDED" + // and it is of course very important to make the caller aware that + // there was an error, as opposed to just silently leaving + // them hanging forever. + const data = { error: `${err}` }; + await mesg.respond(data); + } + } + }; + + close = () => { + if (!this.subject) { + return; + } + this.emit("close"); + this.removeAllListeners(); + this.sub?.stop(); + delete this.sub; + // @ts-ignore + delete this.subject; + // @ts-ignore + delete this.options; + }; +} + +interface ServiceClientOpts { + options: ServiceDescription; + maxWait?: number; + id?: string; +} + +export async function pingConatService({ + options, + maxWait = 3000, +}: ServiceClientOpts): Promise { + const pong = await callConatService({ + ...options, + mesg: "ping", + timeout: Math.max(3000, maxWait), + // set no-retry to avoid infinite loop + noRetry: true, + }); + return [pong]; +} + +export async function waitForConatService({ + options, + maxWait = 60000, +}: { + options: ServiceDescription; + maxWait?: number; +}) { + let d = 1000; + let m = 100; + const start = Date.now(); + const getPing = async (m: number) => { + try { + return await pingConatService({ options, maxWait: m }); + } catch { + // ping can fail, e.g, if not connected to nats at all or the ping + // service isn't up yet. + return [] as string[]; + } + }; + let ping = await getPing(m); + while (ping.length == 0) { + d = Math.min(10000, d * 1.3); + m = Math.min(1500, m * 1.3); + if (Date.now() - start + d >= maxWait) { + logger.debug( + `timeout waiting for ${serviceName(options)} to start...`, + d, + ); + throw Error("timeout"); + } + await delay(d); + ping = await getPing(m); + } + return ping; +} diff --git a/src/packages/nats/service/syncfs-client.ts b/src/packages/conat/service/syncfs-client.ts similarity index 100% rename from src/packages/nats/service/syncfs-client.ts rename to src/packages/conat/service/syncfs-client.ts diff --git a/src/packages/nats/service/syncfs-server.ts b/src/packages/conat/service/syncfs-server.ts similarity index 100% rename from src/packages/nats/service/syncfs-server.ts rename to src/packages/conat/service/syncfs-server.ts diff --git a/src/packages/nats/service/terminal.ts b/src/packages/conat/service/terminal.ts similarity index 94% rename from src/packages/nats/service/terminal.ts rename to src/packages/conat/service/terminal.ts index e69c0de0b4..21978a2f9b 100644 --- a/src/packages/nats/service/terminal.ts +++ b/src/packages/conat/service/terminal.ts @@ -40,7 +40,7 @@ export function createTerminalClient({ project_id, path }) { return createServiceClient({ project_id, path, - service: "project-api", + service: "terminal-server", }); } @@ -58,7 +58,7 @@ export async function createTerminalServer({ return await createServiceHandler({ project_id, path, - service: "project-api", + service: "terminal-server", description: "Terminal service.", impl, }); @@ -81,7 +81,7 @@ export function createBrowserClient({ project_id, path }) { return createServiceClient({ project_id, path, - service: "browser-api", + service: "terminal-browser", }); } @@ -97,7 +97,7 @@ export async function createBrowserService({ return await createServiceHandler({ project_id, path, - service: "browser-api", + service: "terminal-browser", description: "Browser Terminal service.", all: true, impl, diff --git a/src/packages/nats/service/time.ts b/src/packages/conat/service/time.ts similarity index 95% rename from src/packages/nats/service/time.ts rename to src/packages/conat/service/time.ts index 30a1f7bc40..d5c6c41c65 100644 --- a/src/packages/nats/service/time.ts +++ b/src/packages/conat/service/time.ts @@ -5,7 +5,7 @@ This is a global service that is run by hubs. */ import { createServiceClient, createServiceHandler } from "./typed"; -import { getClient } from "@cocalc/nats/client"; +import { getClient } from "@cocalc/conat/client"; interface TimeApi { // time in ms since epoch, i.e., Date.now() diff --git a/src/packages/nats/service/typed.ts b/src/packages/conat/service/typed.ts similarity index 56% rename from src/packages/nats/service/typed.ts rename to src/packages/conat/service/typed.ts index f8bd8dcd2e..07a7508c3d 100644 --- a/src/packages/nats/service/typed.ts +++ b/src/packages/conat/service/typed.ts @@ -1,17 +1,13 @@ import { - callNatsService, - createNatsService, - natsServiceInfo, - natsServiceStats, - pingNatsService, - waitForNatsService, + callConatService, + createConatService, + pingConatService, + waitForConatService, } from "./service"; import type { Options, ServiceCall } from "./service"; export interface Extra { - info: typeof natsServiceInfo; - stats: typeof natsServiceStats; - ping: typeof pingNatsService; + ping: typeof pingConatService; waitFor: (opts?: { maxWait?: number }) => Promise; } @@ -29,24 +25,20 @@ export function createServiceClient(options: Omit) { } if (prop == "nats") { return { - info: async (opts: { id?: string; maxWait?: number } = {}) => - await natsServiceInfo({ options, ...opts }), - stats: async (opts: { id?: string; maxWait?: number } = {}) => - await natsServiceStats({ options, ...opts }), ping: async (opts: { id?: string; maxWait?: number } = {}) => - await pingNatsService({ options, ...opts }), + await pingConatService({ options, ...opts }), waitFor: async (opts: { maxWait?: number } = {}) => - await waitForNatsService({ options, ...opts }), + await waitForConatService({ options, ...opts }), }; } return async (...args) => { try { - return await callNatsService({ + return await callConatService({ ...options, mesg: { name: prop, args }, }); } catch (err) { - err.message = `Error calling remote function '${prop}': ${err.message}`; + err.message = `calling remote function '${prop}': ${err.message}`; throw err; } }; @@ -55,11 +47,11 @@ export function createServiceClient(options: Omit) { ) as Api & ServiceApi; } -export async function createServiceHandler({ +export function createServiceHandler({ impl, ...options }: Omit & { impl: Api }) { - return await createNatsService({ + return createConatService({ ...options, handler: async (mesg) => await impl[mesg.name](...mesg.args), }); diff --git a/src/packages/conat/sync/akv.ts b/src/packages/conat/sync/akv.ts new file mode 100644 index 0000000000..6cd5db6385 --- /dev/null +++ b/src/packages/conat/sync/akv.ts @@ -0,0 +1,154 @@ +/* +Asynchronous Memory Efficient Access to Persistent Key:Value Store + +This provides access to the same data as dkv, except it doesn't download any +data to the client until you actually call get. The calls to get and +set are thus async. + +There is no need to close this because it is stateless. + +[ ] TODO: efficiently get or set many values at once in a single call. This will be +very useful, e.g., for jupyter notebook timetravel browsing. + +DEVELOPMENT: + +~/cocalc/src/packages/backend$ node + +a = await require("@cocalc/backend/conat/sync").dkv({name:'test'}); a.set('x',5) + + +b = require("@cocalc/backend/conat/sync").akv({name:'test'}) +await b.set('x',10) + +a.get('x') + +await b.get('x') + +*/ + +import * as persistClient from "@cocalc/conat/persist/client"; +import { type DKVOptions } from "./dkv"; +import { + type Headers, + messageData, + type Message, +} from "@cocalc/conat/core/client"; +import { storagePath, type User, COCALC_TOMBSTONE_HEADER } from "./core-stream"; + +export class AKV { + private storage: persistClient.Storage; + private user: User; + + constructor(options: DKVOptions) { + this.user = { + account_id: options.account_id, + project_id: options.project_id, + }; + this.storage = { path: storagePath(options) }; + } + + getMessage = async ( + key: string, + { timeout }: { timeout?: number } = {}, + ): Promise | undefined> => { + const mesg = await persistClient.get({ + user: this.user, + storage: this.storage, + key, + timeout, + }); + if (mesg?.headers?.[COCALC_TOMBSTONE_HEADER]) { + return undefined; + } + return mesg; + }; + + // // Just get one value asynchronously, rather than the entire dkv. + // // If the timeout option is given and the value of key is not set, + // // will wait until that many ms to get the key. + get = async ( + key: string, + opts?: { timeout?: number }, + ): Promise => { + return (await this.getMessage(key, opts))?.data; + }; + + headers = async ( + key: string, + opts?: { timeout?: number }, + ): Promise => { + return (await this.getMessage(key, opts))?.headers; + }; + + time = async ( + key: string, + opts?: { timeout?: number }, + ): Promise => { + const time = (await this.getMessage(key, opts))?.headers?.time; + return time !== undefined ? new Date(time as number) : undefined; + }; + + delete = async (key: string, opts?: { timeout?: number }): Promise => { + await this.set(key, null as any, { + ...opts, + headers: { [COCALC_TOMBSTONE_HEADER]: true }, + }); + }; + + seq = async ( + key: string, + opts?: { timeout?: number }, + ): Promise => { + return (await this.getMessage(key, opts))?.headers?.seq as + | number + | undefined; + }; + + set = async ( + key: string, + value: T, + options?: { + headers?: Headers; + previousSeq?: number; + timeout?: number; + // note: msgID is NOT supported because its lifetime is that of the stream object + // on the server, which is likely immediately removed when using akv. Of course + // msgID is mainly for streams and not very relevant for kv. + }, + ) => { + return await persistClient.set({ + user: this.user, + storage: this.storage, + key, + messageData: messageData(value, { headers: options?.headers }), + previousSeq: options?.previousSeq, + timeout: options?.timeout, + }); + }; + + keys = async ({ timeout }: { timeout?: number } = {}): Promise => { + return await persistClient.keys({ + user: this.user, + storage: this.storage, + timeout, + }); + }; + + sqlite = async ( + statement: string, + params?: any[], + { timeout }: { timeout?: number } = {}, + ): Promise => { + return await persistClient.sqlite({ + user: this.user, + storage: this.storage, + timeout, + statement, + params, + }); + }; +} + +export function akv(opts: DKVOptions) { + return new AKV(opts); +} diff --git a/src/packages/conat/sync/core-stream.ts b/src/packages/conat/sync/core-stream.ts new file mode 100644 index 0000000000..eb72e9b9d6 --- /dev/null +++ b/src/packages/conat/sync/core-stream.ts @@ -0,0 +1,1301 @@ +/* +core-stream.ts = the Core Stream data structure for conats. + +This is the core data structure that easy-to-use ephemeral and persistent +streams and kv stores are built on. It is NOT meant to be super easy and +simple to use as synchronous with save in the background. Instead, operations +are async, and the API is complicated. We build dkv, dstream, etc. on +top of this with a much friendly API. + + +NOTE: unlike in NATS, in kv mode, the keys can be any utf-8 string. +We use the subject to track communication involving this stream, but +otherwise it has no relevant to the keys. Conat's core pub/sub/request/ +reply model is very similar to NATS, but the analogue of Jetstream is +different because I don't find Jetstream useful at all, and find this +much more useful. + +DEVELOPMENT: + +~/cocalc/src/packages/backend$ node + + + require('@cocalc/backend/conat'); a = require('@cocalc/conat/sync/core-stream'); s = await a.stream({name:'test', leader:true}) + + +Testing two at once (a leader and non-leader): + + require('@cocalc/backend/conat'); s = await require('@cocalc/backend/conat/sync').dstream({ephemeral:true,name:'test', leader:1, noAutosave:true}); t = await require('@cocalc/backend/conat/sync').dstream({ephemeral:true,name:'test', leader:0,noAutosave:true}) + + +With persistence: + + require('@cocalc/backend/conat'); a = require('@cocalc/conat/sync/core-stream'); s = await a.stream({name:'test', project_id:'00000000-0000-4000-8000-000000000000', persist:true}) + +*/ + +import { + enforceLimits, + enforceRateLimits, + ENFORCE_LIMITS_THROTTLE_MS, +} from "./limits"; +import { EventEmitter } from "events"; +import { + type Subscription, + Message, + type Headers, + messageData, + decode, +} from "@cocalc/conat/core/client"; +import { isNumericString } from "@cocalc/util/misc"; +import type { JSONValue } from "@cocalc/util/types"; +import { encodeBase64 } from "@cocalc/conat/util"; +import refCache from "@cocalc/util/refcache"; +import { streamSubject } from "@cocalc/conat/names"; +import { getEnv } from "@cocalc/conat/client"; +import { reuseInFlight } from "@cocalc/util/reuse-in-flight"; +import { throttle } from "lodash"; +import { once } from "@cocalc/util/async-utils"; +import { callback, delay } from "awaiting"; +import { randomId } from "@cocalc/conat/names"; +import * as persistClient from "@cocalc/conat/persist/client"; +import type { Client } from "@cocalc/conat/core/client"; +import jsonStableStringify from "json-stable-stringify"; +import type { + SetOperation, + DeleteOperation, + Configuration, +} from "@cocalc/conat/persist/storage"; +export type { Configuration }; + +// when this many bytes of key:value have been changed (so need to be freed), +// we do a garbage collection pass. +export const KEY_GC_THRESH = 10 * 1e6; + +// NOTE: when you do delete this.deleteKv(key), we ensure the previous +// messages with the given key is completely deleted from sqlite, and +// also create a *new* lightweight tombstone. That tombstone has this +// ttl, which defaults to DEFAULT_TOMBSTONE_TTL (one week), so the tombstone +// itself will be removed after 1 week. The tombstone is only needed for +// clients that go offline during the delete, then come back, and reply the +// partial log of what was missed. Such clients should reset if the +// offline time is longer than DEFAULT_TOMBSTONE_TTL. +// This only happens if allow_msg_ttl is configured to true, which is +// done with dkv, but not on by default otherwise. +export const DEFAULT_TOMBSTONE_TTL = 7 * 24 * 60 * 60 * 1000; // 1 week + +export interface RawMsg extends Message { + timestamp: number; + seq: number; + sessionId: string; + key?: string; +} + +export interface ChangeEvent { + mesg?: T; + raw?: Partial; + key?: string; + prev?: T; + msgID?: string; +} + +const HEADER_PREFIX = "CoCalc-"; + +export const COCALC_MESSAGE_ID_HEADER = `${HEADER_PREFIX}Msg-Id`; +export const COCALC_TOMBSTONE_HEADER = `${HEADER_PREFIX}Tombstone`; +export const COCALC_STREAM_HEADER = `${HEADER_PREFIX}Stream`; +export const COCALC_OPTIONS_HEADER = `${HEADER_PREFIX}Options`; +export const COCALC_HEARTBEAT_HEADER = `${HEADER_PREFIX}Heartbeat`; + +const PUBLISH_TIMEOUT = 7500; + +const DEFAULT_HEARTBEAT_INTERVAL = 30 * 1000; + +export interface CoreStreamOptions { + // what it's called + name: string; + // where it is located + account_id?: string; + project_id?: string; + config?: Partial; + // only load historic messages starting at the given seq number. + start_seq?: number; + desc?: JSONValue; + leader?: boolean; + persist?: boolean; + + client?: Client; + + noCache?: boolean; + heartbeatInterval?: number; +} + +export interface User { + account_id?: string; + project_id?: string; +} + +export function storagePath({ + account_id, + project_id, + name, +}: User & { name: string }) { + let top; + if (account_id) { + top = `accounts/${account_id}`; + } else if (project_id) { + top = `projects/${project_id}`; + } else { + top = "global"; + } + return `${top}/${name}.db`; +} + +export class CoreStream extends EventEmitter { + public readonly name: string; + private readonly subject: string; + private configOptions?: Partial; + private _start_seq?: number; + + // don't do "this.raw=" or "this.messages=" anywhere in this class + // because dstream directly references the public raw/messages. + public readonly raw: RawMsg[] = []; + public readonly messages: T[] = []; + public readonly kv: { [key: string]: { mesg: T; raw: RawMsg } } = {}; + private kvChangeBytes = 0; + + // this msgID's is ONLY used in ephemeral mode by the leader. + private readonly msgIDs = new Set(); + private sub?: Subscription; + private leader: boolean; + private persist: boolean; + private server?: Subscription; + // ephemeralSeq = sequence number used by the *leader* only to assign sequence numbers + private ephemeralSeq: number = 1; + private lastHeartbeat: number = 0; + private heartbeatInterval: number; + // lastSeq used by clients to keep track of what they have received; if one + // is skipped they reconnect starting with the last one they didn't miss. + private lastSeq: number = 0; + private sendQueue: { data; options?; seq: number; cb: Function }[] = []; + private bytesSent: { [time: number]: number } = {}; + private user: User; + private persistStream?; + private storage?: persistClient.Storage; + private client?: Client; + + private renewLoopParams: { id: string; lifetime: number; user: User } | null = + null; + + private sessionId?: string; + + constructor({ + name, + project_id, + account_id, + config, + start_seq, + leader = false, + persist = false, + client, + heartbeatInterval = DEFAULT_HEARTBEAT_INTERVAL, + }: CoreStreamOptions) { + super(); + + this.client = client; + this.user = { account_id, project_id }; + this.heartbeatInterval = heartbeatInterval; + this.name = name; + this.leader = !!leader; + this.persist = !!persist; + const subject = streamSubject({ account_id, project_id, ephemeral: true }); + this.subject = subject.replace(">", encodeBase64(name)); + if (persist) { + this.storage = { + path: storagePath({ account_id, project_id, name }), + }; + } + this._start_seq = start_seq; + this.configOptions = config; + return new Proxy(this, { + get(target, prop) { + return typeof prop == "string" && isNumericString(prop) + ? target.get(parseInt(prop)) + : target[String(prop)]; + }, + }); + } + + init = async () => { + if (this.client == null) { + this.client = (await getEnv()).cn; + } + if (this.persist) { + await this.getAllFromPersist({ + start_seq: this._start_seq, + noEmit: true, + }); + this.configOptions = await this.config(this.configOptions); + } else if (!this.leader) { + // try to get current data from a leader + await this.getAllFromLeader({ + start_seq: this._start_seq ?? 0, + noEmit: true, + }); + } else { + // non-persist mode and we are the leader, so + // start listening on the subject for new data + await this.serve(); + } + // NOTE: if we miss a message between getAllFromLeader and when we start listening, + // then the sequence number will have a gap, and we'll immediately reconnect, starting + // at the right point. So no data can possibly be lost. + await this.listen(); + if (!this.leader && !this.persist) { + this.heartbeatMonitor(); + } + }; + + config = async (config?: Partial): Promise => { + if (this.storage == null) { + throw Error("bug -- storage must be set"); + } + return await persistClient.config({ + user: this.user, + storage: this.storage, + config, + }); + }; + + private resetState = () => { + delete this.sessionId; + this.bytesSent = {}; + this.msgIDs.clear(); + this.raw.length = 0; + this.messages.length = 0; + this.ephemeralSeq = 0; + this.sendQueue.length = 0; + this.lastSeq = 0; + delete this._start_seq; + this.emit("reset"); + }; + + private reset = async () => { + this.resetState(); + await this.reconnect(); + }; + + close = () => { + delete this.client; + this.renewLoopParams = null; + this.removeAllListeners(); + // @ts-ignore + this.sub?.close(); + delete this.sub; + // @ts-ignore + this.server?.close(); + delete this.server; + // @ts-ignore + delete this.kv; + // @ts-ignore + delete this.messages; + // @ts-ignore + delete this.raw; + // @ts-ignore + delete this.msgIDs; + // @ts-ignore + delete this.sendQueue; + // @ts-ignore + delete this.bytesSent; + // @ts-ignore + delete this.storage; + }; + + private getAllFromPersist = async ({ + start_seq = 0, + noEmit, + }: { start_seq?: number; noEmit?: boolean } = {}) => { + if (this.leader) { + throw Error("leader is incompatible with persist"); + } + if (!this.persist) { + throw Error("must have persist set"); + } + + if (this.storage == null) { + throw Error("bug -- storage must be set"); + } + + const { id, lifetime, stream } = await persistClient.getAll({ + user: this.user, + storage: this.storage, + start_seq, + }); + if (id && lifetime) { + this.renewLoopParams = { id, lifetime, user: this.user }; + this.startRenewLoop(); + } + //console.log("got persistent stream", { id }); + this.persistStream = stream; + while (true) { + const { value, done } = await stream.next(); + if (done || value == null) { + return; + } + if (value.headers?.content?.state == "watch") { + // switched to watch mode + return; + } + const messages = value.data as (SetOperation | DeleteOperation)[]; + // console.log( + // "got persistent data", + // value.raw.length, + // "bytes", + // " and ", + // messages.length, + // "messages", + // ); + this.processPersistentMessages(messages, noEmit); + } + }; + + private startRenewLoop = reuseInFlight(async () => { + while (this.renewLoopParams?.lifetime && this.renewLoopParams?.id) { + // max to avoid weird situation bombarding server or infinite loop + await delay(Math.max(7500, this.renewLoopParams.lifetime / 3)); + if (this.renewLoopParams == null) { + return; + } + //console.log("renewing with lifetime ", this.renewLoopParams.lifetime); + try { + await persistClient.renew(this.renewLoopParams); + } catch (err) { + console.log(`WARNING: core-stream renew failed -- ${err}`); + // [ ] TODO: critical at this point to do something! + } + } + }); + + private processPersistentMessages = ( + messages: (SetOperation | DeleteOperation)[], + noEmit?: boolean, + ) => { + // console.log("processPersistentMessages", messages.length, " messages"); + if (this.raw === undefined) { + // closed + return; + } + for (const mesg of messages) { + try { + this.processPersistentMessage(mesg, noEmit); + } catch (err) { + console.log("WARNING: issue processing message", mesg, err); + } + } + }; + + private processPersistentMessage = ( + mesg: SetOperation | DeleteOperation, + noEmit?: boolean, + ) => { + if (mesg.op == "delete") { + this.processPersistentDelete(mesg, noEmit); + } else { + // set is the default + this.processPersistentSet(mesg as SetOperation, noEmit); + } + }; + + private processPersistentDelete = ( + { seqs }: DeleteOperation, + noEmit?: boolean, + ) => { + //console.log("processPersistentDelete", seqs); + const X = new Set(seqs); + // seqs is a list of integers. We remove + // every entry from this.raw, this.messages, and this.kv + // where this.raw.seq is in X by mutating raw/messages/kv, + // not by making new objects (since external references). + // This is a rare operation so we're not worried too much + // about performance. + const keys: { [seq: number]: string } = {}; + for (const key in this.kv) { + const seq = this.kv[key]?.raw?.seq; + if (X.has(seq)) { + delete this.kv[key]; + keys[key] = seq; + } + } + const indexes: number[] = []; + for (let i = 0; i < this.raw.length; i++) { + const seq = this.raw[i].seq; + if (X.has(seq)) { + indexes.push(i); + if (!noEmit) { + this.emitChange({ + mesg: undefined, + raw: { seq }, + key: keys[seq], + prev: this.messages[i], + }); + } + } + } + + //console.log({ indexes, seqs, noEmit }); + // remove this.raw[i] and this.messages[i] for all i in indexes, + // with special case to be fast in the very common case of contiguous indexes. + if (indexes.length > 1 && indexes.every((v, i) => v === indexes[0] + i)) { + // Contiguous: bulk remove for performance + const start = indexes[0]; + const deleteCount = indexes.length; + this.raw.splice(start, deleteCount); + this.messages.splice(start, deleteCount); + } else { + // Non-contiguous: fallback to individual reverse splices + for (let i = indexes.length - 1; i >= 0; i--) { + const idx = indexes[i]; + this.raw.splice(idx, 1); + this.messages.splice(idx, 1); + } + } + }; + + private processPersistentSet = ( + { seq, time, key, encoding, raw: data, headers, msgID }: SetOperation, + noEmit?: boolean, + ) => { + const mesg = decode({ encoding, data }); + const raw = { + timestamp: time, + headers, + seq, + raw: data, + key, + } as RawMsg; + if (seq > (this.raw.slice(-1)[0]?.seq ?? 0)) { + // easy fast initial load to the end of the list (common special case) + this.messages.push(mesg); + this.raw.push(raw); + } else { + // [ ] TODO: insert in the correct place. This should only + // happen when calling load of old ata. The algorithm below is + // dumb and could be replaced by a binary search. However, we'll + // change how we batch load so there's less point. + let i = 0; + while (i < this.raw.length && this.raw[i].seq < seq) { + i += 1; + } + this.raw.splice(i, 0, raw); + this.messages.splice(i, 0, mesg); + } + let prev: T | undefined = undefined; + if (typeof key == "string") { + prev = this.kv[key]?.mesg; + if (raw.headers?.[COCALC_TOMBSTONE_HEADER]) { + delete this.kv[key]; + } else { + if (this.kv[key] !== undefined) { + const { raw } = this.kv[key]; + this.kvChangeBytes += raw.raw.length; + } + + this.kv[key] = { raw, mesg }; + + if (this.kvChangeBytes >= KEY_GC_THRESH) { + this.gcKv(); + } + } + } + this.lastSeq = Math.max(this.lastSeq, seq); + if (!noEmit) { + this.emitChange({ mesg, raw, key, prev, msgID }); + } + }; + + private emitChange = (e: ChangeEvent) => { + this.emit("change", e); + }; + + private getAllFromLeader = async ({ + start_seq = 0, + noEmit, + }: { maxWait?: number; start_seq?: number; noEmit?: boolean } = {}) => { + if (this.leader) { + throw Error("this is the leader"); + } + // be agressive about initial retrying since the leader + // might just not be ready yet... but quickly back off. + // TODO: maybe we should add a primitive to the server + // that is client.waitUntilSubscriber('subject', {queue:?}) that + // waits until there is at least one subscribe to the given subject + // and only then sends a message. It would be doable, with a check + // each time the interest is updated. + let d = 250; + while (this.client != null) { + try { + const resp = await this.client.request(this.subject + ".all", { + start_seq, + }); + this.lastHeartbeat = Date.now(); + for (const x of resp.data) { + const raw = getRawMsg(new Message(x)); + if ( + !this.leader && + this.sessionId && + this.sessionId != raw.sessionId + ) { + await this.reset(); + return; + } else if (this.lastSeq && raw.seq > this.lastSeq + 1) { + await this.reconnect(); + return; + } else if (raw.seq <= this.lastSeq) { + // already saw this + continue; + } + if (!this.sessionId) { + this.sessionId = raw.sessionId; + } + this.lastSeq = raw.seq; + const mesg = raw.data; + this.messages.push(mesg); + this.raw.push(raw); + if (!noEmit) { + this.emitChange({ mesg, raw }); + } + } + return; + } catch (err) { + if (err.code == 503) { + // leader just isn't ready yet? + await delay(d); + d = Math.min(15000, d * 1.5); + continue; + } else { + throw err; + } + } + } + }; + + private serve = async () => { + if (this.client == null) { + throw Error("closed"); + } + this.sessionId = randomId(); + this.sendHeartbeats(); + this.server = await this.client.subscribe(this.subject + ".>"); + this.serveUntilDone(this.server); + }; + + private serveUntilDone = async (sub) => { + for await (const raw of sub) { + if (raw.subject.endsWith(".all")) { + // batch get + + const { start_seq = 0 } = raw.data ?? {}; + + // put exactly the entire data the client needs to get updated + // into a single payload + const payload = this.raw + .filter(({ seq }) => seq >= start_seq) + .map(({ headers, encoding, raw }) => { + return { headers, encoding, raw }; + }); + + // send it out as a single response. + raw.respond(payload); + } else if (raw.subject.endsWith(".send")) { + // single send: ([ ] TODO need to support a batch send?) + + const options = raw.headers?.[COCALC_OPTIONS_HEADER]; + let resp; + try { + resp = await this.sendAsLeader(raw.data, options); + } catch (err) { + raw.respond({ error: `${err}` }); + return; + } + raw.respond(resp); + } + } + }; + + private sendHeartbeats = async () => { + while (this.client != null) { + const now = Date.now(); + const wait = this.heartbeatInterval - (now - this.lastHeartbeat); + if (wait > 100) { + await delay(wait); + } else { + const now = Date.now(); + this.client.publish(this.subject, null, { + headers: { [COCALC_HEARTBEAT_HEADER]: true }, + }); + this.lastHeartbeat = now; + await delay(this.heartbeatInterval); + } + } + }; + + private heartbeatMonitor = async () => { + while (this.client != null) { + if (Date.now() - this.lastHeartbeat >= 2.1 * this.heartbeatInterval) { + try { + await this.reconnect(); + } catch {} + } + await delay(this.heartbeatInterval); + } + }; + + private listen = async () => { + if (this.client == null) { + return; + } + if (this.persist) { + this.listenLoopPersist(); + return; + } else { + this.sub = await this.client.subscribe(this.subject); + this.listenLoop(); + } + this.enforceLimits(); + }; + + private listenLoopPersist = async () => { + if (this.persistStream == null) { + throw Error("persistentStream must be defined"); + } + for await (const { data } of this.persistStream) { + this.processPersistentMessages(data, false); + } + }; + + private listenLoop = async () => { + if (this.sub == null) { + throw Error("subscription must be setup"); + } + for await (const raw0 of this.sub) { + if (!this.leader) { + this.lastHeartbeat = Date.now(); + } + if (raw0.data == null && raw0.headers?.[COCALC_HEARTBEAT_HEADER]) { + // it's a heartbeat probe + continue; + } + const raw = getRawMsg(raw0); + if (!this.leader && this.sessionId && this.sessionId != raw.sessionId) { + await this.reset(); + return; + } else if (!this.leader && this.lastSeq && raw.seq > this.lastSeq + 1) { + await this.reconnect(); + return; + } else if (raw.seq <= this.lastSeq) { + // already saw this + continue; + } + if (!this.sessionId) { + this.sessionId = raw.sessionId; + } + // move sequence number forward one and record the data + this.lastSeq = raw.seq; + const mesg = raw.data; + this.messages.push(mesg); + this.raw.push(raw); + this.lastSeq = raw.seq; + this.emitChange({ mesg, raw }); + } + }; + + private reconnect = reuseInFlight(async () => { + if (this.leader) { + // leader doesn't have a notion of reconnect -- it is the one that + // gets connected to + return; + } + // @ts-ignore + this.sub?.close(); + delete this.sub; + if (this.persist) { + await this.getAllFromPersist({ + start_seq: this.lastSeq + 1, + noEmit: false, + }); + } else { + await this.getAllFromLeader({ + start_seq: this.lastSeq + 1, + noEmit: false, + }); + } + this.listen(); + }); + + publish = async ( + mesg: T, + options?: { + // headers for this message + headers?: Headers; + // unique id for this message to dedup so if you send the same + // message more than once with the same id it doesn't get published + // multiple times. + msgID?: string; + // key -- if specified a key field is also stored on the server, + // and any previous messages with the same key are deleted. Also, + // an entry is set in this.kv[key] so that this.getKv(key), etc. work. + key?: string; + // if key is specified and previousSeq is set, the server throws + // an error if the sequence number of the current key is + // not previousSeq. We use this with this.seqKv(key) to + // provide read/change/write semantics and to know when we + // should resovle a merge conflict. This is ignored if + // key is not specified. + previousSeq?: number; + // if set to a number of ms AND the config option allow_msg_ttl + // is set on this persistent stream, then + // this message will be deleted after the given amount of time (in ms). + ttl?: number; + }, + ) => { + if (mesg === undefined) { + if (options?.key !== undefined) { + // undefined can't be JSON encoded, so we can't possibly represent it, and this + // *must* be treated as a delete. + this.deleteKv(options?.key, { previousSeq: options?.previousSeq }); + return; + } else { + throw Error("stream non-kv publish - mesg must not be 'undefined'"); + } + } + + const data = mesg; + + if (typeof mesg == "string" && !this.persist) { + // this may throw an exception preventing publishing. + enforceRateLimits({ + limits: { + max_msgs: -1, + max_age: 0, + max_bytes: -1, + max_msg_size: -1, + max_bytes_per_second: -1, + max_msgs_per_second: -1, + ...this.configOptions, + }, + bytesSent: this.bytesSent, + subject: this.subject, + bytes: mesg.length, + }); + } + if (this.persist) { + if (this.storage == null) { + throw Error("bug -- storage must be set"); + } + if (options?.msgID && this.msgIDs.has(options.msgID)) { + // it's a dup + return; + } + const md = messageData(mesg, { headers: options?.headers }); + const x = await persistClient.set({ + user: this.user, + storage: this.storage, + key: options?.key, + messageData: md, + previousSeq: options?.previousSeq, + msgID: options?.msgID, + ttl: options?.ttl, + }); + if (options?.msgID) { + this.msgIDs?.add(options.msgID); + } + return x; + } else if (this.leader) { + // sending from leader -- so assign seq, timestamp and send it out. + return await this.sendAsLeader(data, options); + } else { + const timeout = 15000; // todo + // sending as non-leader -- ask leader to send it. + let headers; + if (options != null && Object.keys(options).length > 0) { + headers = { [COCALC_OPTIONS_HEADER]: options }; + } else { + headers = undefined; + } + if (this.client == null) { + throw Error("closed"); + } + const resp = await this.client.request(this.subject + ".send", data, { + headers, + timeout, + }); + const r = resp.data; + if (r?.error) { + throw Error(r.error); + } + return resp; + } + }; + + private sendAsLeader = async (data, options?): Promise<{ seq: number }> => { + if (!this.leader) { + throw Error("must be the leader"); + } + const seq = this.ephemeralSeq; + this.ephemeralSeq += 1; + const f = (cb) => { + if (this.sendQueue == null) { + cb(); + return; + } + this.sendQueue.push({ data, options, seq, cb }); + this.processQueue(); + }; + await callback(f); + return { seq }; + }; + + private processQueue = reuseInFlight(async () => { + if (!this.leader) { + throw Error("must be the leader"); + } + if (this.sendQueue == null) { + return; + } + const { sessionId } = this; + while ( + (this.sendQueue?.length ?? 0) > 0 && + this.client != null && + this.sessionId == sessionId + ) { + const x = this.sendQueue.shift(); + if (x == null) { + continue; + } + const { data, options, seq, cb } = x; + if (options?.msgID && this.msgIDs.has(options?.msgID)) { + // it's a dup of one already successfully sent before -- dedup by ignoring. + cb(); + continue; + } + if (this.client == null) { + cb("closed"); + return; + } + const timestamp = Date.now(); + const headers = { + [COCALC_STREAM_HEADER]: { + seq, + timestamp, + sessionId: this.sessionId, + }, + [COCALC_MESSAGE_ID_HEADER]: options?.msgID, + } as any; + if (options?.headers) { + for (const k in options.headers) { + headers[k] = options.headers[k]; + } + } + // we publish it until we get it as a change event, and only + // then do we respond, being sure it was sent. + const now = Date.now(); + while (this.client != null && this.sessionId == sessionId) { + // critical to use publishSync here so that we are waiting + // for the "change" below *before* it happens. + this.client.publishSync(this.subject, data, { headers }); + const start = Date.now(); + let done = false; + try { + while ( + Date.now() - start <= PUBLISH_TIMEOUT && + this.sessionId == sessionId + ) { + const [{ raw }] = (await once(this, "change", PUBLISH_TIMEOUT)) as [ + ChangeEvent, + ]; + if (raw?.seq == seq) { + done = true; + break; + } + } + if (done && options?.msgID) { + this.msgIDs.add(options.msgID); + } + cb(done ? undefined : "timeout"); + break; + } catch (err) { + console.warn(`Error processing sendQueue -- ${err}`); + cb(`${err}`); + break; + } + } + if (now > this.lastHeartbeat) { + this.lastHeartbeat = now; + } + } + }); + + get = (n?): T | T[] => { + if (n == null) { + return this.getAll(); + } else { + return this.messages[n]; + } + }; + + seq = (n: number): number | undefined => { + return this.raw[n]?.seq; + }; + + getAll = (): T[] => { + return [...this.messages]; + }; + + get length(): number { + return this.messages.length; + } + + get start_seq(): number | undefined { + return this._start_seq; + } + + headers = (n: number): { [key: string]: any } | undefined => { + return this.raw[n]?.headers; + }; + + // key:value interface for subset of messages pushed with key option set. + // NOTE: This does NOT throw an error if our local seq is out of date (leave that + // to dkv built on this). + setKv = async ( + key: string, + mesg: T, + options?: { + headers?: Headers; + previousSeq?: number; + }, + ) => { + return await this.publish(mesg, { ...options, key }); + }; + + deleteKv = async ( + key: string, + options?: { + msgID?: string; + previousSeq?: number; + }, + ) => { + if (this.kv[key] === undefined) { + // nothing to do + return; + } + return await this.publish(null as any, { + ...options, + headers: { [COCALC_TOMBSTONE_HEADER]: true }, + key, + ttl: DEFAULT_TOMBSTONE_TTL, + }); + }; + + getKv = (key: string): T | undefined => { + return this.kv[key]?.mesg; + }; + + hasKv = (key: string): boolean => { + return this.kv?.[key] !== undefined; + }; + + getAllKv = (): { [key: string]: T } => { + const all: { [key: string]: T } = {}; + for (const key in this.kv) { + all[key] = this.kv[key].mesg; + } + return all; + }; + + seqKv = (key: string): number | undefined => { + return this.kv[key]?.raw.seq; + }; + + timeKv = (key?: string): Date | { [key: string]: Date } | undefined => { + if (key === undefined) { + const all: { [key: string]: Date } = {}; + for (const key in this.kv) { + all[key] = new Date(this.kv[key].raw.timestamp); + } + return all; + } + const r = this.kv[key]?.raw; + if (r == null) { + return; + } + return new Date(r.timestamp); + }; + + headersKv = (key: string): { [key: string]: any } | undefined => { + return this.kv[key]?.raw?.headers; + }; + + get lengthKv(): number { + return Object.keys(this.kv).length; + } + + // load older messages starting at start_seq up to the oldest message + // we currently have. + load = async ({ + start_seq, + noEmit, + }: { + start_seq: number; + noEmit?: boolean; + }) => { + if (this.persist) { + // This is used for loading more TimeTravel history + if (this.storage == null) { + throw Error("bug"); + } + // this is one before the oldest we have + const end_seq = (this.raw[0]?.seq ?? this._start_seq ?? 1) - 1; + if (start_seq > end_seq) { + // nothing to load + return; + } + // we're moving start_seq back to this point + this._start_seq = start_seq; + const { stream } = await persistClient.getAll({ + user: this.user, + storage: this.storage, + start_seq, + end_seq, + }); + for await (const { data } of stream) { + this.processPersistentMessages(data, noEmit); + } + return; + } + + // Ephemeral case below - lower priority since probably never used: + // [ ] TODO: this is NOT efficient - it just discards everything and starts over. + if (this._start_seq == null || this._start_seq <= 1 || this.leader) { + // we already loaded everything on initialization; there can't be anything older; + // or we are leader, so we are the full source of truth. + return; + } + const n = this.messages.length; + this.resetState(); + this._start_seq = start_seq; + this.lastSeq = start_seq - 1; + await this.reconnect(); + if (!noEmit) { + for (let i = 0; i < this.raw.length - n; i++) { + this.emitChange({ mesg: this.messages[i], raw: this.raw[i] }); + } + } + }; + + // get server assigned time of n-th message in stream + time = (n: number): Date | undefined => { + const r = this.raw[n]; + if (r == null) { + return; + } + return new Date(r.timestamp); + }; + + times = () => { + const v: (Date | undefined)[] = []; + for (let i = 0; i < this.length; i++) { + v.push(this.time(i)); + } + return v; + }; + + stats = ({ + start_seq = 1, + }: { + start_seq?: number; + } = {}): { count: number; bytes: number } | undefined => { + if (this.raw == null) { + return; + } + let count = 0; + let bytes = 0; + for (const { raw, seq } of this.raw) { + if (seq == null) { + continue; + } + if (seq < start_seq) { + continue; + } + count += 1; + bytes += raw.length; + } + return { count, bytes }; + }; + + // delete all messages up to and including the + // one at position index, i.e., this.messages[index] + // is deleted. + // NOTE: For ephemeral streams, clients will NOT see the result of a delete, + // except when they load the stream later. For persistent streams all + // **connected** clients will see the delete. THAT said, this is not a "proper" + // distributed computing primitive with tombstones, etc. This is primarily + // meant for reducing space usage, and shouldn't be relied on for + // any other purpose. + delete = async ({ + all, + last_index, + seq, + last_seq, + key, + }: { + // give exactly ONE parameter -- by default nothing happens with no params + all?: boolean; // delete everything + last_index?: number; // everything up to and including index'd message + seq?: number; // delete message with this sequence number + last_seq?: number; // delete everything up to and including this sequence number + key?: string; // delete the message with this key + } = {}): Promise<{ seqs: number[] }> => { + if (this.persist) { + let opts; + if (all) { + opts = { all: true }; + } else if (last_index != null) { + if (last_index >= this.raw.length) { + opts = { all: true }; + } else if (last_index < 0) { + return { seqs: [] }; + } else { + const last_seq = this.raw[last_index].seq; + if (last_seq === undefined) { + throw Error(`BUG: invalid index ${last_index}`); + } + opts = { last_seq }; + } + } else if (seq != null) { + opts = { seq }; + } else if (last_seq != null) { + opts = { last_seq }; + } else if (key != null) { + const seq = this.kv[key]?.raw?.seq; + if (seq === undefined) { + return { seqs: [] }; + } + opts = { seq }; + } + return await persistClient.deleteMessages({ + user: this.user, + storage: this.storage, + ...opts, + }); + } + if (seq != null || last_seq != null) { + throw Error( + "only deleting by last_index is supported for ephemeral streams", + ); + } + if (last_index == null) { + last_index = -1; + } + if (last_index >= this.raw.length - 1 || last_index == -1) { + last_index = this.raw.length - 1; + } + const seqs = this.raw.slice(0, last_index + 1).map((x) => x.seq); + this.messages.splice(0, last_index + 1); + this.raw.splice(0, last_index + 1); + return { seqs }; + }; + + private enforceLimitsNow = reuseInFlight(async () => { + if (this.persist) { + // this is done in persistent storage server side, not by our client. + return; + } + // ephemeral limits are enforced by all clients. + const index = enforceLimits({ + messages: this.messages, + // @ts-ignore [ ] TODO + raw: this.raw, + limits: { + max_msgs: -1, + max_age: 0, + max_bytes: -1, + max_msg_size: -1, + max_bytes_per_second: -1, + max_msgs_per_second: -1, + ...this.configOptions, + }, + }); + if (index > -1) { + try { + await this.delete({ last_index: index }); + } catch (err) { + if (err.code != "TIMEOUT") { + console.log(`WARNING: purging old messages - ${err}`); + } + } + } + }); + + private enforceLimits = throttle( + this.enforceLimitsNow, + ENFORCE_LIMITS_THROTTLE_MS, + { leading: false, trailing: true }, + ); + + // delete messages that are no longer needed since newer values have been written + gcKv = () => { + this.kvChangeBytes = 0; + for (let i = 0; i < this.raw.length; i++) { + const key = this.raw[i].key; + if (key !== undefined) { + if (this.raw[i].raw.length > 0 && this.raw[i] !== this.kv[key].raw) { + this.raw[i] = { + ...this.raw[i], + headers: undefined, + raw: Buffer.from(""), + } as RawMsg; + this.messages[i] = undefined as T; + } + } + } + }; +} + +export const cache = refCache({ + name: "core-stream", + createObject: async (options: CoreStreamOptions) => { + const estream = new CoreStream(options); + await estream.init(); + return estream; + }, + createKey: ({ client, ...options }) => { + return jsonStableStringify(options)!; + }, +}); +export async function cstream( + options: CoreStreamOptions, +): Promise> { + return await cache(options); +} + +function getRawMsg(raw: Message): RawMsg { + const { + seq = 0, + timestamp = 0, + sessionId = "", + } = (raw.headers?.[COCALC_STREAM_HEADER] ?? {}) as any; + if (!seq) { + throw Error("missing seq header"); + } + if (!timestamp) { + throw Error("missing timestamp header"); + } + // @ts-ignore + raw.seq = seq; + // @ts-ignore + raw.timestamp = timestamp; + // @ts-ignore + raw.sessionId = sessionId; + // @ts-ignore + return raw; +} diff --git a/src/packages/nats/sync/dko.ts b/src/packages/conat/sync/dko.ts similarity index 91% rename from src/packages/nats/sync/dko.ts rename to src/packages/conat/sync/dko.ts index 3247bd6a62..6116e832d7 100644 --- a/src/packages/nats/sync/dko.ts +++ b/src/packages/conat/sync/dko.ts @@ -9,23 +9,37 @@ DEVELOPMENT: ~/cocalc/src/packages/backend n Welcome to Node.js v18.17.1. Type ".help" for more information. -> t = await require("@cocalc/backend/nats/sync").dko({name:'test'}) +> t = await require("@cocalc/backend/conat/sync").dko({name:'test'}) */ import { EventEmitter } from "events"; import { reuseInFlight } from "@cocalc/util/reuse-in-flight"; import { dkv as createDKV, DKV, DKVOptions } from "./dkv"; -import { userKvKey } from "./kv"; import { is_object } from "@cocalc/util/misc"; import refCache from "@cocalc/util/refcache"; -import { getEnv } from "@cocalc/nats/client"; +import { getEnv } from "@cocalc/conat/client"; +import jsonStableStringify from "json-stable-stringify"; + +export function userKvKey(options: DKVOptions) { + if (!options.name) { + throw Error("name must be specified"); + } + const { client, ...x } = options; + return jsonStableStringify(x)!; +} + + +interface DKOOptions extends DKVOptions { + // TODO + env?: any; +} export class DKO extends EventEmitter { opts: DKVOptions; dkv?: DKV; // can't type this - constructor(opts: DKVOptions) { + constructor(opts: DKOOptions) { super(); this.opts = opts; this.init(); @@ -223,7 +237,7 @@ export class DKO extends EventEmitter { }; } -export const cache = refCache({ +export const cache = refCache({ name: "dko", createKey: userKvKey, createObject: async (opts) => { @@ -243,6 +257,6 @@ function dkoPrefix(name: string): string { return `${DKO_PREFIX}${name}`; } -export async function dko(options: DKVOptions): Promise> { +export async function dko(options: DKOOptions): Promise> { return await cache(options); } diff --git a/src/packages/nats/sync/general-dkv.ts b/src/packages/conat/sync/dkv.ts similarity index 68% rename from src/packages/nats/sync/general-dkv.ts rename to src/packages/conat/sync/dkv.ts index b4b94d5c2d..a6c6ff4c84 100644 --- a/src/packages/nats/sync/general-dkv.ts +++ b/src/packages/conat/sync/dkv.ts @@ -1,16 +1,21 @@ /* Eventually Consistent Distributed Key:Value Store -- You give one or more subjects and this provides a synchronous eventually consistent - "multimaster" distributed way to work with the KV store of keys matching any of those subjects, +- You give one subject and general-dkv provides a synchronous eventually consistent + "multimaster" distributed way to work with the KV store of keys matching that subject, inside of the named KV store. -- You should define a 3-way merge function, which is used to automatically resolve all - conflicting writes. The default is to use our local version, i.e., "last write to remote wins". + +- You may define a 3-way merge function, which is used to automatically resolve all + conflicting writes. The default is to use our local version, i.e., "last write + to remote wins". The function is run locally so can have access to any state. + - All set/get/delete operations are synchronous. -- The state gets sync'd in the backend to NATS as soon as possible. + +- The state gets sync'd in the backend to persistent storage on Conat as soon as possible, + and there is an async save function. This class is based on top of the Consistent Centralized Key:Value Store defined in kv.ts. -You can use the same key:value store at the same time via both interfaces, and if store +You can use the same key:value store at the same time via both interfaces, and if the store is a DKV, you can also access the underlying KV via "store.kv". - You must explicitly call "await store.init()" to initialize this before using it. @@ -22,7 +27,7 @@ is a DKV, you can also access the underlying KV via "store.kv". - Use "store.set(key,value)" or "store.set({key:value, key2:value2, ...})" to set data, with the following semantics: - - in the background, changes propagate to NATS. You do not do anything explicitly and + - in the background, changes propagate to Conat. You do not do anything explicitly and this should never raise an exception. - you can call "store.hasUnsavedChanges()" to see if there are any unsaved changes. @@ -44,15 +49,14 @@ is a DKV, you can also access the underlying KV via "store.kv". DEVELOPMENT: -~/cocalc/src/packages/server$ node -Welcome to Node.js v18.17.1. -Type ".help" for more information. -> env = await require("@cocalc/backend/nats/env").getEnv(); a = require("@cocalc/nats/sync/dkv"); s = new a.DKV({name:'test',env,filter:['foo.>'],merge:({local,remote})=>{return {...remote,...local}}}); await s.init(); +~/cocalc/src/packages/backend$ node + +s = await require("@cocalc/backend/conat/sync").dkv({name:'test', merge:({local,remote})=>{return {...remote,...local}}}); In the browser console: -> s = await cc.client.nats_client.dkv({filter:['foo.>'],merge:({local,remote})=>{return {...remote,...local}}}) +> s = await cc.client.conat_client.dkv({filter:['foo.>'],merge:({local,remote})=>{return {...remote,...local}}}) # NOTE that the name is account-{account_id} or project-{project_id}, # and if not given defaults to the account-{user's account id} @@ -61,23 +65,21 @@ In the browser console: > s.on('change',(key)=>console.log(key));0; - -TODO: - - require not-everything subject or have an explicit size limit? - - some history would be VERY useful here due to the merge conflicts. - - for conflict resolution maybe instead of local and remote, just give - two values along with their assigned sequence numbers (?). I.e., something - where the resolution doesn't depend on where it is run. ? Or maybe this doesn't matter. */ import { EventEmitter } from "events"; -import { GeneralKV, type KVLimits } from "./general-kv"; +import { + CoreStream, + type Configuration, + type ChangeEvent, +} from "./core-stream"; import { reuseInFlight } from "@cocalc/util/reuse-in-flight"; -import { type NatsEnv, type ValueType } from "@cocalc/nats/types"; import { isEqual } from "lodash"; import { delay } from "awaiting"; import { map as awaitMap } from "awaiting"; -import { getClient, type ClientWithState } from "@cocalc/nats/client"; +import type { Client, Headers } from "@cocalc/conat/core/client"; +import refCache from "@cocalc/util/refcache"; +import { type JSONValue } from "@cocalc/util/types"; export const TOMBSTONE = Symbol("tombstone"); const MAX_PARALLEL = 250; @@ -89,69 +91,91 @@ export type MergeFunction = (opts: { remote: any; }) => any; -interface Options { - headers?: { [name: string]: string | null }; +interface SetOptions { + headers?: Headers; } -export class GeneralDKV extends EventEmitter { - private kv?: GeneralKV; - private jc?; +export interface DKVOptions { + name: string; + account_id?: string; + project_id?: string; + desc?: JSONValue; + client?: Client; + // 3-way merge conflict resolution + merge?: (opts: { key: string; prev?: any; local?: any; remote?: any }) => any; + config?: Partial; + + // if noAutosave is set, local changes are never saved until you explicitly + // call "await this.save()", which will try once to save. Changes made during + // the save may not be saved though. + // CAUTION: noAutosave is really only meant for unit testing! The save is + // reuseInFlighted so a safe somewhere far away could be in progress starting + // before your call to save, and when it finishes that's it, so what you just + // did is not saved. Take care. + noAutosave?: boolean; + + noCache?: boolean; +} + +export class DKV extends EventEmitter { + private kv?: CoreStream; private merge?: MergeFunction; private local: { [key: string]: T | typeof TOMBSTONE } = {}; - private options: { [key: string]: Options } = {}; + private options: { [key: string]: SetOptions } = {}; private saved: { [key: string]: T | typeof TOMBSTONE } = {}; private changed: Set = new Set(); private noAutosave: boolean; - private client?: ClientWithState; - public readonly valueType: ValueType; public readonly name: string; - public readonly desc?: string; + public readonly desc?: JSONValue; constructor({ name, - env, - filter, + project_id, + account_id, + desc, + client, merge, - options, + config, noAutosave, - limits, - valueType, - desc, - }: { - name: string; - // used for log and error messages - desc: string; - env: NatsEnv; - // 3-way merge conflict resolution - merge?: (opts: { - key: string; - prev?: any; - local?: any; - remote?: any; - }) => any; - // filter: optionally restrict to subset of named kv store matching these subjects. - // NOTE: any key name that you *set or delete* must match one of these - filter: string | string[]; - limits?: KVLimits; - // if noAutosave is set, local changes are never saved until you explicitly - // call "await this.save()", which will try once to save. Changes made during - // the save may not be saved though. - noAutosave?: boolean; - options?; - valueType?: ValueType; - }) { + }: DKVOptions) { super(); this.name = name; this.desc = desc; this.merge = merge; this.noAutosave = !!noAutosave; - this.jc = env.jc; - this.valueType = valueType ?? "json"; - this.kv = new GeneralKV({ name, env, filter, options, limits, valueType }); - if (!noAutosave) { - this.client = getClient(); - this.client.on("connected", this.save); - } + this.kv = new CoreStream({ + name, + project_id, + account_id, + client, + config, + // we do not have any notion of ephemeral kv yet + persist: true, + }); + + return new Proxy(this, { + deleteProperty(target, prop) { + if (typeof prop == "string") { + target.delete(prop); + } + return true; + }, + set(target, prop, value) { + prop = String(prop); + if (prop == "_eventsCount" || prop == "_events" || prop == "close") { + target[prop] = value; + return true; + } + if (target[prop] != null) { + throw Error(`method name '${prop}' is read only`); + } + target.set(prop, value); + return true; + }, + get(target, prop) { + return target[String(prop)] ?? target.get(String(prop)); + }, + }); } init = reuseInFlight(async () => { @@ -160,6 +184,8 @@ export class GeneralDKV extends EventEmitter { } this.kv.on("change", this.handleRemoteChange); await this.kv.init(); + // allow_msg_ttl is used for deleting tombstones. + await this.kv.config({ allow_msg_ttl: true }); this.emit("connected"); }); @@ -176,7 +202,6 @@ export class GeneralDKV extends EventEmitter { `WARNING: unable to save some data when closing a general-dkv -- ${err}`, ); } - this.client?.removeListener("connected", this.save); } this.kv.close(); this.emit("closed"); @@ -208,7 +233,15 @@ export class GeneralDKV extends EventEmitter { return true; }; - private handleRemoteChange = ({ key, value: remote, prev }) => { + private handleRemoteChange = ({ + mesg: remote, + key, + prev, + }: ChangeEvent) => { + if (key === undefined) { + // not part of kv store data + return; + } const local = this.local[key] === TOMBSTONE ? undefined : this.local[key]; let value: any = remote; if (local !== undefined) { @@ -260,7 +293,6 @@ export class GeneralDKV extends EventEmitter { if (this.kv == null) { throw Error("closed"); } - this.assertValidKey(key); const local = this.local[key]; if (local === TOMBSTONE) { return undefined; @@ -268,7 +300,7 @@ export class GeneralDKV extends EventEmitter { if (local !== undefined) { return local; } - return this.kv.get(key); + return this.kv.getKv(key); }; get length(): number { @@ -280,7 +312,7 @@ export class GeneralDKV extends EventEmitter { if (this.kv == null) { throw Error("closed"); } - const x = { ...this.kv.getAll(), ...this.local }; + const x = { ...this.kv.getAllKv(), ...this.local }; for (const key in this.local) { if (this.local[key] === TOMBSTONE) { delete x[key]; @@ -300,21 +332,21 @@ export class GeneralDKV extends EventEmitter { if (a !== undefined) { return true; } - return this.kv.has(key); + return this.kv.hasKv(key); }; time = (key?: string): { [key: string]: Date } | Date | undefined => { if (this.kv == null) { throw Error("closed"); } - return this.kv.time(key); + return this.kv.timeKv(key); }; - private assertValidKey = (key): void => { + seq = (key: string): number | undefined => { if (this.kv == null) { throw Error("closed"); } - this.kv.assertValidKey(key); + return this.kv.seqKv(key); }; private _delete = (key) => { @@ -323,7 +355,6 @@ export class GeneralDKV extends EventEmitter { }; delete = (key) => { - this.assertValidKey(key); this._delete(key); if (!this.noAutosave) { this.save(); @@ -334,7 +365,7 @@ export class GeneralDKV extends EventEmitter { if (this.kv == null) { throw Error("closed"); } - for (const key in this.kv.getAll()) { + for (const key in this.kv.getAllKv()) { this._delete(key); } for (const key in this.local) { @@ -349,25 +380,18 @@ export class GeneralDKV extends EventEmitter { if (obj === undefined) { return TOMBSTONE; } - if (this.valueType == "binary") { - if (!ArrayBuffer.isView(obj)) { - throw Error("value must be an array buffer"); - } - return obj; - } - // It's EXTREMELY important that anything we save to NATS has the property that - // jc.decode(jc.encode(obj)) is the identity map. That is very much NOT - // the case for stuff that set gets called on, e.g., {a:new Date()}. - // Thus before storing it in in any way, we ensure this immediately: - return this.jc.decode(this.jc.encode(obj)); + return obj; }; - headers = (key: string): { [key: string]: string } | undefined => { - return this.kv?.headers(key); + headers = (key: string): Headers | undefined => { + if (this.options[key] != null) { + return this.options[key]?.headers; + } else { + return this.kv?.headersKv(key); + } }; - set = (key: string, value: T, options?: Options) => { - this.assertValidKey(key); + set = (key: string, value: T, options?: SetOptions) => { const obj = this.toValue(value); this.local[key] = obj; if (options != null) { @@ -381,7 +405,6 @@ export class GeneralDKV extends EventEmitter { setMany = (obj) => { for (const key in obj) { - this.assertValidKey(key); this.local[key] = this.toValue(obj[key]); this.changed.add(key); } @@ -397,7 +420,7 @@ export class GeneralDKV extends EventEmitter { return this.unsavedChanges().length > 0; }; - unsavedChanges = () => { + unsavedChanges = (): string[] => { return Object.keys(this.local).filter( (key) => this.local[key] !== this.saved[key], ); @@ -442,7 +465,7 @@ export class GeneralDKV extends EventEmitter { for (const key in obj) { if (obj[key] === TOMBSTONE) { status.unsaved += 1; - await this.kv.delete(key); + await this.kv.deleteKv(key); status.delete += 1; status.unsaved -= 1; delete obj[key]; @@ -452,45 +475,42 @@ export class GeneralDKV extends EventEmitter { } } } - const f = async (key) => { + const f = async (key: string) => { if (this.kv == null) { // closed return; } try { status.unsaved += 1; - await this.kv.set(key, obj[key] as T, this.options[key]); + const previousSeq = this.seq(key); + await this.kv.setKv(key, obj[key] as T, { + ...this.options[key], + previousSeq, + }); // console.log("kv store -- attemptToSave succeed", this.desc, { // key, // value: obj[key], // }); status.unsaved -= 1; status.set += 1; - if (!this.changed.has(key)) { - // successfully saved this and user didn't make a change *during* the set - this.discardLocalState(key); - } // note that we CANNOT call this.discardLocalState(key) here, because // this.get(key) needs to work immediately after save, but if this.local[key] // is deleted, then this.get(key) would be undefined, because - // this.kv.get(key) only has value in it once the value is + // this.kv.getKv(key) only has value in it once the value is // echoed back from the server. } catch (err) { // console.log("kv store -- attemptToSave failed", this.desc, err, { // key, // value: obj[key], // }); - if (err.code == "REJECT" && err.key) { - const value = this.local[err.key]; + if (err.code == "reject") { + const value = this.local[key]; // can never save this. - this.discardLocalState(err.key); + this.discardLocalState(key); status.unsaved -= 1; - this.emit("reject", { key: err.key, value }); + this.emit("reject", { key, value }); } - if ( - err.code == "10071" && - err.message.startsWith("wrong last sequence") - ) { + if (err.code == "wrong-last-sequence") { // this happens when another client has published a NEWER version of this key, // so the right thing is to just ignore this. In a moment there will be no // need to save anything, since we'll receive a message that overwrites this key. @@ -504,4 +524,27 @@ export class GeneralDKV extends EventEmitter { }); stats = () => this.kv?.stats(); + + // get or set config + config = async (config: Partial): Promise => { + if (this.kv == null) { + throw Error("not initialized"); + } + return await this.kv.config(config); + }; +} + +export const cache = refCache({ + name: "dkv", + createKey: ({ name, account_id, project_id }) => + JSON.stringify({ name, account_id, project_id }), + createObject: async (opts) => { + const k = new DKV(opts); + await k.init(); + return k; + }, +}); + +export async function dkv(options: DKVOptions): Promise> { + return await cache(options); } diff --git a/src/packages/nats/sync/dstream.ts b/src/packages/conat/sync/dstream.ts similarity index 69% rename from src/packages/nats/sync/dstream.ts rename to src/packages/conat/sync/dstream.ts index ef38a7a937..12b1c52967 100644 --- a/src/packages/nats/sync/dstream.ts +++ b/src/packages/conat/sync/dstream.ts @@ -1,98 +1,74 @@ /* -Eventually Consistent Distributed Event Stream +Eventually Consistent Distributed Message Stream DEVELOPMENT: # in node -- note the package directory!! -~/cocalc/src/packages/backend n -Welcome to Node.js v18.17.1. -Type ".help" for more information. - -> s = await require("@cocalc/backend/nats/sync").dstream({name:'test'}); - - -> s = await require("@cocalc/backend/nats/sync").dstream({project_id:cc.current().project_id,name:'foo'});0 +~/cocalc/src/packages/backend node +> s = await require("@cocalc/backend/conat/sync").dstream({name:'test'}); +> s = await require("@cocalc/backend/conat/sync").dstream({project_id:cc.current().project_id,name:'foo'});0 See the guide for dkv, since it's very similar, especially for use in a browser. - */ import { EventEmitter } from "events"; -import { - Stream, - type StreamOptions, - type UserStreamOptions, - userStreamOptionsKey, - last, -} from "./stream"; -import { EphemeralStream } from "./ephemeral-stream"; -import { jsName, streamSubject, randomId } from "@cocalc/nats/names"; +import { CoreStream, type RawMsg, type ChangeEvent } from "./core-stream"; +import { streamSubject, randomId } from "@cocalc/conat/names"; import { reuseInFlight } from "@cocalc/util/reuse-in-flight"; import { delay } from "awaiting"; import { map as awaitMap } from "awaiting"; import { isNumericString } from "@cocalc/util/misc"; import refCache from "@cocalc/util/refcache"; -import { type JsMsg } from "@nats-io/jetstream"; -import { getEnv } from "@cocalc/nats/client"; -import { inventory, THROTTLE_MS, type Inventory } from "./inventory"; -import { asyncThrottle } from "@cocalc/util/async-utils"; -import { getClient, type ClientWithState } from "@cocalc/nats/client"; -import { encodeBase64 } from "@cocalc/nats/util"; -import { getLogger } from "@cocalc/nats/client"; -import { waitUntilConnected } from "@cocalc/nats/util"; -import { type Msg } from "@nats-io/nats-core"; -import { headersFromRawMessages } from "./stream"; -import { COCALC_MESSAGE_ID_HEADER } from "./ephemeral-stream"; - -const logger = getLogger("dstream"); - -const MAX_PARALLEL = 250; - -export interface DStreamOptions extends StreamOptions { +import { encodeBase64 } from "@cocalc/conat/util"; +import type { Client, Headers } from "@cocalc/conat/core/client"; +import jsonStableStringify from "json-stable-stringify"; +import type { JSONValue } from "@cocalc/util/types"; +import { type ValueType } from "@cocalc/conat/types"; +import { Configuration } from "./core-stream"; + +const MAX_PARALLEL = 50; + +export interface DStreamOptions { + // what it's called by us + name: string; + subject: string; + config?: Partial; + // only load historic messages starting at the given seq number. + start_seq?: number; + desc?: JSONValue; + valueType?: ValueType; + + client?: Client; noAutosave?: boolean; - noInventory?: boolean; ephemeral?: boolean; + + // only relevant for ephemeral, in which case this will only work when there + // is exactly ONE leader. leader?: boolean; } export class DStream extends EventEmitter { public readonly name: string; - private stream?: Stream | EphemeralStream; + private stream?: CoreStream; private messages: T[]; - private raw: (JsMsg | Msg)[][]; + private raw: RawMsg[]; private noAutosave: boolean; // TODO: using Map for these will be better because we use .length a bunch, which is O(n) instead of O(1). private local: { [id: string]: T } = {}; private publishOptions: { - [id: string]: { headers?: { [key: string]: string } }; + [id: string]: { headers?: Headers }; } = {}; private saved: { [seq: number]: T } = {}; - private opts; - private client?: ClientWithState; constructor(opts: DStreamOptions) { super(); - - if ( - opts.noInventory || - opts.ephemeral || - (process.env.COCALC_TEST_MODE && opts.noInventory == null) - ) { - // @ts-ignore - this.updateInventory = () => {}; - } - this.opts = opts; this.noAutosave = !!opts.noAutosave; this.name = opts.name; - this.stream = opts.ephemeral ? new EphemeralStream(opts) : new Stream(opts); + this.stream = new CoreStream({ ...opts, persist: !opts.ephemeral }); this.messages = this.stream.messages; this.raw = this.stream.raw; - if (!this.noAutosave) { - this.client = getClient(); - this.client.on("connected", this.save); - } return new Proxy(this, { get(target, prop) { return typeof prop == "string" && isNumericString(prop) @@ -106,16 +82,18 @@ export class DStream extends EventEmitter { if (this.stream == null) { throw Error("closed"); } - this.stream.on("change", (mesg: T, raw: JsMsg[]) => { - delete this.saved[last(raw).seq]; - const headers = headersFromRawMessages(raw); - if (headers?.[COCALC_MESSAGE_ID_HEADER]) { - // this is critical with ephemeral-stream.ts, since otherwise there is a moment + this.stream.on("change", ({ mesg, raw, msgID }: ChangeEvent) => { + if (raw?.seq !== undefined) { + delete this.saved[raw.seq]; + } + if (mesg === undefined) { + return; + } + if (msgID) { + // this is critical with core-stream.ts, since otherwise there is a moment // when the same message is in both this.local *and* this.messages, and you'll - // see it doubled in this.getAll(). I didn't see this ever with - // stream.ts, but maybe it is possible. It probably wouldn't impact any application, - // but still it would be a bug to not do this properly, which is what we do here. - delete this.local[headers[COCALC_MESSAGE_ID_HEADER]]; + // see it doubled in this.getAll(). + delete this.local[msgID]; } this.emit("change", mesg); if (this.isStable()) { @@ -128,7 +106,6 @@ export class DStream extends EventEmitter { }); await this.stream.init(); this.emit("connected"); - this.updateInventory(); }); isStable = () => { @@ -146,17 +123,17 @@ export class DStream extends EventEmitter { return; } if (!this.noAutosave) { - this.client?.removeListener("connected", this.save); try { await this.save(); } catch { // [ ] TODO: try localStorage or a file?! } } - this.stream.close(); + const stream = this.stream; + delete this.stream; + stream.close(); this.emit("closed"); this.removeAllListeners(); - delete this.stream; // @ts-ignore delete this.local; // @ts-ignore @@ -197,7 +174,7 @@ export class DStream extends EventEmitter { // sequence number of n-th message seq = (n: number): number | undefined => { if (n < this.raw.length) { - return last(this.raw[n])?.seq; + return this.raw[n].seq; } const v = Object.keys(this.saved); if (n < v.length + this.raw.length) { @@ -228,34 +205,20 @@ export class DStream extends EventEmitter { ); } - private toValue = (obj) => { - if (this.stream == null) { - throw Error("not initialized"); - } - if (this.stream.valueType == "binary") { - if (!ArrayBuffer.isView(obj)) { - throw Error("value must be an array buffer"); - } - return obj; - } - return this.opts.env.jc.decode(this.opts.env.jc.encode(obj)); - }; - publish = ( mesg: T, - // NOTE: if you call this.headers(n) it is NOT visible until the publish is confirmed. - // This could be changed with more work if it matters. - options?: { headers?: { [key: string]: string } }, + // NOTE: if you call this.headers(n) it is NOT visible until + // the publish is confirmed. This could be changed with more work if it matters. + options?: { headers?: Headers; ttl?: number }, ): void => { const id = randomId(); - this.local[id] = this.toValue(mesg); + this.local[id] = mesg; if (options != null) { this.publishOptions[id] = options; } if (!this.noAutosave) { this.save(); } - this.updateInventory(); }; headers = (n) => { @@ -294,10 +257,12 @@ export class DStream extends EventEmitter { } catch (err) { d = Math.min(10000, d * 1.3) + Math.random() * 100; await delay(d); - console.warn( - `WARNING stream attemptToSave failed -- ${err}`, - this.name, - ); + if (!process.env.COCALC_TEST_MODE) { + console.warn( + `WARNING stream attemptToSave failed -- ${err}`, + this.name, + ); + } } if (!this.hasUnsavedChanges()) { return; @@ -305,6 +270,8 @@ export class DStream extends EventEmitter { } }); + // [ ] TODO: make faster by saving everything as a single message + // -- ie a batch write. That said, in cocalc this case doesn't come up. private attemptToSave = reuseInFlight(async () => { const f = async (id) => { if (this.stream == null) { @@ -317,14 +284,17 @@ export class DStream extends EventEmitter { ...this.publishOptions[id], msgID: id, }); - if ((last(this.raw[this.raw.length - 1])?.seq ?? -1) < seq) { + if (this.raw == null) { + return; + } + if ((this.raw[this.raw.length - 1]?.seq ?? -1) < seq) { // it still isn't in this.raw this.saved[seq] = mesg; } delete this.local[id]; delete this.publishOptions[id]; } catch (err) { - if (err.code == "REJECT") { + if (err.code == "reject") { delete this.local[id]; // err has mesg and subject set. this.emit("reject", { err, mesg }); @@ -351,21 +321,30 @@ export class DStream extends EventEmitter { }; // this is not synchronous -- it makes sure everything is saved out, - // then purges the stream stored in nats. - // NOTE: other clients will NOT see the result of a purge (unless they reconnect). - purge = async (opts?) => { + // then delete the persistent stream + // NOTE: for ephemeral streams, other clients will NOT see the result of a purge (unless they reconnect). + delete = async (opts?) => { await this.save(); if (this.stream == null) { throw Error("not initialized"); } - await this.stream.purge(opts); + return await this.stream.delete(opts); }; get start_seq(): number | undefined { return this.stream?.start_seq; } - // returns largest sequence number known to this client. + // get or set config + config = async (config: Partial): Promise => { + if (this.stream == null) { + throw Error("not initialized"); + } + return await this.stream.config(config); + }; + + /* + // returns largest sequence number known to this client. // not optimized to be super fast. private getCurSeq = (): number | undefined => { let s = 0; @@ -381,6 +360,7 @@ export class DStream extends EventEmitter { return s ? s : undefined; }; + // [ ] TODO: this will be moved to persistence server, which is where it belongs. private updateInventory = asyncThrottle( async () => { if (this.stream == null || this.opts.noInventory) { @@ -400,7 +380,6 @@ export class DStream extends EventEmitter { return; } const { account_id, project_id, desc, limits } = this.opts; - await waitUntilConnected(); inv = await inventory({ account_id, project_id }); if (this.stream == null) { return; @@ -447,18 +426,37 @@ export class DStream extends EventEmitter { THROTTLE_MS, { leading: true, trailing: true }, ); + */ +} + +interface CreateOptions { + name: string; + account_id?: string; + project_id?: string; + config?: Partial; + start_seq?: number; + noCache?: boolean; + desc?: JSONValue; + valueType?: ValueType; + client?: Client; + noAutosave?: boolean; + noInventory?: boolean; + leader?: boolean; + ephemeral?: boolean; } -export const cache = refCache({ +export const cache = refCache({ name: "dstream", - createKey: userStreamOptionsKey, - createObject: async (options) => { - await waitUntilConnected(); - if (options.env == null) { - options.env = await getEnv(); + createKey: (options: CreateOptions) => { + if (!options.name) { + throw Error("name must be specified"); } + // @ts-ignore + const { env, client, ...x } = options; + return jsonStableStringify(x)!; + }, + createObject: async (options: CreateOptions) => { const { account_id, project_id, name, valueType = "json" } = options; - const jsname = jsName({ account_id, project_id }); const subjects = streamSubject({ account_id, project_id }); // **CRITICAL:** do NOT change how the filter is computed as a function @@ -469,23 +467,13 @@ export const cache = refCache({ const dstream = new DStream({ ...options, name, - jsname, - subjects, subject: filter, - filter, }); await dstream.init(); return dstream; }, }); -export async function dstream( - options: UserStreamOptions & { - noAutosave?: boolean; - noInventory?: boolean; - leader?: boolean; - ephemeral?: boolean; - }, -): Promise> { +export async function dstream(options: CreateOptions): Promise> { return await cache(options); } diff --git a/src/packages/nats/sync/inventory.ts b/src/packages/conat/sync/inventory.ts similarity index 95% rename from src/packages/nats/sync/inventory.ts rename to src/packages/conat/sync/inventory.ts index dcb4c3da68..c600ceb36b 100644 --- a/src/packages/nats/sync/inventory.ts +++ b/src/packages/conat/sync/inventory.ts @@ -3,7 +3,7 @@ Inventory of all streams and key:value stores in a specific project, account or DEVELOPMENT: -i = await require('@cocalc/backend/nats/sync').inventory({project_id:'00847397-d6a8-4cb0-96a8-6ef64ac3e6cf'}) +i = await require('@cocalc/backend/conat/sync').inventory({project_id:'00847397-d6a8-4cb0-96a8-6ef64ac3e6cf'}) i.ls() @@ -11,18 +11,18 @@ i.ls() import { dkv, type DKV } from "./dkv"; import { dstream, type DStream } from "./dstream"; -import getTime from "@cocalc/nats/time"; +import getTime from "@cocalc/conat/time"; import refCache from "@cocalc/util/refcache"; import type { JSONValue } from "@cocalc/util/types"; import { human_readable_size as humanReadableSize, trunc_middle, } from "@cocalc/util/misc"; -import type { ValueType } from "@cocalc/nats/types"; -import { type KVLimits } from "./general-kv"; -import { type FilteredStreamLimitOptions } from "./stream"; +import type { ValueType } from "@cocalc/conat/types"; +import { type KVLimits } from "./limits"; +import { type FilteredStreamLimitOptions } from "./limits"; import { DKO_PREFIX } from "./dko"; -import { waitUntilTimeAvailable } from "@cocalc/nats/time"; +import { waitUntilTimeAvailable } from "@cocalc/conat/time"; export const THROTTLE_MS = 10000; export const INVENTORY_NAME = "CoCalc-Inventory"; @@ -53,9 +53,9 @@ type StoreType = "kv" | "stream"; interface Item { // when it was created created: number; - // last time this kv-store was updated + // last time this kv-store or stream was updated last: number; - // how much space is used by this kv-store + // how much space is used by this kv-store or stream bytes: number; // number of keys or messages count: number; diff --git a/src/packages/conat/sync/limits.ts b/src/packages/conat/sync/limits.ts new file mode 100644 index 0000000000..779fd0619e --- /dev/null +++ b/src/packages/conat/sync/limits.ts @@ -0,0 +1,178 @@ +import type { RawMsg } from "./core-stream"; + +export const ENFORCE_LIMITS_THROTTLE_MS = process.env.COCALC_TEST_MODE + ? 100 + : 45000; + +class PublishRejectError extends Error { + code: string; + mesg: any; + subject?: string; + limit?: string; +} + +export interface FilteredStreamLimitOptions { + // How many messages may be in a Stream, oldest messages will be removed + // if the Stream exceeds this size. -1 for unlimited. + max_msgs: number; + // Maximum age of any message in the stream matching the filter, + // expressed in milliseconds. 0 for unlimited. + // **Note that max_age is in milliseoncds, NOT nanoseconds like in Nats!!!** + max_age: number; + // How big the Stream may be, when the combined stream size matching the filter + // exceeds this old messages are removed. -1 for unlimited. + // This is enforced only on write, so if you change it, it only applies + // to future messages. + max_bytes: number; + // The largest message that will be accepted by the Stream. -1 for unlimited. + max_msg_size: number; + + // Attempting to publish a message that causes this to be exceeded + // throws an exception instead. -1 (or 0) for unlimited + // For dstream, the messages are explicitly rejected and the client + // gets a "reject" event emitted. E.g., the terminal running in the project + // writes [...] when it gets these rejects, indicating that data was + // dropped. + max_bytes_per_second: number; + max_msgs_per_second: number; +} + +export interface KVLimits { + // How many keys may be in the KV store. Oldest keys will be removed + // if the key-value store exceeds this size. -1 for unlimited. + max_msgs: number; + + // Maximum age of any key, expressed in milliseconds. 0 for unlimited. + // Age is updated whenever value of the key is changed. + max_age: number; + + // The maximum number of bytes to store in this KV, which means + // the total of the bytes used to store everything. Since we store + // the key with each value (to have arbitrary keys), this includes + // the size of the keys. + max_bytes: number; + + // The maximum size of any single value, including the key. + max_msg_size: number; +} + +export function enforceLimits({ + messages, + raw, + limits, +}: { + messages: T[]; + raw: RawMsg[]; + limits: FilteredStreamLimitOptions; +}) { + const { max_msgs, max_age, max_bytes } = limits; + // we check with each defined limit if some old messages + // should be dropped, and if so move limit forward. If + // it is above -1 at the end, we do the drop. + let index = -1; + const setIndex = (i, _limit) => { + // console.log("setIndex", { i, _limit }); + index = Math.max(i, index); + }; + // max_msgs + // console.log({ max_msgs, l: messages.length, messages }); + if (max_msgs > -1 && messages.length > max_msgs) { + // ensure there are at most limits.max_msgs messages + // by deleting the oldest ones up to a specified point. + const i = messages.length - max_msgs; + if (i > 0) { + setIndex(i - 1, "max_msgs"); + } + } + + // max_age + if (max_age > 0) { + // expire messages older than max_age nanoseconds + const recent = raw[raw.length - 1]; + if (recent != null) { + // to avoid potential clock skew, we define *now* as the time of the most + // recent message. For us, this should be fine, since we only impose limits + // when writing new messages, and none of these limits are guaranteed. + const now = recent.timestamp; + if (now) { + const cutoff = now - max_age; + for (let i = raw.length - 1; i >= 0; i--) { + const t = raw[i].timestamp; + if (t < cutoff) { + // it just went over the limit. Everything before + // and including the i-th message must be deleted. + setIndex(i, "max_age"); + break; + } + } + } + } + } + + // max_bytes + if (max_bytes >= 0) { + let t = 0; + for (let i = raw.length - 1; i >= 0; i--) { + t += raw[i].data.length; + if (t > max_bytes) { + // it just went over the limit. Everything before + // and including the i-th message must be deleted. + setIndex(i, "max_bytes"); + break; + } + } + } + + return index; +} + +export function enforceRateLimits({ + limits, + bytesSent, + subject, + bytes, +}: { + limits: { max_bytes_per_second: number; max_msgs_per_second: number }; + bytesSent: { [time: number]: number }; + subject?: string; + bytes; +}) { + const now = Date.now(); + if (!(limits.max_bytes_per_second > 0) && !(limits.max_msgs_per_second > 0)) { + return; + } + + const cutoff = now - 1000; + let totalBytes = 0, + msgs = 0; + for (const t in bytesSent) { + if (parseInt(t) < cutoff) { + delete bytesSent[t]; + } else { + totalBytes += bytesSent[t]; + msgs += 1; + } + } + if ( + limits.max_bytes_per_second > 0 && + totalBytes + bytes > limits.max_bytes_per_second + ) { + const err = new PublishRejectError( + `bytes per second limit of ${limits.max_bytes_per_second} exceeded`, + ); + err.code = "REJECT"; + err.subject = subject; + err.limit = "max_bytes_per_second"; + throw err; + } + if (limits.max_msgs_per_second > 0 && msgs > limits.max_msgs_per_second) { + const err = new PublishRejectError( + `messages per second limit of ${limits.max_msgs_per_second} exceeded`, + ); + err.code = "REJECT"; + err.subject = subject; + err.limit = "max_msgs_per_second"; + throw err; + } + bytesSent[now] = bytes; +} diff --git a/src/packages/nats/sync/open-files.ts b/src/packages/conat/sync/open-files.ts similarity index 95% rename from src/packages/nats/sync/open-files.ts rename to src/packages/conat/sync/open-files.ts index 202b040fef..723eceff06 100644 --- a/src/packages/nats/sync/open-files.ts +++ b/src/packages/conat/sync/open-files.ts @@ -7,7 +7,7 @@ Change to packages/backend, since packages/nats doesn't have a way to connect: ~/cocalc/src/packages/backend$ node -> z = await require('@cocalc/backend/nats/sync').openFiles({project_id:cc.current().project_id}) +> z = await require('@cocalc/backend/conat/sync').openFiles({project_id:cc.current().project_id}) > z.touch({path:'a.txt'}) > z.get({path:'a.txt'}) { open: true, count: 1, time:2025-02-09T16:37:20.713Z } @@ -24,16 +24,15 @@ Change to packages/backend, since packages/nats doesn't have a way to connect: Frontend Dev in browser: -z = await cc.client.nats_client.openFiles({project_id:cc.current().project_id)) +z = await cc.client.conat_client.openFiles({project_id:cc.current().project_id)) z.getAll() } */ -import { type State } from "@cocalc/nats/types"; -import { dkv, type DKV } from "@cocalc/nats/sync/dkv"; +import { type State } from "@cocalc/conat/types"; +import { dkv, type DKV } from "@cocalc/conat/sync/dkv"; import { EventEmitter } from "events"; -import getTime, { getSkew } from "@cocalc/nats/time"; -import { getEnv } from "@cocalc/nats/client"; +import getTime, { getSkew } from "@cocalc/conat/time"; // info about interest in open files (and also what was explicitly deleted) older // than this is automatically purged. @@ -166,8 +165,7 @@ export class OpenFiles extends EventEmitter { const d = await dkv({ name: "open-files", project_id: this.project_id, - env: await getEnv(), - limits: { + config: { max_age: MAX_AGE_MS, }, noAutosave: this.noAutosave, diff --git a/src/packages/nats/sync/pubsub.ts b/src/packages/conat/sync/pubsub.ts similarity index 71% rename from src/packages/nats/sync/pubsub.ts rename to src/packages/conat/sync/pubsub.ts index cbaeab8baa..11e95e7fbe 100644 --- a/src/packages/nats/sync/pubsub.ts +++ b/src/packages/conat/sync/pubsub.ts @@ -1,12 +1,14 @@ /* Use NATS simple pub/sub to share state for something *ephemeral* in a project. + +This is used, e.g., for broadcasting a user's cursors when they are editing a file. */ -import { projectSubject } from "@cocalc/nats/names"; -import { type NatsEnv, State } from "@cocalc/nats/types"; +import { projectSubject } from "@cocalc/conat/names"; +import { type NatsEnv, State } from "@cocalc/conat/types"; import { EventEmitter } from "events"; -import { isConnectedSync } from "@cocalc/nats/util"; -import { type Subscription } from "@nats-io/nats-core"; +import { isConnectedSync } from "@cocalc/conat/util"; +import { type Subscription } from "@cocalc/conat/core/client"; export class PubSub extends EventEmitter { private subject: string; @@ -56,14 +58,14 @@ export class PubSub extends EventEmitter { // when disconnected, all state is dropped return; } - this.env.nc.publish(this.subject, this.env.jc.encode(obj)); + this.env.cn.publish(this.subject, obj); }; private subscribe = async () => { - this.sub = this.env.nc.subscribe(this.subject); + this.sub = await this.env.cn.subscribe(this.subject); this.setState("connected"); for await (const mesg of this.sub) { - this.emit("change", this.env.jc.decode(mesg.data)); + this.emit("change", mesg.data); } }; } diff --git a/src/packages/nats/sync/syncdoc-info.ts b/src/packages/conat/sync/syncdoc-info.ts similarity index 96% rename from src/packages/nats/sync/syncdoc-info.ts rename to src/packages/conat/sync/syncdoc-info.ts index 042c4d4ee9..c4c0966762 100644 --- a/src/packages/nats/sync/syncdoc-info.ts +++ b/src/packages/conat/sync/syncdoc-info.ts @@ -12,7 +12,7 @@ export async function getSyncDocType({ let syncdocs; try { const string_id = client_db.sha1(project_id, path); - syncdocs = await client.synctable_nats( + syncdocs = await client.synctable_conat( { syncstrings: [{ project_id, path, string_id, doctype: null }] }, { stream: false, diff --git a/src/packages/nats/sync/synctable-kv.ts b/src/packages/conat/sync/synctable-kv.ts similarity index 90% rename from src/packages/nats/sync/synctable-kv.ts rename to src/packages/conat/sync/synctable-kv.ts index e7ae924309..3a54e0f402 100644 --- a/src/packages/nats/sync/synctable-kv.ts +++ b/src/packages/conat/sync/synctable-kv.ts @@ -7,16 +7,16 @@ import { keys } from "lodash"; import { client_db } from "@cocalc/util/db-schema/client-db"; -import type { NatsEnv, State } from "@cocalc/nats/types"; +import type { NatsEnv, State } from "@cocalc/conat/types"; import { EventEmitter } from "events"; import { dkv as createDkv, type DKV } from "./dkv"; import { dko as createDko, type DKO } from "./dko"; import jsonStableStringify from "json-stable-stringify"; -import { toKey } from "@cocalc/nats/util"; +import { toKey } from "@cocalc/conat/util"; import { wait } from "@cocalc/util/async-wait"; import { fromJS, Map } from "immutable"; -import { type KVLimits } from "./general-kv"; import type { JSONValue } from "@cocalc/util/types"; +import type { Configuration } from "@cocalc/conat/sync/core-stream"; export class SyncTableKV extends EventEmitter { public readonly table; @@ -29,9 +29,8 @@ export class SyncTableKV extends EventEmitter { private dkv?: DKV | DKO; private env; private getHook: Function; - private limits?: Partial; + private config?: Partial; private desc?: JSONValue; - private noInventory?: boolean; constructor({ query, @@ -40,9 +39,8 @@ export class SyncTableKV extends EventEmitter { project_id, atomic, immutable, - limits, + config, desc, - noInventory, }: { query; env: NatsEnv; @@ -50,17 +48,15 @@ export class SyncTableKV extends EventEmitter { project_id?: string; atomic?: boolean; immutable?: boolean; - limits?: Partial; + config?: Partial; desc?: JSONValue; - noInventory?: boolean; }) { super(); - this.noInventory = noInventory; this.setMaxListeners(100); this.atomic = !!atomic; this.getHook = immutable ? fromJS : (x) => x; this.query = query; - this.limits = limits; + this.config = config; this.env = env; this.desc = desc; this.table = keys(query)[0]; @@ -121,20 +117,16 @@ export class SyncTableKV extends EventEmitter { name, account_id: this.account_id, project_id: this.project_id, - env: this.env, - limits: this.limits, + config: this.config, desc: this.desc, - noInventory: this.noInventory, }); } else { this.dkv = await createDko({ name, account_id: this.account_id, project_id: this.project_id, - env: this.env, - limits: this.limits, + config: this.config, desc: this.desc, - noInventory: this.noInventory, }); } // For some reason this one line confuses typescript and break building the compute server package (nothing else similar happens). @@ -151,7 +143,7 @@ export class SyncTableKV extends EventEmitter { } } // change api was to emit array of keys. - // We also use this packages/sync/table/changefeed-nats.ts which needs the value, + // We also use this packages/sync/table/changefeed-conat.ts which needs the value, // so we emit that object second. this.emit("change", [x.key], x); }); diff --git a/src/packages/nats/sync/synctable-stream.ts b/src/packages/conat/sync/synctable-stream.ts similarity index 89% rename from src/packages/nats/sync/synctable-stream.ts rename to src/packages/conat/sync/synctable-stream.ts index a810af96fb..371e74f4ef 100644 --- a/src/packages/nats/sync/synctable-stream.ts +++ b/src/packages/conat/sync/synctable-stream.ts @@ -13,10 +13,9 @@ import { keys } from "lodash"; import { cmp_Date, is_array, isValidUUID } from "@cocalc/util/misc"; import { client_db } from "@cocalc/util/db-schema/client-db"; import { EventEmitter } from "events"; -import { type NatsEnv } from "@cocalc/nats/types"; import { dstream, DStream } from "./dstream"; import { fromJS, Map } from "immutable"; -import { type FilteredStreamLimitOptions } from "./stream"; +import type { Configuration } from "@cocalc/conat/sync/core-stream"; export type State = "disconnected" | "connected" | "closed"; @@ -38,38 +37,38 @@ export class SyncTableStream extends EventEmitter { private string_id: string; private data: any = {}; private state: State = "disconnected"; - private env; private dstream?: DStream; private getHook: Function; - private limits?: Partial; + private config?: Partial; private start_seq?: number; private noInventory?: boolean; + private ephemeral?: boolean; constructor({ query, - env, account_id: _account_id, project_id, immutable, - limits, + config, start_seq, noInventory, + ephemeral, }: { query; - env: NatsEnv; account_id?: string; project_id?: string; immutable?: boolean; - limits?: Partial; + config?: Partial; start_seq?: number; noInventory?: boolean; + ephemeral?: boolean; }) { super(); this.noInventory = noInventory; + this.ephemeral = ephemeral; this.setMaxListeners(100); this.getHook = immutable ? fromJS : (x) => x; - this.env = env; - this.limits = limits; + this.config = config; this.start_seq = start_seq; const table = keys(query)[0]; this.table = table; @@ -96,19 +95,21 @@ export class SyncTableStream extends EventEmitter { this.dstream = await dstream({ name, project_id: this.project_id, - env: this.env, - limits: this.limits, + config: this.config, desc: { path: this.path }, start_seq: this.start_seq, noInventory: this.noInventory, - // ephemeral: true, - // leader: typeof navigator == "undefined", + ephemeral: this.ephemeral, + // ephemeral only supported for synctable when one synctable opened + // in project/compute-server and all others in browser; this is, + // of course our model for terminals, jupyter, etc. + leader: this.ephemeral && typeof navigator == "undefined", }); this.dstream.on("change", (mesg) => { this.handle(mesg, true); }); this.dstream.on("reject", (err) => { - console.warn("synctable-stream: REJECTED - ", err); + console.warn("synctable-stream: rejected - ", err); }); for (const mesg of this.dstream.getAll()) { this.handle(mesg, false); diff --git a/src/packages/nats/sync/synctable.ts b/src/packages/conat/sync/synctable.ts similarity index 73% rename from src/packages/nats/sync/synctable.ts rename to src/packages/conat/sync/synctable.ts index bf819c3813..47d06c1d61 100644 --- a/src/packages/nats/sync/synctable.ts +++ b/src/packages/conat/sync/synctable.ts @@ -1,9 +1,9 @@ -import { type NatsEnv } from "@cocalc/nats/types"; +import { type NatsEnv } from "@cocalc/conat/types"; import { SyncTableKV } from "./synctable-kv"; import { SyncTableStream } from "./synctable-stream"; import { refCacheSync } from "@cocalc/util/refcache"; -import { type KVLimits } from "./general-kv"; -import { type FilteredStreamLimitOptions } from "./stream"; +import { type KVLimits } from "./limits"; +import { type FilteredStreamLimitOptions } from "./limits"; import jsonStableStringify from "json-stable-stringify"; export type NatsSyncTable = SyncTableStream | SyncTableKV; @@ -22,9 +22,9 @@ export type NatsSyncTableFunction = ( }, ) => Promise; -// When the database is watching tables for changefeeds, if it doesn't get a clear expression -// of interest from a client every this much time, it stops managing the changefeed to -// save resources. +// When the database is watching tables for changefeeds, if it doesn't +// get a clear expression of interest from a client every this much time, +// it stops managing the changefeed to save resources. export const CHANGEFEED_INTEREST_PERIOD_MS = 120000; // export const CHANGEFEED_INTEREST_PERIOD_MS = 3000; @@ -42,18 +42,17 @@ interface Options { desc?: any; start_seq?: number; noInventory?: boolean; -} - -function createObject(options: Options) { - if (options.stream) { - return new SyncTableStream(options); - } else { - return new SyncTableKV(options); - } + ephemeral?: boolean; } export const createSyncTable = refCacheSync({ name: "synctable", - createKey: (opts) => jsonStableStringify({ ...opts, env: undefined }), - createObject, + createKey: (opts) => jsonStableStringify({ ...opts, env: undefined })!, + createObject: (options: Options) => { + if (options.stream) { + return new SyncTableStream(options); + } else { + return new SyncTableKV(options); + } + }, }); diff --git a/src/packages/nats/time.ts b/src/packages/conat/time.ts similarity index 91% rename from src/packages/nats/time.ts rename to src/packages/conat/time.ts index 901bd9c74a..3e98622fc4 100644 --- a/src/packages/nats/time.ts +++ b/src/packages/conat/time.ts @@ -17,15 +17,15 @@ In unit testing mode this just falls back to Date.now(). DEVELOPMENT: -See src/packages/backend/nats/test/time.test.ts for relevant unit test, though +See src/packages/backend/conat/test/time.test.ts for relevant unit test, though in test mode this is basically disabled. -Also do this, noting the directory and import of @cocalc/backend/nats. +Also do this, noting the directory and import of @cocalc/backend/conat. ~/cocalc/src/packages/backend$ node Welcome to Node.js v18.17.1. Type ".help" for more information. -> a = require('@cocalc/nats/time'); require('@cocalc/backend/nats') +> a = require('@cocalc/conat/time'); require('@cocalc/backend/conat') { getEnv: [Getter], getConnection: [Function: debounced], @@ -37,11 +37,10 @@ Type ".help" for more information. */ -import { timeClient } from "@cocalc/nats/service/time"; +import { timeClient } from "@cocalc/conat/service/time"; import { reuseInFlight } from "@cocalc/util/reuse-in-flight"; -import { getClient } from "@cocalc/nats/client"; +import { getClient } from "@cocalc/conat/client"; import { delay } from "awaiting"; -import { waitUntilConnected } from "./util"; // we use exponential backoff starting with a short interval // then making it longer @@ -97,7 +96,6 @@ export const getSkew = reuseInFlight(async (): Promise => { return skew; } try { - await waitUntilConnected(); const start = Date.now(); const client = getClient(); const tc = timeClient(client); diff --git a/src/packages/nats/tsconfig.json b/src/packages/conat/tsconfig.json similarity index 100% rename from src/packages/nats/tsconfig.json rename to src/packages/conat/tsconfig.json diff --git a/src/packages/nats/types.ts b/src/packages/conat/types.ts similarity index 55% rename from src/packages/nats/types.ts rename to src/packages/conat/types.ts index ba0d00b915..d69eefc984 100644 --- a/src/packages/nats/types.ts +++ b/src/packages/conat/types.ts @@ -1,18 +1,14 @@ -import type { NatsConnection as NatsConnection0 } from "@nats-io/nats-core"; -import type { EventEmitter } from "events"; export type ValueType = "json" | "binary"; +import { type Client as ConatClient } from "@cocalc/conat/core/client"; -export type NatsConnection = NatsConnection0 & - Partial & { - getProjectPermissions?: () => Promise; - getConnectionInfo?: Function; - addProjectPermissions: (project_ids: string[]) => Promise; - }; +export type NatsConnection = any; export interface NatsEnv { // nats connection, but frontend extends it to be an EventEmitter nc: NatsConnection; jc; // jsoncodec + + cn: ConatClient; } export type State = "disconnected" | "connected" | "closed"; diff --git a/src/packages/conat/util.ts b/src/packages/conat/util.ts new file mode 100644 index 0000000000..69778b90aa --- /dev/null +++ b/src/packages/conat/util.ts @@ -0,0 +1,106 @@ +import jsonStableStringify from "json-stable-stringify"; +import { encode as encodeBase64, decode as decodeBase64 } from "js-base64"; +export { encodeBase64, decodeBase64 }; +import { reuseInFlight } from "@cocalc/util/reuse-in-flight"; +import { getConnection, getConnectionSync } from "./client"; + +export function handleErrorMessage(mesg) { + if (mesg?.error) { + if (mesg.error.startsWith("Error: ")) { + throw Error(mesg.error.slice("Error: ".length)); + } else { + throw Error(mesg.error); + } + } + return mesg; +} + +// Returns true if the subject matches the NATS pattern. +export function matchesPattern({ + pattern, + subject, +}: { + pattern: string; + subject: string; +}): boolean { + const subParts = subject.split("."); + const patParts = pattern.split("."); + let i = 0, + j = 0; + while (i < subParts.length && j < patParts.length) { + if (patParts[j] === ">") return true; + if (patParts[j] !== "*" && patParts[j] !== subParts[i]) return false; + i++; + j++; + } + + return i === subParts.length && j === patParts.length; +} + +// Return true if the subject is a valid NATS subject. +// Returns true if the subject is a valid NATS subject (UTF-8 aware) +export function isValidSubject(subject: string): boolean { + if (typeof subject !== "string" || subject.length === 0) return false; + if (subject.startsWith(".") || subject.endsWith(".")) return false; + const tokens = subject.split("."); + // No empty tokens + if (tokens.some((t) => t.length === 0)) return false; + for (let i = 0; i < tokens.length; ++i) { + const tok = tokens[i]; + // ">" is only allowed as last token + if (tok === ">" && i !== tokens.length - 1) return false; + // "*" and ">" are allowed as sole tokens + if (tok !== "*" && tok !== ">") { + // Must not contain "." or any whitespace Unicode code point + if (/[.\s]/u.test(tok)) { + return false; + } + } + // All tokens: must not contain whitespace (unicode aware) + if (/\s/u.test(tok)) { + return false; + } + // Allow any UTF-8 (unicode) chars except dot and whitespace in tokens. + } + return true; +} + +export function isValidSubjectWithoutWildcards(subject: string): boolean { + return ( + isValidSubject(subject) && !subject.includes("*") && !subject.endsWith(">") + ); +} + +export function toKey(x): string | undefined { + if (x === undefined) { + return undefined; + } else if (typeof x === "object") { + return jsonStableStringify(x); + } else { + return `${x}`; + } +} + +// returns false if not connected or there is no connection yet. +export function isConnectedSync(): boolean { + const nc = getConnectionSync(); + // @ts-ignore + return !!nc?.protocol?.connected; +} + +export async function isConnected(nc?): Promise { + nc = nc ?? (await getConnection()); + // At least if this changes, things will be so broken, we'll quickly notice, hopefully. + // @ts-ignore + return !!nc.protocol?.connected; +} + +// Returns the max payload size for messages for the NATS server +// that we are connected to. This is used for chunking by the kv +// and stream to support arbitrarily large values. +export const getMaxPayload = reuseInFlight(async () => { + // [ ] TODO + return 1e6; +}); + +export const waitUntilConnected = reuseInFlight(async () => {}); diff --git a/src/packages/database/nats/changefeed-api.ts b/src/packages/database/conat/changefeed-api.ts similarity index 53% rename from src/packages/database/nats/changefeed-api.ts rename to src/packages/database/conat/changefeed-api.ts index 3fe737a01f..ebda84cac7 100644 --- a/src/packages/database/nats/changefeed-api.ts +++ b/src/packages/database/conat/changefeed-api.ts @@ -2,19 +2,19 @@ DEVELOPMENT: -Turn off nats-server handling for the hub for changefeeds by sending this message from a browser as an admin: +Turn off conat-server handling for the hub for changefeeds by sending this message from a browser as an admin: - await cc.client.nats_client.hub.system.terminate({service:'changefeeds'}) + await cc.client.conat_client.hub.system.terminate({service:'changefeeds'}) In a node session: DEBUG=cocalc*changefeed* DEBUG_CONSOLE=yes node - require('@cocalc/backend/nats'); require('@cocalc/database/nats/changefeed-api').init() + require('@cocalc/backend/conat'); require('@cocalc/database/conat/changefeed-api').init() In another session: - require('@cocalc/backend/nats'); c = require('@cocalc/nats/changefeed/client'); + require('@cocalc/backend/conat'); c = require('@cocalc/conat/changefeed/client'); account_id = '6aae57c6-08f1-4bb5-848b-3ceb53e61ede'; cf = await c.changefeed({account_id,query:{accounts:[{account_id, first_name:null}]}, heartbeat:5000, lifetime:30000}); @@ -25,9 +25,9 @@ In another session: await c.renew({account_id, id}) */ -import { init as initChangefeedServer } from "@cocalc/nats/changefeed/server"; +import { init as initChangefeedServer } from "@cocalc/conat/changefeed/server"; import { db } from "@cocalc/database"; -import "@cocalc/backend/nats"; +import "@cocalc/backend/conat"; export function init() { initChangefeedServer(db); diff --git a/src/packages/database/nats/leak-search.ts b/src/packages/database/conat/leak-search.ts similarity index 97% rename from src/packages/database/nats/leak-search.ts rename to src/packages/database/conat/leak-search.ts index a90687e142..842208efc6 100644 --- a/src/packages/database/nats/leak-search.ts +++ b/src/packages/database/conat/leak-search.ts @@ -12,7 +12,7 @@ ACCOUNT_ID="6aae57c6-08f1-4bb5-848b-3ceb53e61ede" DEBUG=cocalc:* DEBUG_CONSOLE=y Then do this - a = require('@cocalc/database/nats/leak-search') + a = require('@cocalc/database/conat/leak-search') await a.testQueryOnly(50) await a.testChangefeed(50) diff --git a/src/packages/database/jest.config.js b/src/packages/database/jest.config.js index 763f4887ba..b23c3ab539 100644 --- a/src/packages/database/jest.config.js +++ b/src/packages/database/jest.config.js @@ -4,4 +4,12 @@ module.exports = { testEnvironment: "node", setupFiles: ["./test/setup.js"], // Path to your setup file testMatch: ["**/?(*.)+(spec|test).ts?(x)"], + transform: { + ".*\\.tsx?$": [ + "ts-jest", + { + isolatedModules: true, + }, + ], + }, }; diff --git a/src/packages/database/nats/changefeeds.ts b/src/packages/database/nats/changefeeds.ts deleted file mode 100644 index 00e458a0af..0000000000 --- a/src/packages/database/nats/changefeeds.ts +++ /dev/null @@ -1,566 +0,0 @@ -/* - -What this does: - -A backend server gets a request that a given changefeed (e.g., "messages" or -"projects" for a given user) needs to be managed. For a while, the server will -watch the datϨabase and put entries in a NATS jetstream kv that represents the -data. The browser also periodically pings the backend saying "I'm still -interested in this changefeed" and the backend server keeps up watching postgres -for changes. When the user is gone for long enough (5 minutes?) the backend -stops watching and just leaves the data as is in NATS. - -When the user comes back, they immediately get the last version of the data -straight from NATS, and their browser says "I'm interested in this changefeed". -The changefeed then gets updated (hopefully 1-2 seconds later) and periodically -updated after that. - - -DEVELOPMENT: - -1. turn off nats-server handling for the hub by sending this message from a browser as an admin: - - await cc.client.nats_client.hub.system.terminate({service:'db'}) - -2. Run this line in nodejs right here: - -DEBUG_CONSOLE=yes DEBUG=cocalc:debug:database:nats:changefeeds - - require("@cocalc/database/nats/changefeeds").init() - - -*/ - -import getLogger from "@cocalc/backend/logger"; -import { JSONCodec } from "nats"; -import userQuery from "@cocalc/database/user-query"; -import { getConnection } from "@cocalc/backend/nats"; -import { getUserId } from "@cocalc/nats/hub-api"; -import { callback } from "awaiting"; -import { db } from "@cocalc/database"; -import { - createSyncTable, - CHANGEFEED_INTEREST_PERIOD_MS as CHANGEFEED_INTEREST_PERIOD_MS_USERS, -} from "@cocalc/nats/sync/synctable"; -import { sha1 } from "@cocalc/backend/misc_node"; -import jsonStableStringify from "json-stable-stringify"; -import { reuseInFlight } from "@cocalc/util/reuse-in-flight"; -import { uuid } from "@cocalc/util/misc"; -import { delay } from "awaiting"; -import { Svcm } from "@nats-io/services"; -import { Coordinator, now } from "./coordinator"; -import { numSubscriptions } from "@cocalc/nats/client"; - -const logger = getLogger("database:nats:changefeeds"); - -const jc = JSONCodec(); - -// How long until the manager's lock on changefeed expires. -// It's good for this to be SHORT, since if a hub-database -// terminates badly (without calling terminate explicitly), then -// nothing else will take over until after this lock expires. -// It's good for this to be LONG, since it reduces load on the system. -// That said, if hubs are killed properly, they release their -// locks on exit. -const LOCK_TIMEOUT_MS = 90000; - -const MAX_MANAGER_CONFLICTS = parseInt( - process.env.COCALC_MAX_MANAGER_CONFLICTS ?? "9999", -); - -// This is a limit on the numChangefeedsBeingCreatedAtOnce: -const PARALLEL_LIMIT = parseInt(process.env.COCALC_PARALLEL_LIMIT ?? "15"); - -const CHANGEFEED_INTEREST_PERIOD_MS = parseInt( - process.env.COCALC_CHANGEFEED_INTEREST_PERIOD_MS ?? `${CHANGEFEED_INTEREST_PERIOD_MS_USERS}`, -); - -export async function init() { - if (process.env.COCALC_TERMINATE_CHANGEFEEDS_ON_EXIT) { - setupExitHandler(); - } - while (true) { - if (terminated) { - return; - } - try { - await mainLoop(); - } catch (err) { - logger.debug(`error running mainLoop -- ${err}`); - } - await delay(15000); - } -} - -let api: any | null = null; -let coordinator: null | Coordinator = null; -async function mainLoop() { - if (terminated) { - return; - } - const subject = "hub.*.*.db"; - logger.debug(`init -- subject='${subject}', options=`); - coordinator = new Coordinator({ timeout: LOCK_TIMEOUT_MS }); - await coordinator.init(); - const nc = await getConnection(); - - // @ts-ignore - const svcm = new Svcm(nc); - - const service = await svcm.add({ - name: "db-server", - version: "0.2.0", - description: "CoCalc Database Service (changefeeds)", - queue: "0", - }); - - api = service.addEndpoint("api", { subject }); - - try { - for await (const mesg of api) { - await handleRequest({ mesg, nc }); - } - } finally { - cancelAllChangefeeds(); - try { - await coordinator?.close(); - } catch (err) { - logger.debug("error closing coordinator", err); - } - coordinator = null; - } -} - -// try very hard to call terminate properly, so can locks are freed -// and clients don't have to wait for the locks to expire. -function setupExitHandler() { - async function exitHandler(evtOrExitCodeOrError: number | string | Error) { - try { - await terminate(); - } catch (e) { - console.error("EXIT HANDLER ERROR", e); - } - - process.exit(isNaN(+evtOrExitCodeOrError) ? 1 : +evtOrExitCodeOrError); - } - [ - "beforeExit", - "uncaughtException", - "unhandledRejection", - "SIGHUP", - "SIGINT", - "SIGQUIT", - "SIGILL", - "SIGTRAP", - "SIGABRT", - "SIGBUS", - "SIGFPE", - "SIGUSR1", - "SIGSEGV", - "SIGUSR2", - "SIGTERM", - "exit", - ].forEach((evt) => process.on(evt, exitHandler)); -} - -let terminated = false; -export async function terminate() { - if (terminated) { - return; - } - console.log("changefeeds: TERMINATE"); - logger.debug("terminating service"); - terminated = true; - api?.stop(); - api = null; - cancelAllChangefeeds(); - if (coordinator != null) { - console.log("about to try to async save"); - await coordinator.save(); - console.log(coordinator?.dkv?.hasUnsavedChanges()); - await coordinator?.close(); - console.log("coordinator successfully saved"); - coordinator = null; - } -} - -let numRequestsAtOnce = 0; -let numChangefeedsBeingCreatedAtOnce = 0; -async function handleRequest({ mesg, nc }) { - let resp; - try { - numRequestsAtOnce += 1; - logger.debug("handleRequest", { - numRequestsAtOnce, - numSubscriptions: numSubscriptions(), - numChangefeedsBeingCreatedAtOnce, - numChangefeedsBeingManaging: Object.keys(changefeedHashes).length, - numCanceledSinceStart, - }); - const { account_id, project_id } = getUserId(mesg.subject); - const { name, args } = jc.decode(mesg.data) ?? ({} as any); - //console.log(`got request: "${JSON.stringify({ name, args })}"`); - // logger.debug(`got request: "${JSON.stringify({ name, args })}"`); - if (!name) { - throw Error("api endpoint name must be given in message"); - } - // logger.debug("handling server='db' request:", { - // account_id, - // project_id, - // name, - // }); - resp = await getResponse({ - name, - args, - account_id, - project_id, - nc, - }); - } catch (err) { - logger.debug(`ERROR -- ${err}`); - resp = { error: `${err}` }; - } finally { - numRequestsAtOnce -= 1; - } - // logger.debug(`Responding with "${JSON.stringify(resp)}"`); - mesg.respond(jc.encode(resp)); -} - -async function getResponse({ name, args, account_id, project_id, nc }) { - if (name == "userQuery") { - const opts = { ...args[0], account_id, project_id }; - if (!opts.changes) { - // a normal query - console.log("doing normal userQuery", opts); - return await userQuery(opts); - } else { - return await createChangefeed(opts, nc); - } - } else { - throw Error(`name='${name}' not implemented`); - } -} - -function queryTable(query) { - return Object.keys(query)[0]; -} - -// changefeedHashes maps changes (database changefeed id) to hash -const changefeedHashes: { [id: string]: string } = {}; -// changefeedChanges maps hash to changes. -const changefeedChanges: { [hash: string]: string } = {}; -// changefeedInterest maps hash to time -const changefeedInterest: { [hash: string]: number } = {}; -// changefeedSynctables maps hash to SyncTable -const changefeedSynctables: { [hash: string]: any } = {}; -const changefeedManagerConflicts: { [id: string]: number } = {}; - -let numCanceledSinceStart = 0; -async function cancelChangefeed({ - hash, - changes, -}: { - hash?: string; - changes?: string; -}) { - logger.debug("cancelChangefeed", { changes, hash }); - numCanceledSinceStart += 1; - if (changes && !hash) { - hash = changefeedHashes[changes]; - } else if (hash && !changes) { - changes = changefeedChanges[hash]; - } else { - // nothing - return; - } - if (!hash || !changes) { - // already canceled - return; - } - coordinator?.unlock(hash); - const synctable = changefeedSynctables[hash]; - delete changefeedSynctables[hash]; - delete changefeedInterest[hash]; - delete changefeedHashes[changes]; - delete changefeedChanges[hash]; - delete changefeedManagerConflicts[hash]; - db().user_query_cancel_changefeed({ id: changes }); - if (synctable) { - try { - await synctable.close(); - } catch (err) { - logger.debug(`WARNING: error closing changefeed synctable -- ${err}`, { - hash, - }); - } - } -} - -function cancelAllChangefeeds() { - logger.debug("cancelAllChangefeeds"); - for (const changes in changefeedHashes) { - cancelChangefeed({ changes }); - } -} - -// This is tricky. We return the first result as a normal -// async function, but then handle (and don't return) -// the subsequent calls to cb generated by the changefeed. -const createChangefeed = reuseInFlight( - async (opts, nc) => { - const query = opts.query; - // the query *AND* the user making it define the thing: - const user = { account_id: opts.account_id, project_id: opts.project_id }; - const desc = jsonStableStringify({ - query, - ...user, - }); - const hash = sha1(desc); - if (coordinator == null) { - logger.debug("coordinator is not defined"); - return; - } - - // ALWAYS update that a user is interested in this changefeed - coordinator.updateUserInterest(hash); - - const manager = coordinator.getManagerId(hash); - logger.debug("createChangefeed -- considering: ", { - table: queryTable(query), - hash, - managerId: coordinator.managerId, - manager, - }); - if (manager && coordinator.managerId != manager) { - logger.debug(`somebody else ${manager} is the manager`, { hash }); - if (changefeedInterest[hash]) { - changefeedManagerConflicts[hash] = - (changefeedManagerConflicts[hash] ?? 0) + 1; - logger.debug( - `both us (${coordinator.managerId}) and ${manager} we are also managing changefeed`, - { - hash, - count: changefeedManagerConflicts[hash], - max: MAX_MANAGER_CONFLICTS, - }, - ); - if (changefeedManagerConflicts[hash] >= MAX_MANAGER_CONFLICTS) { - cancelChangefeed({ hash }); - } - return; - } - return; - } - // take it - coordinator.lock(hash); - - if (changefeedInterest[hash]) { - changefeedInterest[hash] = now(); - logger.debug("use existing changefeed", { - hash, - table: queryTable(query), - user, - }); - } else { - // we create new changefeeed but do NOT block on this. While creating this - // if user calls again then changefeedInterest[hash] is set, so it'll just - // use the existing changefeed (the case above). If things eventually go awry, - // changefeedInterest[hash] gets cleared. - createNewChangefeed({ query, user, nc, opts, hash }); - } - }, - { createKey: (args) => jsonStableStringify(args[0])! }, -); - -const createNewChangefeed = async ({ query, user, nc, opts, hash }) => { - logger.debug("create new changefeed", queryTable(query), user); - changefeedInterest[hash] = now(); - const changes = uuid(); - changefeedHashes[changes] = hash; - changefeedChanges[hash] = changes; - logger.debug( - "managing ", - Object.keys(changefeedHashes).length, - "changefeeds", - ); - const env = { nc, jc, sha1 }; - - let done = false; - // we start watching state immediately and updating it, since if it - // takes a while to setup the feed, we don't want somebody else to - // steal it. - const watchManagerState = async () => { - while (!done && changefeedInterest[hash]) { - await delay(LOCK_TIMEOUT_MS / 1.5); - if (done) { - return; - } - if (coordinator == null) { - done = true; - return; - } - const manager = coordinator.getManagerId(hash); - if (manager != coordinator.managerId) { - // we are no longer the manager - cancelChangefeed({ changes }); - done = true; - return; - } - // update the lock - coordinator.lock(hash); - } - }; - watchManagerState(); - - // If you change any settings below (i.e., atomic or immutable), you might also have to change them in - // src/packages/sync/table/changefeed-nats.ts - const synctable = createSyncTable({ - query, - env, - account_id: opts.account_id, - project_id: opts.project_id, - // atomic = false is just way too slow due to the huge number of distinct - // messages, which NATS is not as good with. - atomic: true, - immutable: false, - }); - changefeedSynctables[hash] = synctable; - - // before doing the HARD WORK, we wait until there aren't too many - // other "threads" doing hard work: - while (numChangefeedsBeingCreatedAtOnce >= PARALLEL_LIMIT) { - // TODO: This is STUPID - await delay(25); - } - try { - numChangefeedsBeingCreatedAtOnce += 1; - try { - await synctable.init(); - logger.debug("successfully created synctable", queryTable(query), user); - } catch (err) { - logger.debug(`Error initializing changefeed -- ${err}`, { hash }); - cancelChangefeed({ changes }); - } - - const handleFirst = ({ cb, err, rows }) => { - if (err || rows == null) { - cb(err ?? "missing result"); - return; - } - try { - if (synctable.get_state() != "connected") { - cb("not connected"); - return; - } - const current = synctable.get(); - const databaseKeys = new Set(); - for (const obj of rows) { - databaseKeys.add(synctable.getKey(obj)); - synctable.set(obj); - } - for (const key in current) { - if (!databaseKeys.has(key)) { - // console.log("remove from synctable", key); - synctable.delete(key); - } - } - cb(); - } catch (err) { - logger.debug(`Error handling first changefeed output -- ${err}`, { - hash, - }); - cb(err); - } - }; - - const handleUpdate = ({ action, new_val, old_val }) => { - // action = 'insert', 'update', 'delete', 'close' - // e.g., {"action":"insert","new_val":{"title":"testingxxxxx","project_id":"81e0c408-ac65-4114-bad5-5f4b6539bd0e"}} - const obj = new_val ?? old_val; - if (obj == null) { - // nothing we can do with this - return; - } - if (action == "insert" || action == "update") { - const cur = synctable.get(new_val); - // logger.debug({ table: queryTable(query), action, new_val, old_val }); - synctable.set({ ...cur, ...new_val }); - } else if (action == "delete") { - synctable.delete(old_val); - } else if (action == "close") { - cancelChangefeed({ changes }); - } - }; - - const f = (cb) => { - let first = true; - db().user_query({ - ...opts, - changes, - cb: (err, x) => { - if (first) { - first = false; - handleFirst({ cb, err, rows: x?.[synctable.table] }); - return; - } - try { - handleUpdate(x as any); - } catch (err) { - logger.debug(`Error handling update: ${err}`, { hash }); - cancelChangefeed({ changes }); - } - }, - }); - }; - try { - await callback(f); - // it's running successfully - changefeedInterest[hash] = now(); - - const watchUserInterest = async () => { - logger.debug("watchUserInterest", { hash }); - // it's all setup and running. If there's no interest for a while, stop watching - while (!done && changefeedInterest[hash]) { - await delay(CHANGEFEED_INTEREST_PERIOD_MS); - if (done) { - break; - } - if ( - now() - changefeedInterest[hash] > - CHANGEFEED_INTEREST_PERIOD_MS - ) { - logger.debug("watchUserInterest: no local interest", { - hash, - }); - // we check both the local known interest *AND* interest recorded - // by any other servers! - const last = coordinator?.getUserInterest(hash) ?? 0; - if (now() - last >= CHANGEFEED_INTEREST_PERIOD_MS) { - logger.debug("watchUserInterest: no interest, canceling", { - hash, - }); - cancelChangefeed({ changes }); - done = true; - break; - } - } - } - logger.debug("watchUserInterest: stopped watching since done", { - hash, - }); - }; - // do not block on this. - watchUserInterest(); - } catch (err) { - // if anything goes wrong, make sure we don't think the changefeed is working. - cancelChangefeed({ changes }); - logger.debug( - `WARNING: error creating changefeed -- ${err}`, - queryTable(query), - user, - ); - } - } finally { - numChangefeedsBeingCreatedAtOnce -= 1; - } -}; diff --git a/src/packages/database/nats/coordinator.ts b/src/packages/database/nats/coordinator.ts deleted file mode 100644 index 4f5e2864be..0000000000 --- a/src/packages/database/nats/coordinator.ts +++ /dev/null @@ -1,167 +0,0 @@ -/* -This is for managing who is responsible for each changefeed. - -It stores: - -- for each changefeed id, the managerId of who is manging it -- for each manager id, time when it last checked in - -The manger checks in 2.5x every timeout period. -If a manger doesn't check in for the entire timeout period, then -they are considered gone. - -DEVELOPMENT: - -c = await require('@cocalc/database/nats/coordinator').coordinator({timeout:10000}) - -*/ - -import { dkv, type DKV } from "@cocalc/backend/nats/sync"; -import { randomId } from "@cocalc/nats/names"; -import getTime from "@cocalc/nats/time"; - -interface Entry { - // last time user expressed interest in this changefeed - user?: number; - // manager of this changefeed. - managerId?: string; - // last time manager updated lock on this changefeed - lock?: number; -} - -function mergeTime( - a: number | undefined, - b: number | undefined, -): number | undefined { - // time of interest should clearly always be the largest known value so far. - if (a == null && b == null) { - return undefined; - } - return Math.max(a ?? 0, b ?? 0); -} - -// TODO: note -- local or remote may be null -- fix this! -function resolveMergeConflict(local?: Entry, remote?: Entry): Entry { - const user = mergeTime(remote?.user, local?.user); - let managerId = local?.managerId ?? remote?.managerId; - if ( - local?.managerId && - remote?.managerId && - local.managerId != remote.managerId - ) { - // conflicting manager - winner is one with newest lock. - if ((local.lock ?? 0) > (remote.lock ?? 0)) { - managerId = local.managerId; - } else { - managerId = remote.managerId; - } - } - const lock = mergeTime(remote?.lock, local?.lock); - return { user, lock, managerId }; -} - -export const now = () => getTime({ noError: true }); - -const LIMITS = { - // discard any keys that are 15 minutes old -- the lock and user interest - // updates are much more frequently than this, but this keeps memory usage down. - max_age: 1000 * 60 * 15, -}; - -export async function coordinator(opts) { - const C = new Coordinator(opts); - await C.init(); - return C; -} - -export class Coordinator { - public readonly managerId: string; - public dkv?: DKV; - - // if a manager hasn't update that it is managing this changefeed for timeout ms, then - // the lock is relinquished. - public readonly timeout: number; - - constructor({ timeout }: { timeout: number }) { - this.managerId = randomId(); - this.timeout = timeout; - } - - init = async () => { - this.dkv = await dkv({ - name: "changefeed-manager", - limits: LIMITS, - merge: ({ local, remote }) => resolveMergeConflict(local, remote), - }); - }; - - save = async () => { - await this.dkv?.save(); - }; - - close = async () => { - await this.dkv?.close(); - delete this.dkv; - }; - - getManagerId = (id: string): string | undefined => { - if (this.dkv == null) { - throw Error("coordinator is closed"); - } - const cur = this.dkv.get(id); - if (cur == null) { - return; - } - const { managerId, lock } = cur; - if (!managerId || !lock) { - return undefined; - } - if (lock < now() - this.timeout) { - // lock is too old - return undefined; - } - return managerId; - }; - - // update that this manager has the lock on this changefeed. - lock = (id: string) => { - if (this.dkv == null) { - throw Error("coordinator is closed"); - } - this.dkv.set(id, { - ...this.dkv.get(id), - lock: now(), - managerId: this.managerId, - }); - }; - - // ensure that this manager no longer has the lock - unlock = (id: string) => { - if (this.dkv == null) { - throw Error("coordinator is closed"); - } - const x: Entry = this.dkv.get(id) ?? {}; - if (x.managerId == this.managerId) { - // we are the manager - this.dkv.set(id, { ...x, lock: 0, managerId: "" }); - return; - } - }; - - // user expresses interest in changefeed with given id, - // which we may or may not be the manager of. - updateUserInterest = (id: string) => { - if (this.dkv == null) { - throw Error("coordinator is closed"); - } - this.dkv.set(id, { ...this.dkv.get(id), user: now() }); - }; - - getUserInterest = (id: string): number => { - if (this.dkv == null) { - throw Error("coordinator is closed"); - } - const { user } = this.dkv.get(id) ?? {}; - return user ?? 0; - }; -} diff --git a/src/packages/database/package.json b/src/packages/database/package.json index c5c02e9e5f..483e5eaf7b 100644 --- a/src/packages/database/package.json +++ b/src/packages/database/package.json @@ -5,7 +5,7 @@ "exports": { ".": "./dist/index.js", "./accounts/*": "./dist/accounts/*.js", - "./nats/*": "./dist/nats/*.js", + "./conat/*": "./dist/conat/*.js", "./pool": "./dist/pool/index.js", "./pool/*": "./dist/pool/*.js", "./postgres/*": "./dist/postgres/*.js", @@ -19,10 +19,9 @@ }, "dependencies": { "@cocalc/backend": "workspace:*", + "@cocalc/conat": "workspace:*", "@cocalc/database": "workspace:*", - "@cocalc/nats": "workspace:*", "@cocalc/util": "workspace:*", - "@nats-io/services": "3.0.0", "async": "^1.5.2", "awaiting": "^3.0.0", "debug": "^4.4.0", @@ -30,7 +29,6 @@ "json-stable-stringify": "^1.0.1", "lodash": "^4.17.21", "lru-cache": "^7.18.3", - "nats": "^2.29.3", "node-fetch": "2.6.7", "pg": "^8.7.1", "random-key": "^0.3.2", @@ -40,8 +38,8 @@ "validator": "^13.6.0" }, "devDependencies": { - "@types/node": "^18.16.14", "@types/lodash": "^4.14.202", + "@types/node": "^18.16.14", "@types/pg": "^8.6.1", "@types/uuid": "^8.3.1", "coffeescript": "^2.5.1" @@ -59,7 +57,10 @@ "url": "https://github.com/sagemathinc/cocalc" }, "homepage": "https://github.com/sagemathinc/cocalc", - "keywords": ["postgresql", "cocalc"], + "keywords": [ + "postgresql", + "cocalc" + ], "author": "SageMath, Inc.", "license": "SEE LICENSE.md", "bugs": { diff --git a/src/packages/database/tsconfig.json b/src/packages/database/tsconfig.json index a9234ba729..0121dd9584 100644 --- a/src/packages/database/tsconfig.json +++ b/src/packages/database/tsconfig.json @@ -8,7 +8,7 @@ "exclude": ["node_modules", "dist"], "references": [ { "path": "../backend" }, - { "path": "../nats" }, + { "path": "../conat" }, { "path": "../util" } ] } diff --git a/src/packages/file-server/package.json b/src/packages/file-server/package.json index 0665a655dc..5eac8e7af5 100644 --- a/src/packages/file-server/package.json +++ b/src/packages/file-server/package.json @@ -12,21 +12,30 @@ "tsc": "pnpm exec tsc --watch --pretty --preserveWatchOutput", "test": "pnpm exec jest --runInBand" }, - "files": ["dist/**", "README.md", "package.json"], + "files": [ + "dist/**", + "README.md", + "package.json" + ], "author": "SageMath, Inc.", - "keywords": ["utilities", "nats", "cocalc"], + "keywords": [ + "utilities", + "btrfs", + "zfs", + "cocalc" + ], "license": "SEE LICENSE.md", "dependencies": { "@cocalc/backend": "workspace:*", + "@cocalc/conat": "workspace:*", "@cocalc/file-server": "workspace:*", - "@cocalc/nats": "workspace:*", "@cocalc/util": "workspace:*", "awaiting": "^3.0.0", - "better-sqlite3": "^11.8.1", + "better-sqlite3": "^11.10.0", "lodash": "^4.17.21" }, "devDependencies": { - "@types/better-sqlite3": "^7.6.12", + "@types/better-sqlite3": "^7.6.13", "@types/lodash": "^4.14.202" }, "repository": { diff --git a/src/packages/file-server/tsconfig.json b/src/packages/file-server/tsconfig.json index 9d5e2b1337..c2fcccc371 100644 --- a/src/packages/file-server/tsconfig.json +++ b/src/packages/file-server/tsconfig.json @@ -5,5 +5,5 @@ "outDir": "dist" }, "exclude": ["node_modules", "dist", "test"], - "references": [{ "path": "../util", "path": "../nats", "path": "../backend" }] + "references": [{ "path": "../util", "path": "../conat", "path": "../backend" }] } diff --git a/src/packages/file-server/zfs/names.ts b/src/packages/file-server/zfs/names.ts index aee7b41cd5..a9f5ff7571 100644 --- a/src/packages/file-server/zfs/names.ts +++ b/src/packages/file-server/zfs/names.ts @@ -1,7 +1,7 @@ import { join } from "path"; import { context } from "./config"; import { primaryKey, type PrimaryKey } from "./types"; -import { randomId } from "@cocalc/nats/names"; +import { randomId } from "@cocalc/conat/names"; export function databaseFilename(data: string) { return join(data, "database.sqlite3"); diff --git a/src/packages/frontend/app/connection-indicator.tsx b/src/packages/frontend/app/connection-indicator.tsx index 5a8360d374..52b0a77854 100644 --- a/src/packages/frontend/app/connection-indicator.tsx +++ b/src/packages/frontend/app/connection-indicator.tsx @@ -44,9 +44,9 @@ export const ConnectionIndicator: React.FC = React.memo( const hub_status = useTypedRedux("page", "connection_status"); const mesg_info = useTypedRedux("account", "mesg_info"); const actions = useActions("page"); - const nats = useTypedRedux("page", "nats"); - const nats_status = nats?.get("state") ?? "disconnected"; - const connection_status = worst(hub_status, nats_status); + const conat = useTypedRedux("page", "conat"); + const conatState = conat?.get("state") ?? "disconnected"; + const connection_status = worst(hub_status, conatState); const connecting_style: CSS = { flex: "1", diff --git a/src/packages/frontend/app/connection-info.tsx b/src/packages/frontend/app/connection-info.tsx index 5276951bef..be29abc2ef 100644 --- a/src/packages/frontend/app/connection-info.tsx +++ b/src/packages/frontend/app/connection-info.tsx @@ -5,7 +5,6 @@ import { Modal } from "antd"; import { FormattedMessage, useIntl } from "react-intl"; -import { A } from "@cocalc/frontend/components/A"; import { Button, Col, Row } from "@cocalc/frontend/antd-bootstrap"; import { React, @@ -25,7 +24,7 @@ export const ConnectionInfo: React.FC = React.memo(() => { const status = useTypedRedux("page", "connection_status"); const hub = useTypedRedux("account", "hub"); const page_actions = useActions("page"); - const nats = useTypedRedux("page", "nats"); + const conat = useTypedRedux("page", "conat"); function close() { page_actions.show_connection(false); @@ -58,7 +57,7 @@ export const ConnectionInfo: React.FC = React.memo(() => {

@@ -79,14 +78,12 @@ export const ConnectionInfo: React.FC = React.memo(() => { ) : undefined} -

- NATS client -

+

Conat client

- {nats != null && ( + {conat != null && (
-                {JSON.stringify(nats.toJS(), undefined, 2)
+                {JSON.stringify(conat.toJS(), undefined, 2)
                   .replace(/{|}|,|\"/g, "")
                   .trim()
                   .replace("  data:", "data:")}
diff --git a/src/packages/frontend/app/localize.tsx b/src/packages/frontend/app/localize.tsx
index e9f5c9a12b..ec768293e8 100644
--- a/src/packages/frontend/app/localize.tsx
+++ b/src/packages/frontend/app/localize.tsx
@@ -57,7 +57,8 @@ export function Localize({ children }: { children: React.ReactNode }) {
     // NOTE: the locale will be set from the other_settings, on the "page".
     // So, for the default (english) we always have to render it, and then, maybe, a locale is set...
     if (locale === DEFAULT_LOCALE) {
-      return children;
+      // we are explicitly returning as any since ts is suddenly complaining about a potential bigint
+      return children as any;
     } else {
       if (isEmpty(messages)) {
         return (
@@ -68,7 +69,7 @@ export function Localize({ children }: { children: React.ReactNode }) {
           />
         );
       } else {
-        return children;
+        return children as any;
       }
     }
   }
diff --git a/src/packages/frontend/app/store.ts b/src/packages/frontend/app/store.ts
index ffa1a3e131..0157c3bba7 100644
--- a/src/packages/frontend/app/store.ts
+++ b/src/packages/frontend/app/store.ts
@@ -6,6 +6,7 @@
 import { redux, Store, TypedMap } from "@cocalc/frontend/app-framework";
 import target from "@cocalc/frontend/client/handle-target";
 import { parse_target } from "../history";
+import type { ConatConnectionStatus } from "@cocalc/frontend/conat/client";
 
 type TopTab =
   | "about" // the "/help" page
@@ -52,16 +53,7 @@ export interface PageState {
   };
 
   settingsModal?: string;
-  nats?: TypedMap<{
-    state: ConnectionStatus;
-    data: {
-      inBytes?: number;
-      inMsgs?: number;
-      outBytes?: number;
-      outMsgs?: number;
-    };
-    numConnections: number;
-  }>;
+  conat?: TypedMap;
 }
 
 export class PageStore extends Store {}
diff --git a/src/packages/frontend/client/account.ts b/src/packages/frontend/client/account.ts
index 5f8bc43b25..2253c4764e 100644
--- a/src/packages/frontend/client/account.ts
+++ b/src/packages/frontend/client/account.ts
@@ -181,6 +181,6 @@ export class AccountClient {
     id?: number;
     expire?: Date;
   }): Promise {
-    return await this.client.nats_client.hub.system.manageApiKeys(opts);
+    return await this.client.conat_client.hub.system.manageApiKeys(opts);
   }
 }
diff --git a/src/packages/frontend/client/admin.ts b/src/packages/frontend/client/admin.ts
index 00a30d7315..f99cb9d43b 100644
--- a/src/packages/frontend/client/admin.ts
+++ b/src/packages/frontend/client/admin.ts
@@ -37,7 +37,7 @@ export class AdminClient {
   }
 
   public async get_user_auth_token(user_account_id: string): Promise {
-    return await this.client.nats_client.hub.system.generateUserAuthToken({
+    return await this.client.conat_client.hub.system.generateUserAuthToken({
       user_account_id,
     });
   }
diff --git a/src/packages/frontend/client/client.ts b/src/packages/frontend/client/client.ts
index fbaba53a21..24f6239f65 100644
--- a/src/packages/frontend/client/client.ts
+++ b/src/packages/frontend/client/client.ts
@@ -21,7 +21,7 @@ import { SyncClient } from "@cocalc/sync/client/sync-client";
 import { UsersClient } from "./users";
 import { FileClient } from "./file";
 import { TrackingClient } from "./tracking";
-import { NatsClient } from "@cocalc/frontend/nats/client";
+import { ConatClient } from "@cocalc/frontend/conat/client";
 import { HubClient } from "./hub";
 import { IdleClient } from "./idle";
 import { version } from "@cocalc/util/smc-version";
@@ -32,13 +32,13 @@ import Cookies from "js-cookie";
 import { basePathCookieName } from "@cocalc/util/misc";
 import { ACCOUNT_ID_COOKIE_NAME } from "@cocalc/util/db-schema/accounts";
 import { appBasePath } from "@cocalc/frontend/customize/app-base-path";
-import type { NatsSyncTableFunction } from "@cocalc/nats/sync/synctable";
+import type { NatsSyncTableFunction } from "@cocalc/conat/sync/synctable";
 import type {
-  CallNatsServiceFunction,
-  CreateNatsServiceFunction,
-} from "@cocalc/nats/service";
-import type { NatsEnvFunction } from "@cocalc/nats/types";
-import { randomId } from "@cocalc/nats/names";
+  CallConatServiceFunction,
+  CreateConatServiceFunction,
+} from "@cocalc/conat/service";
+import type { NatsEnvFunction } from "@cocalc/conat/types";
+import { randomId } from "@cocalc/conat/names";
 
 // This DEBUG variable comes from webpack:
 declare const DEBUG;
@@ -79,7 +79,7 @@ export interface WebappClient extends EventEmitter {
   users_client: UsersClient;
   file_client: FileClient;
   tracking_client: TrackingClient;
-  nats_client: NatsClient;
+  conat_client: ConatClient;
   hub_client: HubClient;
   idle_client: IdleClient;
   client: Client;
@@ -91,9 +91,9 @@ export interface WebappClient extends EventEmitter {
   get_username: Function;
   is_signed_in: () => boolean;
   synctable_project: Function;
-  synctable_nats: NatsSyncTableFunction;
-  callNatsService: CallNatsServiceFunction;
-  createNatsService: CreateNatsServiceFunction;
+  synctable_conat: NatsSyncTableFunction;
+  callConatService: CallConatServiceFunction;
+  createConatService: CreateConatServiceFunction;
   getNatsEnv: NatsEnvFunction;
   pubsub_nats: Function;
   prettier: Function;
@@ -163,7 +163,7 @@ class Client extends EventEmitter implements WebappClient {
   users_client: UsersClient;
   file_client: FileClient;
   tracking_client: TrackingClient;
-  nats_client: NatsClient;
+  conat_client: ConatClient;
   hub_client: HubClient;
   idle_client: IdleClient;
   client: Client;
@@ -175,9 +175,9 @@ class Client extends EventEmitter implements WebappClient {
   get_username: Function;
   is_signed_in: () => boolean;
   synctable_project: Function;
-  synctable_nats: NatsSyncTableFunction;
-  callNatsService: CallNatsServiceFunction;
-  createNatsService: CreateNatsServiceFunction;
+  synctable_conat: NatsSyncTableFunction;
+  callConatService: CallConatServiceFunction;
+  createConatService: CreateConatServiceFunction;
   getNatsEnv: NatsEnvFunction;
   pubsub_nats: Function;
   prettier: Function;
@@ -246,7 +246,7 @@ class Client extends EventEmitter implements WebappClient {
     );
     this.users_client = bind_methods(new UsersClient(this));
     this.tracking_client = bind_methods(new TrackingClient(this));
-    this.nats_client = bind_methods(new NatsClient(this));
+    this.conat_client = bind_methods(new ConatClient(this));
     this.file_client = bind_methods(new FileClient(this.async_call.bind(this)));
     this.idle_client = bind_methods(new IdleClient(this));
 
@@ -269,11 +269,11 @@ class Client extends EventEmitter implements WebappClient {
     this.synctable_project = this.sync_client.synctable_project.bind(
       this.sync_client,
     );
-    this.synctable_nats = this.nats_client.synctable;
-    this.pubsub_nats = this.nats_client.pubsub;
-    this.callNatsService = this.nats_client.callNatsService;
-    this.createNatsService = this.nats_client.createNatsService;
-    this.getNatsEnv = this.nats_client.getEnv;
+    this.synctable_conat = this.conat_client.synctable;
+    this.pubsub_nats = this.conat_client.pubsub;
+    this.callConatService = this.conat_client.callConatService;
+    this.createConatService = this.conat_client.createConatService;
+    this.getNatsEnv = this.conat_client.getEnv;
 
     this.query = this.query_client.query.bind(this.query_client);
     this.async_query = this.query_client.query.bind(this.query_client);
@@ -358,7 +358,7 @@ class Client extends EventEmitter implements WebappClient {
     // if file is deleted, this explicitly undeletes it.
     setNotDeleted?: boolean;
   }) => {
-    const x = await this.nats_client.openFiles(project_id);
+    const x = await this.conat_client.openFiles(project_id);
     if (setNotDeleted) {
       x.setNotDeleted(path);
     }
diff --git a/src/packages/frontend/client/console.ts b/src/packages/frontend/client/console.ts
index 925b839e40..f0f686a2f7 100644
--- a/src/packages/frontend/client/console.ts
+++ b/src/packages/frontend/client/console.ts
@@ -45,7 +45,7 @@ export function setup_global_cocalc(client): void {
 
   const cocalc: any = window.cc ?? {};
   cocalc.client = client;
-  cocalc.nats = client.nats_client;
+  cocalc.conat = client.conat_client;
   cocalc.misc = require("@cocalc/util/misc");
   cocalc.immutable = require("immutable");
   cocalc.done = cocalc.misc.done;
diff --git a/src/packages/frontend/client/hub.ts b/src/packages/frontend/client/hub.ts
index b9a2433e6c..42dc2fb292 100644
--- a/src/packages/frontend/client/hub.ts
+++ b/src/packages/frontend/client/hub.ts
@@ -514,7 +514,7 @@ export class HubClient {
     this.delete_websocket_cookie();
     this.conn?.end();
     this.conn?.open();
-    this.client.nats_client.reconnect();
+    this.client.conat_client.reconnect();
   }
 
   public latency(): number | void {
diff --git a/src/packages/frontend/client/idle.ts b/src/packages/frontend/client/idle.ts
index 8569d7b917..45ebe149a5 100644
--- a/src/packages/frontend/client/idle.ts
+++ b/src/packages/frontend/client/idle.ts
@@ -113,7 +113,7 @@ export class IdleClient {
         console.log("Entering standby mode");
         this.standbyMode = true;
         // console.log("idle timeout: disconnect!");
-        this.client.nats_client.standby();
+        this.client.conat_client.standby();
         this.client.hub_client.disconnect();
         disconnect_from_all_projects();
       }, CHECK_INTERVAL / 2);
@@ -133,7 +133,7 @@ export class IdleClient {
     if (this.standbyMode) {
       this.standbyMode = false;
       console.log("Leaving standby mode");
-      this.client.nats_client.resume();
+      this.client.conat_client.resume();
       this.client.hub_client.reconnect();
     }
   };
diff --git a/src/packages/frontend/client/llm.ts b/src/packages/frontend/client/llm.ts
index 821e24c841..9303c05f3b 100644
--- a/src/packages/frontend/client/llm.ts
+++ b/src/packages/frontend/client/llm.ts
@@ -183,13 +183,13 @@ export class LLMClient {
 
     if (chatStream == null) {
       // not streaming
-      return await this.client.nats_client.llm(options);
+      return await this.client.conat_client.llm(options);
     }
 
     chatStream.once("start", async () => {
       // streaming version
       try {
-        await this.client.nats_client.llm({
+        await this.client.conat_client.llm({
           ...options,
           stream: chatStream.process,
         });
diff --git a/src/packages/frontend/client/project.ts b/src/packages/frontend/client/project.ts
index f0644350cf..35811f6e91 100644
--- a/src/packages/frontend/client/project.ts
+++ b/src/packages/frontend/client/project.ts
@@ -50,11 +50,10 @@ import {
 } from "@cocalc/util/misc";
 import { reuseInFlight } from "@cocalc/util/reuse-in-flight";
 import { DirectoryListingEntry } from "@cocalc/util/types";
-import httpApi from "./api";
 import { WebappClient } from "./client";
 import { throttle } from "lodash";
-import { writeFile, type WriteFileOptions } from "@cocalc/nats/files/write";
-import { readFile, type ReadFileOptions } from "@cocalc/nats/files/read";
+import { writeFile, type WriteFileOptions } from "@cocalc/conat/files/write";
+import { readFile, type ReadFileOptions } from "@cocalc/conat/files/read";
 
 export class ProjectClient {
   private client: WebappClient;
@@ -69,7 +68,7 @@ export class ProjectClient {
   }
 
   private natsApi = (project_id: string) => {
-    return this.client.nats_client.projectApi({ project_id });
+    return this.client.conat_client.projectApi({ project_id });
   };
 
   // This can write small text files in one message.
@@ -240,7 +239,7 @@ export class ProjectClient {
     time" (which is stored in the db), which they client will know.  This is used, e.g.,
     for operations like "run rst2html on this file whenever it is saved."
     */
-  public async exec(opts: ExecOpts & { post?: boolean }): Promise {
+  exec = async (opts: ExecOpts & { post?: boolean }): Promise => {
     if ("async_get" in opts) {
       opts = defaults(opts, {
         project_id: required,
@@ -288,19 +287,10 @@ export class ProjectClient {
       };
     }
 
-    const { post } = opts;
-    delete opts.post;
-
     try {
-      let msg;
-      if (post) {
-        // use post API
-        msg = await httpApi("exec", opts);
-      } else {
-        const ws = await this.websocket(opts.project_id);
-        const exec_opts = copy_without(opts, ["project_id"]);
-        msg = await ws.api.exec(exec_opts);
-      }
+      const ws = await this.websocket(opts.project_id);
+      const exec_opts = copy_without(opts, ["project_id", "cb"]);
+      const msg = await ws.api.exec(exec_opts);
       if (msg.status && msg.status == "error") {
         throw new Error(msg.error);
       }
@@ -333,7 +323,7 @@ export class ProjectClient {
         };
       }
     }
-  }
+  };
 
   // Directly compute the directory listing.  No caching or other information
   // is used -- this just sends a message over the websocket requesting
@@ -487,7 +477,7 @@ export class ProjectClient {
     }
     this.touch_throttle[project_id] = Date.now();
     try {
-      await this.client.nats_client.hub.db.touch({ project_id });
+      await this.client.conat_client.hub.db.touch({ project_id });
     } catch (err) {
       // silently ignore; this happens, e.g., if you touch too frequently,
       // and shouldn't be fatal and break other things.
@@ -538,7 +528,7 @@ export class ProjectClient {
     noPool?: boolean;
   }): Promise {
     const project_id =
-      await this.client.nats_client.hub.projects.createProject(opts);
+      await this.client.conat_client.hub.projects.createProject(opts);
     this.client.tracking_client.user_tracking("create_project", {
       project_id,
       title: opts.title,
@@ -631,7 +621,7 @@ export class ProjectClient {
     id?: number;
     expire?: Date;
   }): Promise {
-    return await this.client.nats_client.hub.system.manageApiKeys(opts);
+    return await this.client.conat_client.hub.system.manageApiKeys(opts);
   }
 
   computeServers = (project_id) => {
diff --git a/src/packages/frontend/client/purchases.ts b/src/packages/frontend/client/purchases.ts
index 12e0357f8b..85ed05ff38 100644
--- a/src/packages/frontend/client/purchases.ts
+++ b/src/packages/frontend/client/purchases.ts
@@ -32,7 +32,7 @@ export class PurchasesClient {
   }
 
   async getBalance(): Promise {
-    return await this.client.nats_client.hub.purchases.getBalance();
+    return await this.client.conat_client.hub.purchases.getBalance();
   }
 
   async getSpendRate(): Promise {
diff --git a/src/packages/frontend/client/query.ts b/src/packages/frontend/client/query.ts
index c5d96539a5..82ebb141ec 100644
--- a/src/packages/frontend/client/query.ts
+++ b/src/packages/frontend/client/query.ts
@@ -6,15 +6,14 @@
 import { is_array } from "@cocalc/util/misc";
 import { validate_client_query } from "@cocalc/util/schema-validate";
 import { CB } from "@cocalc/util/types/database";
-import { NatsChangefeed } from "@cocalc/sync/table/changefeed-nats2";
+import { ConatChangefeed } from "@cocalc/sync/table/changefeed-conat";
 import { uuid } from "@cocalc/util/misc";
-import { client_db } from "@cocalc/util/schema";
 
 declare const $: any; // jQuery
 
 export class QueryClient {
   private client: any;
-  private changefeeds: { [id: string]: NatsChangefeed } = {};
+  private changefeeds: { [id: string]: ConatChangefeed } = {};
 
   constructor(client: any) {
     this.client = client;
@@ -47,7 +46,7 @@ export class QueryClient {
       }
       let changefeed;
       try {
-        changefeed = new NatsChangefeed({
+        changefeed = new ConatChangefeed({
           account_id: this.client.account_id,
           query: opts.query,
           options: opts.options,
@@ -73,19 +72,11 @@ export class QueryClient {
         if (err) {
           throw Error(err);
         }
-        const query = await this.client.nats_client.hub.db.userQuery({
+        const query = await this.client.conat_client.hub.db.userQuery({
           query: opts.query,
           options: opts.options,
         });
 
-        if (query && !opts.options?.[0]?.["set"]) {
-          // set thing isn't needed but doesn't hurt
-          // deal with timestamp versus Date and JSON using our schema.
-          for (const table in query) {
-            client_db.processDates({ table, rows: query[table] });
-          }
-        }
-
         if (opts.cb == null) {
           return { query };
         } else {
diff --git a/src/packages/frontend/client/time.ts b/src/packages/frontend/client/time.ts
index 76b4ce3e99..f8146db60a 100644
--- a/src/packages/frontend/client/time.ts
+++ b/src/packages/frontend/client/time.ts
@@ -3,7 +3,7 @@
  *  License: MS-RSL – see LICENSE.md for details
  */
 
-import getTime, { getLastSkew, getLastPingTime } from "@cocalc/nats/time";
+import getTime, { getLastSkew, getLastPingTime } from "@cocalc/conat/time";
 
 const PING_INTERVAL_MS = 10000;
 
diff --git a/src/packages/frontend/client/tracking.ts b/src/packages/frontend/client/tracking.ts
index 3a8dc2b13b..723327e725 100644
--- a/src/packages/frontend/client/tracking.ts
+++ b/src/packages/frontend/client/tracking.ts
@@ -23,7 +23,7 @@ export class TrackingClient {
         ?.get("user_tracking");
     }
     if (this.userTrackingEnabled == "yes") {
-      await this.client.nats_client.hub.system.userTracking({ event, value });
+      await this.client.conat_client.hub.system.userTracking({ event, value });
     }
   };
 
diff --git a/src/packages/frontend/client/users.ts b/src/packages/frontend/client/users.ts
index 4598ef9e7e..75e1cbf01a 100644
--- a/src/packages/frontend/client/users.ts
+++ b/src/packages/frontend/client/users.ts
@@ -50,7 +50,7 @@ export class UsersClient {
       admin?: boolean; // admins can do an admin version of the query, which also does substring searches on email address (not just name)
       only_email?: boolean; // search only via email address
     }): Promise => {
-      return await this.client.nats_client.hub.system.userSearch({
+      return await this.client.conat_client.hub.system.userSearch({
         query,
         limit,
         admin,
@@ -99,7 +99,7 @@ export class UsersClient {
       }
     }
     if (v.length > 0) {
-      const names = await this.client.nats_client.hub.system.getNames(v);
+      const names = await this.client.conat_client.hub.system.getNames(v);
       for (const account_id of v) {
         // iterate over v to record accounts that don't exist too
         x[account_id] = names[account_id];
diff --git a/src/packages/frontend/compute/manager.ts b/src/packages/frontend/compute/manager.ts
index 25cc99824a..346cf1dcb2 100644
--- a/src/packages/frontend/compute/manager.ts
+++ b/src/packages/frontend/compute/manager.ts
@@ -12,7 +12,7 @@ When doing dev from the browser console, do:
 import {
   computeServerManager,
   type ComputeServerManager,
-} from "@cocalc/nats/compute/manager";
+} from "@cocalc/conat/compute/manager";
 
 const computeServerManagerCache: {
   [project_id: string]: ComputeServerManager;
diff --git a/src/packages/frontend/nats/client.ts b/src/packages/frontend/conat/client.ts
similarity index 50%
rename from src/packages/frontend/nats/client.ts
rename to src/packages/frontend/conat/client.ts
index feb3565ad4..356c9d8953 100644
--- a/src/packages/frontend/nats/client.ts
+++ b/src/packages/frontend/conat/client.ts
@@ -1,99 +1,118 @@
-import * as nats from "nats.ws";
-import { connect, type CoCalcNatsConnection } from "./connection";
 import { redux } from "@cocalc/frontend/app-framework";
 import type { WebappClient } from "@cocalc/frontend/client/client";
 import { reuseInFlight } from "@cocalc/util/reuse-in-flight";
-import * as jetstream from "@nats-io/jetstream";
 import {
   createSyncTable,
   type NatsSyncTable,
   NatsSyncTableFunction,
-} from "@cocalc/nats/sync/synctable";
-import { randomId } from "@cocalc/nats/names";
-import { browserSubject, projectSubject } from "@cocalc/nats/names";
+} from "@cocalc/conat/sync/synctable";
+import { randomId, inboxPrefix } from "@cocalc/conat/names";
+import { projectSubject } from "@cocalc/conat/names";
 import { parse_query } from "@cocalc/sync/table/util";
 import { sha1 } from "@cocalc/util/misc";
 import { keys } from "lodash";
-import { type HubApi, initHubApi } from "@cocalc/nats/hub-api";
-import { type ProjectApi, initProjectApi } from "@cocalc/nats/project-api";
-import { type BrowserApi, initBrowserApi } from "@cocalc/nats/browser-api";
-import { getPrimusConnection } from "@cocalc/nats/primus";
+import { type HubApi, initHubApi } from "@cocalc/conat/hub-api";
+import { type ProjectApi, initProjectApi } from "@cocalc/conat/project-api";
+import { getPrimusConnection } from "@cocalc/conat/primus";
 import { isValidUUID } from "@cocalc/util/misc";
-import { createOpenFiles, OpenFiles } from "@cocalc/nats/sync/open-files";
-import { PubSub } from "@cocalc/nats/sync/pubsub";
+import { createOpenFiles, OpenFiles } from "@cocalc/conat/sync/open-files";
+import { PubSub } from "@cocalc/conat/sync/pubsub";
 import type { ChatOptions } from "@cocalc/util/types/llm";
-import { kv, type KVOptions, type KV } from "@cocalc/nats/sync/kv";
-import { dkv, type DKVOptions, type DKV } from "@cocalc/nats/sync/dkv";
-import { dko, type DKO } from "@cocalc/nats/sync/dko";
-import {
-  stream,
-  type UserStreamOptions,
-  type Stream,
-} from "@cocalc/nats/sync/stream";
-import { dstream } from "@cocalc/nats/sync/dstream";
-import { initApi } from "@cocalc/frontend/nats/api";
+import { dkv } from "@cocalc/conat/sync/dkv";
+import { akv } from "@cocalc/conat/sync/akv";
+import { dko } from "@cocalc/conat/sync/dko";
+import { dstream } from "@cocalc/conat/sync/dstream";
 import { delay } from "awaiting";
-import { callNatsService, createNatsService } from "@cocalc/nats/service";
+import { callConatService, createConatService } from "@cocalc/conat/service";
 import type {
-  CallNatsServiceFunction,
-  CreateNatsServiceFunction,
-} from "@cocalc/nats/service";
-import { listingsClient } from "@cocalc/nats/service/listings";
+  CallConatServiceFunction,
+  CreateConatServiceFunction,
+} from "@cocalc/conat/service";
+import { listingsClient } from "@cocalc/conat/service/listings";
 import {
   computeServerManager,
   type Options as ComputeServerManagerOptions,
-} from "@cocalc/nats/compute/manager";
-import getTime, { getSkew, init as initTime } from "@cocalc/nats/time";
-import { llm } from "@cocalc/nats/llm/client";
-import { inventory } from "@cocalc/nats/sync/inventory";
+} from "@cocalc/conat/compute/manager";
+import getTime, { getSkew, init as initTime } from "@cocalc/conat/time";
+import { llm } from "@cocalc/conat/llm/client";
+import { inventory } from "@cocalc/conat/sync/inventory";
 import { EventEmitter } from "events";
 import {
   getClient as getClientWithState,
-  setNatsClient,
+  setConatClient,
   type ClientWithState,
   getEnv,
-} from "@cocalc/nats/client";
+} from "@cocalc/conat/client";
 import type { ConnectionInfo } from "./types";
-import { fromJS } from "immutable";
-import { requestMany } from "@cocalc/nats/service/many";
 import Cookies from "js-cookie";
 import { ACCOUNT_ID_COOKIE } from "@cocalc/frontend/client/client";
-import { isConnected, waitUntilConnected } from "@cocalc/nats/util";
+import { isConnected, waitUntilConnected } from "@cocalc/conat/util";
 import { info as refCacheInfo } from "@cocalc/util/refcache";
-import * as tieredStorage from "@cocalc/nats/tiered-storage/client";
-
-const NATS_STATS_INTERVAL = 2500;
+import { connect as connectToConat } from "@cocalc/conat/core/client";
+import { join } from "path";
+import { appBasePath } from "@cocalc/frontend/customize/app-base-path";
+
+export interface ConatConnectionStatus {
+  state: "connected" | "disconnected";
+  reason: string;
+  details: any;
+}
 
 const DEFAULT_TIMEOUT = 15000;
 
 declare var DEBUG: boolean;
 
-export class NatsClient extends EventEmitter {
+export class ConatClient extends EventEmitter {
   client: WebappClient;
-  private sc = nats.StringCodec();
-  private jc = nats.JSONCodec();
-  private nc?: CoCalcNatsConnection;
-  public nats = nats;
-  public jetstream = jetstream;
+  private sc: any = null;
+  private jc: any = null;
+  private nc?: any;
   public hub: HubApi;
   public sessionId = randomId();
   private openFilesCache: { [project_id: string]: OpenFiles } = {};
   private clientWithState: ClientWithState;
+  private _conatClient: null | ReturnType;
 
   constructor(client: WebappClient) {
     super();
     this.setMaxListeners(100);
     this.client = client;
     this.hub = initHubApi(this.callHub);
-    this.initBrowserApi();
-    this.initNatsClient();
+    this.initConatClient();
     this.on("state", (state) => {
       this.emit(state);
       this.setConnectionState(state);
     });
   }
 
-  private initNatsClient = async () => {
+  private setConnectionStatus = (status: ConatConnectionStatus) => {
+    if (redux == null) {
+      return;
+    }
+    redux.getActions("page")?.setState({ conat: status } as any);
+  };
+
+  conat = () => {
+    if (this._conatClient == null) {
+      this._conatClient = connectToConat("/", {
+        path: join(appBasePath, "conat"),
+        inboxPrefix: inboxPrefix({ account_id: this.client.account_id }),
+      });
+      this._conatClient.conn.on("connect", () => {
+        this.setConnectionStatus({
+          state: "connected",
+          reason: "",
+          details: "",
+        });
+      });
+      this._conatClient.conn.on("disconnect", (reason, details) => {
+        this.setConnectionStatus({ state: "disconnected", reason, details });
+      });
+    }
+    return this._conatClient!;
+  };
+
+  private initConatClient = async () => {
     let d = 100;
     // wait until you're signed in -- usually the account_id cookie ensures this,
     // but if somehow it got deleted, the normal websocket sign in message from the
@@ -107,7 +126,7 @@ export class NatsClient extends EventEmitter {
       // we know the account_id, so set it so next time sign is faster.
       Cookies.set(ACCOUNT_ID_COOKIE, this.client.account_id);
     }
-    setNatsClient({
+    setConatClient({
       account_id: this.client.account_id,
       getNatsEnv: this.getNatsEnv,
       reconnect: this.reconnect,
@@ -133,48 +152,22 @@ export class NatsClient extends EventEmitter {
 
   getEnv = async () => await getEnv();
 
-  private initBrowserApi = async () => {
-    if (!this.client.account_id) {
-      // it's impossible to initialize the browser api if user is not signed in,
-      // and there is no way to ever sign in without explicitly leaving this
-      // page and coming back.
-      return;
-    }
-    // have to delay so that this.client is fully created.
-    await delay(1);
-    let d = 2000;
-    while (true) {
-      try {
-        await initApi();
-        return;
-      } catch (err) {
-        console.log(
-          `WARNING: failed to initialize browser api (will retry) -- ${err}`,
-        );
-      }
-      d = Math.min(25000, d * 1.3) + Math.random();
-      await delay(d);
-    }
-  };
-
   private getConnection = reuseInFlight(async () => {
-    if (this.nc != null) {
-      return this.nc;
-    }
-    this.nc = await connect();
-    this.setConnectionState("connected");
-    this.monitorConnectionState(this.nc);
-    this.reportConnectionStats(this.nc);
-    return this.nc;
+    return null as any;
+
+    //     if (this.nc != null) {
+    //       return this.nc;
+    //     }
+    //     this.nc = await connect();
+    //     this.setConnectionState("connected");
+    //     this.monitorConnectionState(this.nc);
+    //     this.reportConnectionStats(this.nc);
+    //     return this.nc;
   });
 
   reconnect = reuseInFlight(async () => {
-    if (this.nc != null) {
-      console.log("NATS connection: reconnecting...");
-      this.standby();
-      await delay(50);
-      await this.resume();
-    }
+    this._conatClient?.conn.io.engine.close();
+    this._conatClient?.conn.connect();
   });
 
   // if there is a connection, put it in standby
@@ -186,16 +179,6 @@ export class NatsClient extends EventEmitter {
     await this.nc?.resume();
   };
 
-  // reconnect to nats with access to additional projects.
-  // If you request projects that you're not actually a collaborator
-  // on, then it will silently NOT give you permission to use them.
-  addProjectPermissions = async (project_ids: string[]) => {
-    if (this.nc == null) {
-      throw Error("must have a connection");
-    }
-    await this.nc.addProjectPermissions(project_ids);
-  };
-
   private setConnectionState = (state?) => {
     const page = redux?.getActions("page");
     if (page == null) {
@@ -209,47 +192,26 @@ export class NatsClient extends EventEmitter {
     } as any);
   };
 
-  private monitorConnectionState = async (nc) => {
-    for await (const _ of nc.statusOfCurrentConnection()) {
-      this.setConnectionState();
-    }
+  callConatService: CallConatServiceFunction = async (options) => {
+    return await callConatService(options);
   };
 
-  private reportConnectionStats = async (nc) => {
-    while (true) {
-      const store = redux?.getStore("page");
-      const actions = redux?.getActions("page");
-      if (store != null && actions != null) {
-        const cur = store.get("nats") ?? (fromJS({}) as any);
-        const nats = cur.set("data", fromJS(nc.stats()));
-        if (!cur.equals(nats)) {
-          actions.setState({ nats });
-        }
-      }
-      await delay(NATS_STATS_INTERVAL);
-    }
-  };
-
-  callNatsService: CallNatsServiceFunction = async (options) => {
-    return await callNatsService(options);
+  createConatService: CreateConatServiceFunction = (options) => {
+    return createConatService(options);
   };
 
-  createNatsService: CreateNatsServiceFunction = (options) => {
-    return createNatsService(options);
-  };
-
-  // TODO: plan to deprecated...
+  // TODO: plan to deprecated...?
   projectWebsocketApi = async ({
     project_id,
     mesg,
     timeout = DEFAULT_TIMEOUT,
   }) => {
-    const { nc } = await this.getEnv();
+    const { cn } = await this.getEnv();
     const subject = projectSubject({ project_id, service: "browser-api" });
-    const resp = await nc.request(subject, this.jc.encode(mesg), {
+    const resp = await cn.request(subject, mesg, {
       timeout,
     });
-    return this.jc.decode(resp.data);
+    return resp.data;
   };
 
   private callHub = async ({
@@ -257,31 +219,18 @@ export class NatsClient extends EventEmitter {
     name,
     args = [],
     timeout = DEFAULT_TIMEOUT,
-    requestMany: requestMany0 = false,
   }: {
     service?: string;
     name: string;
     args: any[];
     timeout?: number;
-    // requestMany -- if true do a requestMany request, which is more complicated/slower, but
-    // supports arbitrarily large responses irregardless of the nats server max message size.
-    requestMany?: boolean;
   }) => {
-    const { nc } = await this.getEnv();
+    const { cn } = await this.getEnv();
     const subject = `hub.account.${this.client.account_id}.${service}`;
     try {
-      const data = this.jc.encode({
-        name,
-        args,
-      });
-      let resp;
-      await waitUntilConnected();
-      if (requestMany0) {
-        resp = await requestMany({ nc, subject, data, maxWait: timeout });
-      } else {
-        resp = await nc.request(subject, data, { timeout });
-      }
-      return this.jc.decode(resp.data);
+      const data = { name, args };
+      const resp = await cn.request(subject, data, { timeout });
+      return resp.data;
     } catch (err) {
       err.message = `${err.message} - callHub: subject='${subject}', name='${name}', `;
       throw err;
@@ -307,7 +256,6 @@ export class NatsClient extends EventEmitter {
     if (!isValidUUID(project_id)) {
       throw Error(`project_id = '${project_id}' must be a valid uuid`);
     }
-    let lastAddedPermission = 0;
     if (compute_server_id == null) {
       const actions = redux.getProjectActions(project_id);
       if (path != null) {
@@ -319,30 +267,14 @@ export class NatsClient extends EventEmitter {
       }
     }
     const callProjectApi = async ({ name, args }) => {
-      const opts = {
+      return await this.callProject({
         project_id,
         compute_server_id,
         timeout,
         service: "api",
         name,
         args,
-      };
-      try {
-        await waitUntilConnected();
-        return await this.callProject(opts);
-      } catch (err) {
-        if (
-          err.code == "PERMISSIONS_VIOLATION" &&
-          Date.now() - lastAddedPermission >= 30000
-        ) {
-          lastAddedPermission = Date.now();
-          await this.addProjectPermissions([project_id]);
-          await waitUntilConnected();
-          return await this.callProject(opts);
-        } else {
-          throw err;
-        }
-      }
+      });
     };
     return initProjectApi(callProjectApi);
   };
@@ -362,75 +294,10 @@ export class NatsClient extends EventEmitter {
     args: any[];
     timeout?: number;
   }) => {
-    const { nc } = await this.getEnv();
+    const { cn } = await this.getEnv();
     const subject = projectSubject({ project_id, compute_server_id, service });
-    const mesg = this.jc.encode({
-      name,
-      args,
-    });
-    let resp;
-    try {
-      await waitUntilConnected();
-      resp = await nc.request(subject, mesg, { timeout });
-    } catch (err) {
-      if (err.code == "PERMISSIONS_VIOLATION") {
-        // request update of our credentials to include this project, then try again
-        await (nc as any).addProjectPermissions([project_id]);
-        await waitUntilConnected();
-        resp = await nc.request(subject, mesg, { timeout });
-      } else {
-        throw err;
-      }
-    }
-    return this.jc.decode(resp.data);
-  };
-
-  private callBrowser = async ({
-    service = "api",
-    sessionId,
-    name,
-    args = [],
-    timeout = DEFAULT_TIMEOUT,
-  }: {
-    service?: string;
-    sessionId: string;
-    name: string;
-    args: any[];
-    timeout?: number;
-  }) => {
-    const { nc } = await this.getEnv();
-    const subject = browserSubject({
-      account_id: this.client.account_id,
-      sessionId,
-      service,
-    });
-    const mesg = this.jc.encode({
-      name,
-      args,
-    });
-    // console.log("request to subject", { subject, name, args });
-    await waitUntilConnected();
-    const resp = await nc.request(subject, mesg, { timeout });
-    return this.jc.decode(resp.data);
-  };
-
-  browserApi = ({
-    sessionId,
-    timeout = DEFAULT_TIMEOUT,
-  }: {
-    sessionId: string;
-    timeout?: number;
-  }): BrowserApi => {
-    const callBrowserApi = async ({ name, args }) => {
-      return await this.callBrowser({
-        sessionId,
-        timeout,
-        service: "api",
-        name,
-        args,
-      });
-    };
-    return initBrowserApi(callBrowserApi);
+    const resp = await cn.request(subject, { name, args }, { timeout });
+    return resp.data;
   };
 
   request = async (subject: string, data: string) => {
@@ -440,17 +307,12 @@ export class NatsClient extends EventEmitter {
     return this.sc.decode(resp.data);
   };
 
-  consumer = async (stream: string) => {
-    const { nc } = await this.getEnv();
-    const js = jetstream.jetstream(nc);
-    return await js.consumers.get(stream);
-  };
-
   private getNatsEnv = async () => {
     return {
       sha1,
       jc: this.jc,
       nc: await this.getConnection(),
+      cn: this.conat(),
     };
   };
 
@@ -479,30 +341,6 @@ export class NatsClient extends EventEmitter {
     return s;
   };
 
-  changefeedInterest = async (query, noError?: boolean) => {
-    // express interest
-    // (re-)start changefeed going
-    try {
-      await this.client.nats_client.callHub({
-        service: "db",
-        name: "userQuery",
-        args: [{ changes: true, query }],
-      });
-    } catch (err) {
-      if (noError) {
-        console.log(`WARNING: changefeed -- ${err}`, query);
-        return;
-      } else {
-        throw err;
-      }
-    }
-  };
-
-  changefeed = async (query, options?) => {
-    this.changefeedInterest(query, true);
-    return await this.synctable(query, options);
-  };
-
   // DEPRECATED
   primus = async (project_id: string) => {
     return getPrimusConnection({
@@ -571,43 +409,10 @@ export class NatsClient extends EventEmitter {
     return await llm({ account_id: this.client.account_id, ...opts });
   };
 
-  stream = async (
-    opts: Partial & { name: string },
-  ): Promise> => {
-    if (!opts.account_id && !opts.project_id && opts.limits != null) {
-      throw Error("account client can't set limits on public stream");
-    }
-    return await stream({ env: await this.getEnv(), ...opts });
-  };
-
   dstream = dstream;
-
-  kv = async (
-    opts: Partial & { name: string },
-  ): Promise> => {
-    //     if (!opts.account_id && !opts.project_id && opts.limits != null) {
-    //       throw Error("account client can't set limits on public stream");
-    //     }
-    return await kv({ env: await this.getEnv(), ...opts });
-  };
-
-  dkv = async (
-    opts: Partial & { name: string },
-  ): Promise> => {
-    //     if (!opts.account_id && !opts.project_id && opts.limits != null) {
-    //       throw Error("account client can't set limits on public stream");
-    //     }
-    return await dkv({ env: await this.getEnv(), ...opts });
-  };
-
-  dko = async (
-    opts: Partial & { name: string },
-  ): Promise> => {
-    //     if (!opts.account_id && !opts.project_id && opts.limits != null) {
-    //       throw Error("account client can't set limits on public stream");
-    //     }
-    return await dko({ env: await this.getEnv(), ...opts });
-  };
+  dkv = dkv;
+  akv = akv;
+  dko = dko;
 
   listings = async (opts: {
     project_id: string;
@@ -617,21 +422,9 @@ export class NatsClient extends EventEmitter {
   };
 
   computeServerManager = async (options: ComputeServerManagerOptions) => {
-    const f = async () => {
-      const M = computeServerManager(options);
-      await M.init();
-      return M;
-    };
-    try {
-      return await f();
-    } catch (err) {
-      if (err.code == "PERMISSIONS_VIOLATION" && options.project_id) {
-        await this.addProjectPermissions([options.project_id]);
-        return await f();
-      } else {
-        throw err;
-      }
-    }
+    const M = computeServerManager(options);
+    await M.init();
+    return M;
   };
 
   getTime = (): number => {
@@ -667,8 +460,6 @@ export class NatsClient extends EventEmitter {
   waitUntilConnected = async () => await waitUntilConnected();
 
   refCacheInfo = () => refCacheInfo();
-
-  tieredStorage = tieredStorage;
 }
 
 function setDeleted({ project_id, path, deleted }) {
diff --git a/src/packages/frontend/nats/listings.ts b/src/packages/frontend/conat/listings.ts
similarity index 97%
rename from src/packages/frontend/nats/listings.ts
rename to src/packages/frontend/conat/listings.ts
index 2d08285bfa..9d8bb2a232 100644
--- a/src/packages/frontend/nats/listings.ts
+++ b/src/packages/frontend/conat/listings.ts
@@ -15,10 +15,10 @@ import {
   createListingsApiClient,
   type ListingsApi,
   MIN_INTEREST_INTERVAL_MS,
-} from "@cocalc/nats/service/listings";
+} from "@cocalc/conat/service/listings";
 import { delay } from "awaiting";
 import { reuseInFlight } from "@cocalc/util/reuse-in-flight";
-import { getLogger } from "@cocalc/nats/client";
+import { getLogger } from "@cocalc/conat/client";
 
 const logger = getLogger("listings");
 
@@ -46,7 +46,6 @@ export class Listings extends EventEmitter {
   }
 
   private createClient = async () => {
-    await webapp_client.nats_client.addProjectPermissions([this.project_id]);
     let d = 3000;
     const MAX_DELAY_MS = 15000;
     while (this.state != "closed") {
diff --git a/src/packages/frontend/nats/types.ts b/src/packages/frontend/conat/types.ts
similarity index 100%
rename from src/packages/frontend/nats/types.ts
rename to src/packages/frontend/conat/types.ts
diff --git a/src/packages/frontend/nats/use-listing.ts b/src/packages/frontend/conat/use-listing.ts
similarity index 94%
rename from src/packages/frontend/nats/use-listing.ts
rename to src/packages/frontend/conat/use-listing.ts
index c6d46fc16b..d851b1454c 100644
--- a/src/packages/frontend/nats/use-listing.ts
+++ b/src/packages/frontend/conat/use-listing.ts
@@ -4,7 +4,7 @@ React Hook to provide access to directory listings in a project.
 This is NOT used yet, but seems like the right way to do directly listings in a modern
 clean dynamic way.  It would be used like this:
 
-import useListing from "@cocalc/frontend/nats/use-listing";
+import useListing from "@cocalc/frontend/conat/use-listing";
 function ListingTest({ path, compute_server_id }) {
   const listing = useListing({ path, compute_server_id });
   return 
{JSON.stringify(listing)}
; @@ -17,7 +17,7 @@ import { listingsClient, type ListingsClient, type Listing, -} from "@cocalc/nats/service/listings"; +} from "@cocalc/conat/service/listings"; import { useAsyncEffect } from "use-async-effect"; import { useProjectContext } from "@cocalc/frontend/project/context"; diff --git a/src/packages/frontend/course/common/student-assignment-info.tsx b/src/packages/frontend/course/common/student-assignment-info.tsx index 68e9733975..adbf682501 100644 --- a/src/packages/frontend/course/common/student-assignment-info.tsx +++ b/src/packages/frontend/course/common/student-assignment-info.tsx @@ -15,7 +15,6 @@ import { MarkdownInput } from "@cocalc/frontend/editors/markdown-input"; import { labels } from "@cocalc/frontend/i18n"; import { NotebookScores } from "@cocalc/frontend/jupyter/nbgrader/autograde"; import { webapp_client } from "@cocalc/frontend/webapp-client"; -import { to_json } from "@cocalc/util/misc"; import { BigTime } from "."; import { CourseActions } from "../actions"; import { NbgraderScores } from "../nbgrader/scores"; @@ -469,7 +468,7 @@ export function StudentAssignmentInfo({ function render_error(step: Steps, error) { if (typeof error !== "string") { - error = to_json(error); + error = `${error}`; } // We search for two different error messages, since different errors happen in // KuCalc versus other places cocalc runs. It depends on what is doing the copy. diff --git a/src/packages/frontend/course/export/file-use-times.ts b/src/packages/frontend/course/export/file-use-times.ts index 28d4067866..f9aa5eae8b 100644 --- a/src/packages/frontend/course/export/file-use-times.ts +++ b/src/packages/frontend/course/export/file-use-times.ts @@ -37,7 +37,7 @@ async function one_student_file_use_times( const times: { [path: string]: PathUseTimes } = {}; for (const path of paths) { const { edit_times, access_times } = - await webapp_client.nats_client.hub.db.fileUseTimes({ + await webapp_client.conat_client.hub.db.fileUseTimes({ project_id, path, target_account_id: account_id, diff --git a/src/packages/frontend/frame-editors/terminal-editor/nats-terminal-connection.ts b/src/packages/frontend/frame-editors/terminal-editor/nats-terminal-connection.ts index c07c3b059a..903084860f 100644 --- a/src/packages/frontend/frame-editors/terminal-editor/nats-terminal-connection.ts +++ b/src/packages/frontend/frame-editors/terminal-editor/nats-terminal-connection.ts @@ -2,14 +2,14 @@ import { webapp_client } from "@cocalc/frontend/webapp-client"; import { EventEmitter } from "events"; import { reuseInFlight } from "@cocalc/util/reuse-in-flight"; import { delay } from "awaiting"; -import { type DStream } from "@cocalc/nats/sync/dstream"; +import { type DStream } from "@cocalc/conat/sync/dstream"; import { createTerminalClient, type TerminalServiceApi, createBrowserService, SIZE_TIMEOUT_MS, createBrowserClient, -} from "@cocalc/nats/service/terminal"; +} from "@cocalc/conat/service/terminal"; import { NATS_OPEN_FILE_TOUCH_INTERVAL } from "@cocalc/util/nats"; type State = "disconnected" | "init" | "running" | "closed"; @@ -56,7 +56,7 @@ export class NatsTerminalConnection extends EventEmitter { this.terminalResize = terminalResize; this.openPaths = openPaths; this.closePaths = closePaths; - webapp_client.nats_client.on("connected", this.clearWriteQueue); + webapp_client.conat_client.on("connected", this.clearWriteQueue); } clearWriteQueue = () => { @@ -155,7 +155,7 @@ export class NatsTerminalConnection extends EventEmitter { }; close = async () => { - webapp_client.nats_client.removeListener("connected", this.clearWriteQueue); + webapp_client.conat_client.removeListener("connected", this.clearWriteQueue); this.stream?.close(); delete this.stream; this.service?.close(); @@ -204,8 +204,8 @@ export class NatsTerminalConnection extends EventEmitter { }); private getStream = async () => { - const { nats_client } = webapp_client; - return await nats_client.dstream({ + const { conat_client } = webapp_client; + return await conat_client.dstream({ name: `terminal-${this.path}`, project_id: this.project_id, ephemeral: this.ephemeral, diff --git a/src/packages/frontend/jupyter/browser-actions.ts b/src/packages/frontend/jupyter/browser-actions.ts index 3c99b517ec..1aa81da890 100644 --- a/src/packages/frontend/jupyter/browser-actions.ts +++ b/src/packages/frontend/jupyter/browser-actions.ts @@ -162,7 +162,7 @@ export class JupyterActions extends JupyterActions0 { // Put an entry in the project log once the jupyter notebook gets opened. // NOTE: Obviously, the project does NOT need to put entries in the log. this.syncdb.once("change", () => - this.redux.getProjectActions(this.project_id).log_opened_time(this.path), + this.redux?.getProjectActions(this.project_id).log_opened_time(this.path), ); // project doesn't care about cursors, but browser clients do: diff --git a/src/packages/frontend/jupyter/kernelspecs.ts b/src/packages/frontend/jupyter/kernelspecs.ts index ef9be9d2c5..596e55b57a 100644 --- a/src/packages/frontend/jupyter/kernelspecs.ts +++ b/src/packages/frontend/jupyter/kernelspecs.ts @@ -33,7 +33,7 @@ const getKernelSpec = reuseInFlight( return spec; } } - const api = webapp_client.nats_client.projectApi({ + const api = webapp_client.conat_client.projectApi({ project_id, compute_server_id, timeout: 7500, diff --git a/src/packages/frontend/jupyter/logo.tsx b/src/packages/frontend/jupyter/logo.tsx index 9a299d35e2..fe26b5f15e 100644 --- a/src/packages/frontend/jupyter/logo.tsx +++ b/src/packages/frontend/jupyter/logo.tsx @@ -112,7 +112,7 @@ async function getLogo({ if (!noCache && cache[key]) { return cache[key]; } - const api = client.nats_client.projectApi({ project_id }); + const api = client.conat_client.projectApi({ project_id }); const { filename, base64 } = await api.editor.jupyterKernelLogo(kernel, { noCache, }); diff --git a/src/packages/frontend/nats/api/index.ts b/src/packages/frontend/nats/api/index.ts deleted file mode 100644 index d048317314..0000000000 --- a/src/packages/frontend/nats/api/index.ts +++ /dev/null @@ -1,65 +0,0 @@ -/* - -*/ - -import { webapp_client } from "@cocalc/frontend/webapp-client"; -import { type BrowserApi } from "@cocalc/nats/browser-api"; -import { Svcm } from "@nats-io/services"; -import { browserSubject } from "@cocalc/nats/names"; - -export async function initApi() { - const { account_id } = webapp_client; - if (!account_id) { - throw Error("must be signed in"); - } - const { sessionId } = webapp_client.nats_client; - const { jc, nc } = await webapp_client.nats_client.getEnv(); - // @ts-ignore - const svcm = new Svcm(nc); - const subject = browserSubject({ - account_id, - sessionId, - service: "api", - }); - const service = await svcm.add({ - name: `account-${account_id}`, - version: "0.1.0", - description: "CoCalc Web Browser", - }); - const api = service.addEndpoint("api", { subject }); - listen({ api, jc }); -} - -async function listen({ api, jc }) { - for await (const mesg of api) { - const request = jc.decode(mesg.data); - handleApiRequest({ request, mesg, jc }); - } -} - -async function handleApiRequest({ request, mesg, jc }) { - let resp; - try { - const { name, args } = request as any; - console.log("handling browser.api request:", { name }); - resp = (await getResponse({ name, args })) ?? null; - } catch (err) { - resp = { error: `${err}` }; - } - mesg.respond(jc.encode(resp)); -} - -import * as system from "./system"; - -export const browserApi: BrowserApi = { - system, -}; - -async function getResponse({ name, args }) { - const [group, functionName] = name.split("."); - const f = browserApi[group]?.[functionName]; - if (f == null) { - throw Error(`unknown function '${name}'`); - } - return await f(...args); -} diff --git a/src/packages/frontend/nats/api/system.ts b/src/packages/frontend/nats/api/system.ts deleted file mode 100644 index 36e9ccf115..0000000000 --- a/src/packages/frontend/nats/api/system.ts +++ /dev/null @@ -1,10 +0,0 @@ -import { webapp_client } from "@cocalc/frontend/webapp-client"; - -export async function ping() { - return { now: Date.now(), sessionId: webapp_client.nats_client.sessionId }; -} - -import { version as versionNumber } from "@cocalc/util/smc-version"; -export async function version() { - return versionNumber; -} diff --git a/src/packages/frontend/nats/connection.ts b/src/packages/frontend/nats/connection.ts deleted file mode 100644 index c48234e39e..0000000000 --- a/src/packages/frontend/nats/connection.ts +++ /dev/null @@ -1,498 +0,0 @@ -/* -This should work for clients just like a normal NATS connection, but it -also dynamically reconnects to adjust permissions for projects -a browser client may connect to. - -This is needed ONLY because: - - - in NATS you can't change the permissions of an existing - connection when auth is done via auth-callout like we're doing. - This could become possible in the future, with some change - to the NATS server. Or maybe I just don't understand it. - - - There is a relatively small limit on the number of permissions for - one connection, which must be explicitly listed on creation of - the connection. However, in CoCalc, a single account can be a - collaborator on 20,000+ projects, and connect to any one of them - at any time. - - -The other option would be to have a separate nats connection for each -project that the browser has open. This is also viable and probably -simpler. We basically do that with primus. The drawbacks: - - - browsers limit the number of websockets for a tab to about 200 - - more connections ==> more load on nats and limits scalability - -I generally "feel" like this should be the optimal approach given -all the annoying constraints. We will likely do something -involving always including recent projects. - ---- - -Subscription Leaks: - -This code in a browser is useful for monitoring the number of subscriptions: - -setInterval(()=>console.log(cc.redux.getStore('page').get('nats').toJS().data.numSubscriptions),1000) - -If things are off, look at - -cc.client.nats_client.refCacheInfo() -*/ - -import { appBasePath } from "@cocalc/frontend/customize/app-base-path"; -import { webapp_client } from "@cocalc/frontend/webapp-client"; -import { join } from "path"; -import type { - NatsConnection, - ServerInfo, - Payload, - PublishOptions, - RequestOptions, - Msg, - SubscriptionOptions, - RequestManyOptions, - Stats, - Status, - Subscription, -} from "@nats-io/nats-core"; -import { connect as natsConnect } from "nats.ws"; -import { inboxPrefix } from "@cocalc/nats/names"; -import { CONNECT_OPTIONS } from "@cocalc/util/nats"; -import { EventEmitter } from "events"; -import { reuseInFlight } from "@cocalc/util/reuse-in-flight"; -import { asyncDebounce } from "@cocalc/util/async-utils"; -import { delay } from "awaiting"; -import { - getPermissionsCache, - type NatsProjectPermissionsCache, -} from "./permissions-cache"; -import { isEqual } from "lodash"; -import { alert_message } from "@cocalc/frontend/alerts"; -import jsonStable from "json-stable-stringify"; - -const MAX_SUBSCRIPTIONS = 400; - -// When we create a new connection to change permissions (i.e., open a project -// we have not opened in a while), we wait this long before draining the -// old connection. Draining immediately should work fine and be more efficient; -// however, it might cause more "disruption". On the other hand, this might -// mask a subtle bug hence set this to 0 for some debugging purposes. -const DELAY_UNTIL_DRAIN_PREVIOUS_CONNECTION_MS = 30 * 1000; -// for debugging/testing -// const DELAY_UNTIL_DRAIN_PREVIOUS_CONNECTION_MS = 0; - -function natsWebsocketUrl() { - return `${location.protocol == "https:" ? "wss" : "ws"}://${location.host}${join(appBasePath, "nats")}`; -} - -function connectingMessage({ server, project_ids }) { - console.log( - `Connecting to ${server} to use ${JSON.stringify(project_ids)}...`, - ); -} - -const getNewNatsConn = reuseInFlight(async ({ cache, user }) => { - const account_id = await getAccountId(); - if (!account_id) { - throw Error("you must be signed in before connecting to NATS"); - } - const server = natsWebsocketUrl(); - const project_ids = cache.get(); - connectingMessage({ server, project_ids }); - const options = { - name: jsonStable(user), - user: `account-${account_id}`, - ...CONNECT_OPTIONS, - servers: [server], - inboxPrefix: inboxPrefix({ account_id }), - }; - while (true) { - try { - console.log("Connecting to NATS..."); - return await natsConnect(options); - } catch (err) { - console.log(`WARNING: failed to connect to NATS -- will retry -- ${err}`); - await delay(3000); - } - } -}); - -// This is a hack to get around circular import during initial page load. -// TODO: properly clean up the import order -async function getAccountId() { - try { - return webapp_client.account_id; - } catch { - await delay(1); - return webapp_client.account_id; - } -} - -let cachedConnection: CoCalcNatsConnection | null = null; -export const connect = reuseInFlight(async () => { - if (cachedConnection != null) { - return cachedConnection; - } - const account_id = await getAccountId(); - const cache = getPermissionsCache(); - const project_ids = cache.get(); - const user = { account_id, project_ids }; - const nc = await getNewNatsConn({ cache, user }); - cachedConnection = new CoCalcNatsConnection(nc, user, cache); - return cachedConnection; -}); - -// There should be at most one single global instance of CoCalcNatsConnection! It -// is responsible for managing any connection to nats. It is assumed that nothing else -// does and that there is only one of these. -class CoCalcNatsConnection extends EventEmitter implements NatsConnection { - conn: NatsConnection; - prev: NatsConnection[] = []; - private standbyMode = false; - info?: ServerInfo; - protocol; - options; - user: { account_id: string; project_ids: string[] }; - permissionsCache: NatsProjectPermissionsCache; - currStatus?; - - constructor(conn, user, permissionsCache) { - super(); - this.setMaxListeners(500); - this.conn = conn; - this.protocol = conn.protocol; - this.info = conn.info; - this.options = conn.options; - this.user = { - project_ids: uniq(user.project_ids), - account_id: user.account_id, - }; - this.permissionsCache = permissionsCache; - this.updateCache(); - } - - standby = () => { - if (this.standbyMode) { - return; - } - this.standbyMode = true; - // standby is used when you are idle, so you should have nothing important to save. - // Also, we can't get rid of this.conn until we have a new connection, which would make - // no sense here.... so we do NOT use this.conn.drain(). - this.conn.close(); - // @ts-ignore - if (this.conn.protocol) { - // @ts-ignore - this.conn.protocol.connected = false; - } - }; - - resume = async () => { - console.log("nats connection: resume"); - if (!this.standbyMode) { - console.log("nats connection: not in standby mode"); - return; - } - this.standbyMode = false; - // @ts-ignore - if (this.conn.protocol?.connected) { - console.log("nats connection: already connected"); - return; - } - console.log("nats connection: getNewNatsConn"); - const conn = await getNewNatsConn({ - cache: this.permissionsCache, - user: this.user, - }); - console.log("nats connection: got conn"); - // @ts-ignore - this.conn = conn; - // @ts-ignore - this.protocol = conn.protocol; - // @ts-ignore - this.info = conn.info; - // @ts-ignore - this.options = conn.options; - this.emit("reconnect"); - }; - - // gets *actual* projects that this connection has permission to access - getProjectPermissions = async (): Promise => { - const info = await this.getConnectionInfo(); - const project_ids: string[] = []; - for (const x of info.data.permissions.publish.allow) { - if (x.startsWith("project.")) { - const v = x.split("."); - project_ids.push(v[1]); - } - } - return project_ids; - }; - - // one time on first connection we set the cache to match - // the actual projects, so we don't keep requesting ones we - // don't have access to, e.g., on sign out, then sign in as - // different user (or being removed as collaborator). - private updateCache = async () => { - try { - this.permissionsCache.set(await this.getProjectPermissions()); - } catch {} - }; - - getConnectionInfo = async () => { - return await webapp_client.nats_client.info(this.conn); - }; - - private subscriptionPenalty = 20000; - numSubscriptions = () => { - // @ts-ignore - let subs = this.conn.protocol.subscriptions.subs.size; - for (const nc of this.prev) { - // @ts-ignore - subs += nc.protocol.subscriptions.subs.size; - } - if (subs >= MAX_SUBSCRIPTIONS) { - // For now, we put them in standby for a bit - // then resume. This saves any work and disconnects them. - // They then get reconnected. This might help. - console.warn( - `WARNING: Using ${subs} subscriptions which exceeds the limit of ${MAX_SUBSCRIPTIONS}.`, - ); - alert_message({ - type: "warning", - message: - "Your browser is using too many resources; refresh your browser or close some files.", - }); - this.standby(); - this.subscriptionPenalty *= 1.25; - setTimeout(this.resume, this.subscriptionPenalty); - } - return subs; - }; - - getSubscriptions = (): string[] => { - const subjects: string[] = []; - // @ts-ignore - for (const sub of this.conn.protocol.subscriptions.subs) { - subjects.push(sub[1].subject); - } - return subjects; - }; - - addProjectPermissions = async (project_ids: string[]) => { - this.permissionsCache.add(project_ids); - await this.updateProjectPermissions(); - }; - - // this is debounce since adding permissions tends to come in bursts: - private updateProjectPermissions = asyncDebounce( - async () => { - let project_ids = this.permissionsCache.get(); - if (isEqual(this.user.project_ids, project_ids)) { - // nothing to do - return; - } - const account_id = await getAccountId(); - if (!account_id) { - throw Error("you must be signed in before connecting to NATS"); - } - const user = { - account_id, - project_ids, - }; - const server = natsWebsocketUrl(); - connectingMessage({ server, project_ids }); - const options = { - // name: used to convey who we claim to be: - name: jsonStable(user), - // user: displayed in logs - user: `account-${account_id}`, - ...CONNECT_OPTIONS, - servers: [server], - inboxPrefix: inboxPrefix({ account_id }), - }; - const cur = this.conn; - const conn = (await natsConnect(options)) as any; - - this.conn = conn; - this.prev.push(cur); - this.currStatus?.stop(); - - this.protocol = conn.protocol; - this.info = conn.info; - this.options = options; - this.user = user; - // tell clients they should reconnect, since the connection they - // had used is going to drain soon. - this.emit("reconnect"); - // we wait a while, then drain the previous connection. - // Since connection usually change rarely, it's fine to wait a while, - // to minimize disruption. Make this short as a sort of "bug stress test". - delayThenDrain(cur, DELAY_UNTIL_DRAIN_PREVIOUS_CONNECTION_MS); - }, - 1000, - { leading: true, trailing: true }, - ); - - async closed(): Promise { - return await this.conn.closed(); - } - - async close(): Promise { - await this.conn.close(); - } - - publish(subject: string, payload?: Payload, options?: PublishOptions): void { - this.conn.publish(subject, payload, options); - } - - publishMessage(msg: Msg): void { - this.conn.publishMessage(msg); - } - - respondMessage(msg: Msg): boolean { - return this.conn.respondMessage(msg); - } - - subscribe(subject: string, opts?: SubscriptionOptions): Subscription { - return this.conn.subscribe(subject, opts); - } - - // not in the public api, but used by jetstream. - _resub(s: Subscription, subject: string, max?: number) { - return (this.conn as any)._resub(s, subject, max); - } - - // not in the public api - _check(subject: string, sub: boolean, pub: boolean) { - return (this.conn as any)._check(subject, sub, pub); - } - - async request( - subject: string, - payload?: Payload, - opts?: RequestOptions, - ): Promise { - return await this.conn.request(subject, payload, opts); - } - - async requestMany( - subject: string, - payload?: Payload, - opts?: Partial, - ): Promise> { - return await this.conn.requestMany(subject, payload, opts); - } - - async flush(): Promise { - this.conn.flush(); - } - - async drain(): Promise { - await this.conn.drain(); - } - - isClosed(): boolean { - return this.conn.isClosed(); - } - - isDraining(): boolean { - return this.conn.isDraining(); - } - - getServer(): string { - return this.conn.getServer(); - } - - // The kv and stream clients use this, which alerts when connection is closing. - // They also get the 'reconnect' event and drop this connection and get a new one, - // thus also getting a new status. - status(): AsyncIterable { - return this.conn.status(); - } - - // The main client here (./client.ts) uses this to know the status of the primary - // connection, mainly for presentation in the UI. Thus this has to always have - // the latest connection status. - async *statusOfCurrentConnection() { - while (true) { - this.currStatus = this.conn.status(); - for await (const x of this.currStatus) { - this.emit("status", x); - yield x; - } - } - } - - // sum total of all data across *all* connections we've made here. - stats(): Stats & { numSubscriptions: number } { - // @ts-ignore: undocumented API - let { inBytes, inMsgs, outBytes, outMsgs } = this.conn.stats(); - for (const conn of this.prev) { - // @ts-ignore - const x = conn.stats(); - inBytes += x.inBytes; - outBytes += x.outBytes; - inMsgs += x.inMsgs; - outMsgs += x.outMsgs; - } - return { - inBytes, - inMsgs, - outBytes, - outMsgs, - numSubscriptions: this.numSubscriptions(), - }; - } - - async rtt(): Promise { - return await this.conn.rtt(); - } - - async reconnect(): Promise { - try { - await this.conn.reconnect(); - } catch (err) { - console.warn(`NATS reconnect failed -- ${err}`); - } - } - - get features() { - return this.protocol.features; - } - - getServerVersion(): SemVer | undefined { - const info = this.info; - return info ? parseSemVer(info.version) : undefined; - } -} - -async function delayThenDrain(conn, time) { - await delay(time); - try { - await conn.drain(); - } catch (err) { - console.log("delayThenDrain err", err); - } -} - -export { type CoCalcNatsConnection }; - -export type SemVer = { major: number; minor: number; micro: number }; -export function parseSemVer(s = ""): SemVer { - const m = s.match(/(\d+).(\d+).(\d+)/); - if (m) { - return { - major: parseInt(m[1]), - minor: parseInt(m[2]), - micro: parseInt(m[3]), - }; - } - throw new Error(`'${s}' is not a semver value`); -} - -function uniq(v: string[]): string[] { - return Array.from(new Set(v)); -} diff --git a/src/packages/frontend/nats/permissions-cache.ts b/src/packages/frontend/nats/permissions-cache.ts deleted file mode 100644 index 21d9393fb7..0000000000 --- a/src/packages/frontend/nats/permissions-cache.ts +++ /dev/null @@ -1,124 +0,0 @@ -import { isValidUUID } from "@cocalc/util/misc"; -import { appBasePath } from "@cocalc/frontend/customize/app-base-path"; - -// This limit is because there is a limit on -// the length authentication protocol message, which is what we use to send the list of projects. -// This limit is the max_control_line (see https://docs.nats.io/running-a-nats-service/configuration) -// By default it is 4KB, which supports about 50 projects. We increase it in the server -// to 32KB and allow up to 250 projects, which is way more than enough (oldest projects are -// automatically removed as needed). -const MAX_PROJECT_PERMISSIONS = 250; -const NORMAL_PROJECT_PERMISSIONS = 25; -const CUTOFF = 1000 * 60 * 60 * 24 * 7; // 1 week ago - -// For dev/testing -- uncomment these to cause chaos as you click to open projects -// and close them and if you open several at once there's no permissions. then -// test that things don't crash, but just keep trying, properly. -// const MAX_PROJECT_PERMISSIONS = 4; -// const NORMAL_PROJECT_PERMISSIONS = 1; -// const CUTOFF = 1000 * 30; - -type NatsProjectCache = { [project_id: string]: number }; - -const localStorageKey = `${appBasePath}-nats-projects`; -console.log(localStorageKey); - -let cache: NatsProjectPermissionsCache | null = null; -export function getPermissionsCache() { - if (cache == null) { - cache = new NatsProjectPermissionsCache(); - } - return cache; -} - -export class NatsProjectPermissionsCache { - cache: NatsProjectCache; - - constructor() { - this.cache = this.loadCache(); - } - - add = (project_ids: string[]) => { - for (const project_id of project_ids) { - if (!isValidUUID(project_id)) { - throw Error(`invalid project_id -- ${project_id}`); - } - this.cache[project_id] = Date.now(); - } - this.enforceLimits(); - this.saveCache(); - }; - - get = () => { - return Object.keys(this.cache).sort(); - }; - - set = (project_ids: string[]) => { - this.cache = {}; - const now = Date.now(); - for (const project_id of project_ids) { - this.cache[project_id] = now; - } - this.enforceLimits(); - this.saveCache(); - }; - - private enforceLimits = () => { - const k = Object.keys(this.cache); - if (k.length <= NORMAL_PROJECT_PERMISSIONS) { - return; - } - let n = k.length; - const cutoff = new Date(Date.now() - CUTOFF).valueOf(); - for (const project_id in this.cache) { - if (this.cache[project_id] <= cutoff) { - delete this.cache[project_id]; - n -= 1; - if (n <= NORMAL_PROJECT_PERMISSIONS) { - return; - } - } - } - if (n > MAX_PROJECT_PERMISSIONS) { - const v = Object.values(this.cache); - v.sort(); - const c = v.slice(-MAX_PROJECT_PERMISSIONS)[0]; - if (c != null) { - for (const project_id in this.cache) { - if (this.cache[project_id] <= c) { - delete this.cache[project_id]; - n -= 1; - if (n <= MAX_PROJECT_PERMISSIONS) { - return; - } - } - } - } - } - }; - - private saveCache = () => { - localStorage[localStorageKey] = JSON.stringify(this.cache); - }; - - private loadCache = (): NatsProjectCache => { - const s = localStorage[localStorageKey]; - if (!s) { - return {}; - } - // don't trust s at all; - try { - const a = JSON.parse(s) as any; - const cache: NatsProjectCache = {}; - for (const project_id in a) { - if (isValidUUID(project_id)) { - cache[project_id] = parseInt(a[project_id]); - } - } - return cache; - } catch (err) { - console.log("warning: ", err); - return {}; - } - }; -} diff --git a/src/packages/frontend/package.json b/src/packages/frontend/package.json index d5c3c4b420..9af374b26d 100644 --- a/src/packages/frontend/package.json +++ b/src/packages/frontend/package.json @@ -41,10 +41,10 @@ "@cocalc/assets": "workspace:*", "@cocalc/cdn": "workspace:*", "@cocalc/comm": "workspace:*", + "@cocalc/conat": "workspace:*", "@cocalc/frontend": "workspace:*", "@cocalc/jupyter": "workspace:*", "@cocalc/local-storage-lru": "^2.4.3", - "@cocalc/nats": "workspace:*", "@cocalc/sync": "workspace:*", "@cocalc/util": "workspace:*", "@cocalc/widgets": "^1.2.0", @@ -58,10 +58,6 @@ "@jupyter-widgets/output": "^4.1.0", "@lumino/widgets": "^1.31.1", "@microlink/react-json-view": "^1.23.3", - "@nats-io/jetstream": "3.0.0", - "@nats-io/kv": "3.0.0", - "@nats-io/nats-core": "3.0.0", - "@nats-io/services": "3.0.0", "@orama/orama": "3.0.0-rc-3", "@react-hook/mouse-position": "^4.1.3", "@rinsuki/lz4-ts": "^1.0.1", @@ -132,7 +128,6 @@ "md5": "^2", "memoize-one": "^5.1.1", "mermaid": "^11.4.1", - "nats.ws": "^1.30.2", "node-forge": "^1.0.0", "octicons": "^3.5.0", "onecolor": "^3.1.0", diff --git a/src/packages/frontend/project/directory-listing.ts b/src/packages/frontend/project/directory-listing.ts index 08b08dc2f0..a3f0c00510 100644 --- a/src/packages/frontend/project/directory-listing.ts +++ b/src/packages/frontend/project/directory-listing.ts @@ -112,7 +112,7 @@ export async function get_directory_listing(opts: ListingOpts) { } } -import { Listings } from "@cocalc/frontend/nats/listings"; +import { Listings } from "@cocalc/frontend/conat/listings"; export async function get_directory_listing2(opts: ListingOpts): Promise { log("get_directory_listing2", opts); diff --git a/src/packages/frontend/project/explorer/file-listing/file-listing.tsx b/src/packages/frontend/project/explorer/file-listing/file-listing.tsx index 89f304a27e..4a75f3af42 100644 --- a/src/packages/frontend/project/explorer/file-listing/file-listing.tsx +++ b/src/packages/frontend/project/explorer/file-listing/file-listing.tsx @@ -23,7 +23,7 @@ import { useTypedRedux, } from "@cocalc/frontend/app-framework"; import useVirtuosoScrollHook from "@cocalc/frontend/components/virtuoso-scroll-hook"; -import { WATCH_THROTTLE_MS } from "@cocalc/frontend/nats/listings"; +import { WATCH_THROTTLE_MS } from "@cocalc/frontend/conat/listings"; import { ProjectActions } from "@cocalc/frontend/project_actions"; import { MainConfiguration } from "@cocalc/frontend/project_configuration"; import * as misc from "@cocalc/util/misc"; diff --git a/src/packages/frontend/project/explorer/path-navigator.tsx b/src/packages/frontend/project/explorer/path-navigator.tsx index 6ad7ba6165..133b4ea06b 100644 --- a/src/packages/frontend/project/explorer/path-navigator.tsx +++ b/src/packages/frontend/project/explorer/path-navigator.tsx @@ -35,7 +35,7 @@ export const PathNavigator: React.FC = React.memo( const actions = useActions({ project_id }); function make_path() { - const v: ReturnType[] = []; + const v: any[] = []; const current_path_depth = (current_path == "" ? 0 : current_path.split("/").length) - 1; diff --git a/src/packages/frontend/project/page/flyouts/files.tsx b/src/packages/frontend/project/page/flyouts/files.tsx index ec6fdb6615..97c1687d3f 100644 --- a/src/packages/frontend/project/page/flyouts/files.tsx +++ b/src/packages/frontend/project/page/flyouts/files.tsx @@ -35,7 +35,7 @@ import { DirectoryListingEntry, FileMap, } from "@cocalc/frontend/project/explorer/types"; -import { WATCH_THROTTLE_MS } from "@cocalc/frontend/nats/listings"; +import { WATCH_THROTTLE_MS } from "@cocalc/frontend/conat/listings"; import { mutate_data_to_compute_public_files } from "@cocalc/frontend/project_store"; import track from "@cocalc/frontend/user-tracking"; import { diff --git a/src/packages/frontend/project/websocket/api.ts b/src/packages/frontend/project/websocket/api.ts index 2597e81e56..c88d664007 100644 --- a/src/packages/frontend/project/websocket/api.ts +++ b/src/packages/frontend/project/websocket/api.ts @@ -31,14 +31,14 @@ import type { } from "@cocalc/comm/websocket/types"; import call from "@cocalc/sync/client/call"; import { webapp_client } from "@cocalc/frontend/webapp-client"; -import { type ProjectApi } from "@cocalc/nats/project-api"; +import { type ProjectApi } from "@cocalc/conat/project-api"; import type { ExecuteCodeOutput, ExecuteCodeOptions, } from "@cocalc/util/types/execute-code"; -import { formatterClient } from "@cocalc/nats/service/formatter"; -import { syncFsClientClient } from "@cocalc/nats/service/syncfs-client"; -// import { syncFsServerClient } from "@cocalc/nats/service/syncfs-server"; +import { formatterClient } from "@cocalc/conat/service/formatter"; +import { syncFsClientClient } from "@cocalc/conat/service/syncfs-client"; +// import { syncFsServerClient } from "@cocalc/conat/service/syncfs-server"; export class API { private conn; @@ -68,7 +68,7 @@ export class API { } const key = `${compute_server_id}-${timeout}`; if (this.apiCache[key] == null) { - this.apiCache[key] = webapp_client.nats_client.projectApi({ + this.apiCache[key] = webapp_client.conat_client.projectApi({ project_id: this.project_id, compute_server_id, timeout, @@ -82,7 +82,7 @@ export class API { }; private _call = async (mesg: Mesg, timeout: number): Promise => { - return await webapp_client.nats_client.projectWebsocketApi({ + return await webapp_client.conat_client.projectWebsocketApi({ project_id: this.project_id, mesg, timeout, @@ -90,25 +90,13 @@ export class API { }; private getChannel = async (channel_name: string) => { - const natsConn = await webapp_client.nats_client.primus(this.project_id); + const natsConn = await webapp_client.conat_client.primus(this.project_id); // TODO -- typing return natsConn.channel(channel_name) as unknown as Channel; }; call = async (mesg: Mesg, timeout: number) => { - try { - return await this._call(mesg, timeout); - } catch (err) { - if (err.code == "PERMISSIONS_VIOLATION") { - // request update of our credentials to include this project, then try again - await webapp_client.nats_client.addProjectPermissions([ - this.project_id, - ]); - return await this._call(mesg, timeout); - } else { - throw err; - } - } + return await this._call(mesg, timeout); }; getComputeServerId = (path: string) => { diff --git a/src/packages/frontend/project_actions.ts b/src/packages/frontend/project_actions.ts index 880260d7e1..5ee82a42c3 100644 --- a/src/packages/frontend/project_actions.ts +++ b/src/packages/frontend/project_actions.ts @@ -107,7 +107,8 @@ import { get_editor } from "./editors/react-wrapper"; import { computeServerManager, type ComputeServerManager, -} from "@cocalc/nats/compute/manager"; +} from "@cocalc/conat/compute/manager"; +import { reuseInFlight } from "@cocalc/util/reuse-in-flight"; const { defaults, required } = misc; @@ -297,7 +298,6 @@ export class ProjectActions extends Actions { this.new_filename_generator = new NewFilenames("", false); this._activity_indicator_timers = {}; this.open_files = new OpenFiles(this); - this.initNatsPermissions(); this.initComputeServers(); } @@ -317,7 +317,7 @@ export class ProjectActions extends Actions { delete this.open_files; this.computeServerManager?.close(); delete this.computeServerManager; - webapp_client.nats_client.closeOpenFiles(this.project_id); + webapp_client.conat_client.closeOpenFiles(this.project_id); }; private save_session(): void { @@ -1938,75 +1938,80 @@ export class ProjectActions extends Actions { // retrieve project configuration (capabilities, etc.) from the back-end // also return it as a convenience - async init_configuration( - aspect: ConfigurationAspect = "main", - no_cache = false, - ): Promise { - this.setState({ configuration_loading: true }); + init_configuration = reuseInFlight( + async ( + aspect: ConfigurationAspect = "main", + no_cache = false, + ): Promise => { + console.log("init_configuration", this.project_id); + this.setState({ configuration_loading: true }); - const store = this.get_store(); - if (store == null) { - // console.warn("project_actions::init_configuration: no store"); - this.setState({ configuration_loading: false }); - return; - } + const store = this.get_store(); + if (store == null) { + // console.warn("project_actions::init_configuration: no store"); + this.setState({ configuration_loading: false }); + return; + } - const prev = store.get("configuration") as ProjectConfiguration; - if (!no_cache) { - // already done before? - if (prev != null) { - const conf = prev.get(aspect) as Configuration; - if (conf != null) { - this.setState({ configuration_loading: false }); - return conf; + const prev = store.get("configuration") as ProjectConfiguration; + if (!no_cache) { + // already done before? + if (prev != null) { + const conf = prev.get(aspect) as Configuration; + if (conf != null) { + this.setState({ configuration_loading: false }); + return conf; + } } } - } - // we do not know the configuration aspect. "next" will be the updated datastructure. - let next; + // we do not know the configuration aspect. "next" will be the updated datastructure. + let next; - await retry_until_success({ - f: async () => { - try { - next = await get_configuration( - webapp_client, - this.project_id, - aspect, - prev, - no_cache, - ); - } catch (e) { - // not implemented error happens, when the project is still the old one - // in that case, do as if everything is available - if (e.message.indexOf("not implemented") >= 0) { - return null; + await retry_until_success({ + f: async () => { + try { + next = await get_configuration( + webapp_client, + this.project_id, + aspect, + prev, + no_cache, + ); + } catch (e) { + // not implemented error happens, when the project is still the old one + // in that case, do as if everything is available + if (e.message.indexOf("not implemented") >= 0) { + return null; + } + // console.log( + // `WARNING -- project_actions::init_configuration err: ${e}`, + // ); + throw e; } - // console.log("project_actions::init_configuration err:", e); - throw e; - } - }, - start_delay: 1000, - max_delay: 5000, - desc: "project_actions::init_configuration", - }); + }, + start_delay: 2000, + max_delay: 5000, + desc: "project_actions::init_configuration", + }); - // there was a problem or configuration is not known - if (next == null) { - this.setState({ configuration_loading: false }); - return; - } + // there was a problem or configuration is not known + if (next == null) { + this.setState({ configuration_loading: false }); + return; + } - this.setState( - fromJS({ - configuration: next, - available_features: feature_is_available(next), - configuration_loading: false, - } as any), - ); + this.setState( + fromJS({ + configuration: next, + available_features: feature_is_available(next), + configuration_loading: false, + } as any), + ); - return next.get(aspect) as Configuration; - } + return next.get(aspect) as Configuration; + }, + ); // this is called once by the project initialization private async init_library() { @@ -2113,9 +2118,9 @@ export class ProjectActions extends Actions { misc.retry_until_success({ f: fetch, - start_delay: 1000, - max_delay: 10000, - max_time: 1000 * 60 * 3, // try for at most 3 minutes + start_delay: 15000, + max_delay: 30000, + max_time: 1000 * 60, // try for at most 3 minutes cb: () => { _init_library_index_ongoing[this.project_id] = false; }, @@ -3504,7 +3509,7 @@ export class ProjectActions extends Actions { this.setRecentlyDeleted(path, 0); (async () => { try { - const o = await webapp_client.nats_client.openFiles(this.project_id); + const o = await webapp_client.conat_client.openFiles(this.project_id); o.setNotDeleted(path); } catch (err) { console.log("WARNING: issue undeleting file", err); @@ -3535,16 +3540,6 @@ export class ProjectActions extends Actions { ); }; - private initNatsPermissions = async () => { - try { - await webapp_client.nats_client.addProjectPermissions([this.project_id]); - } catch (err) { - console.log( - `WARNING: issue getting permission to access project ${this.project_id} -- ${err}`, - ); - } - }; - private closeComputeServers = () => { computeServers.close(this.project_id); this.computeServerManager?.removeListener( @@ -3644,7 +3639,7 @@ export class ProjectActions extends Actions { }; projectApi = (opts?) => { - return webapp_client.nats_client.projectApi({ + return webapp_client.conat_client.projectApi({ ...opts, project_id: this.project_id, }); diff --git a/src/packages/frontend/project_configuration.ts b/src/packages/frontend/project_configuration.ts index 3ee6b14234..37aea5beb9 100644 --- a/src/packages/frontend/project_configuration.ts +++ b/src/packages/frontend/project_configuration.ts @@ -60,11 +60,7 @@ export function isMainConfiguration( ): config is MainConfiguration { const mconf = config; // don't test for disabled_ext, because that's added later - return ( - isMainCapabilities(mconf.capabilities) && - mconf.timestamp != null && - typeof mconf.timestamp == "string" - ); + return isMainCapabilities(mconf.capabilities) && !!mconf.timestamp; } // if prettier exists, this adds all syntaxes to format via prettier @@ -165,11 +161,16 @@ export async function get_configuration( aspect, no_cache, ); - if (config == null) return prev; + if (config == null) { + return prev; + } // console.log("project_actions::init_configuration", aspect, config); if (aspect == ("main" as ConfigurationAspect)) { - if (!isMainConfiguration(config)) return; + if (!isMainConfiguration(config)) { + console.log("reject", isMainConfiguration(config), config); + return; + } const caps = config.capabilities; // TEST x11/latex/sage disabilities // caps.x11 = false; @@ -184,7 +185,9 @@ export async function get_configuration( // jupyter.lab = false; // TEST no kernelspec → we can't read any kernels → entirely disable jupyter // jupyter.kernelspec = false; - if (!jupyter.kernelspec) caps.jupyter = false; + if (!jupyter.kernelspec) { + caps.jupyter = false; + } } // disable/hide certain file extensions if certain capabilities are missing diff --git a/src/packages/frontend/project_store.ts b/src/packages/frontend/project_store.ts index db7e4d711b..a29f2b7bc5 100644 --- a/src/packages/frontend/project_store.ts +++ b/src/packages/frontend/project_store.ts @@ -26,7 +26,7 @@ import { TypedMap, } from "@cocalc/frontend/app-framework"; import { ProjectLogMap } from "@cocalc/frontend/project/history/types"; -import { Listings, listings } from "@cocalc/frontend/nats/listings"; +import { Listings, listings } from "@cocalc/frontend/conat/listings"; import { FILE_ACTIONS, ProjectActions, diff --git a/src/packages/frontend/tsconfig.json b/src/packages/frontend/tsconfig.json index c91da6e99e..11043dc262 100644 --- a/src/packages/frontend/tsconfig.json +++ b/src/packages/frontend/tsconfig.json @@ -28,7 +28,7 @@ "references": [ { "path": "../comm" }, { "path": "../jupyter" }, - { "path": "../nats" }, + { "path": "../conat" }, { "path": "../sync" }, { "path": "../util" } ] diff --git a/src/packages/hub/hub.ts b/src/packages/hub/hub.ts index 5e799a4c85..7be1adc70c 100644 --- a/src/packages/hub/hub.ts +++ b/src/packages/hub/hub.ts @@ -42,12 +42,10 @@ import { getLogger } from "./logger"; import initDatabase, { database } from "./servers/database"; import initExpressApp from "./servers/express-app"; import { - loadNatsConfiguration, - initNatsDatabaseServer, - initNatsChangefeedServer, - initNatsTieredStorage, - initNatsServer, -} from "@cocalc/server/nats"; + loadConatConfiguration, + initConatChangefeedServer, + initConatMicroservices, +} from "@cocalc/server/conat"; import initHttpRedirect from "./servers/http-redirect"; import initPrimus from "./servers/primus"; import initVersionServer from "./servers/version"; @@ -185,24 +183,15 @@ async function startServer(): Promise { initIdleTimeout(projectControl); } - // all configuration MUST load nats configuration. This loads - // credentials to use nats from the database, and is needed - // by many things. - await loadNatsConfiguration(); + // This loads from the database credentials to use Conat. + await loadConatConfiguration(); - if (program.natsServer) { - await initNatsServer(); + if (program.conatMicroservices) { + await initConatMicroservices(); } - if (program.natsDatabaseServer) { - await initNatsDatabaseServer(); - } - if (program.natsChangefeedServer) { - await initNatsChangefeedServer(); - } - if (program.natsTieredStorage) { - // currently there must be exactly ONE of these, running on the same - // node as the nats-server. E.g., for development it's just part of the server. - await initNatsTieredStorage(); + + if (program.conatChangefeedServer) { + await initConatChangefeedServer(); } if (program.websocketServer) { @@ -243,6 +232,7 @@ async function startServer(): Promise { const { router, httpServer } = await initExpressApp({ isPersonal: program.personal, projectControl, + conatServer: !!program.conatServer, proxyServer: !!program.proxyServer, nextServer: !!program.nextServer, cert: program.httpsCert, @@ -255,8 +245,6 @@ async function startServer(): Promise { process.env["NODE_ENV"] == "development", }); - //initNatsServer(); - // The express app create via initExpressApp above **assumes** that init_passport is done // or complains a lot. This is obviously not really necessary, but we leave it for now. await callback2(init_passport, { @@ -428,16 +416,16 @@ async function main(): Promise { ) .option("--websocket-server", "run a websocket server in this process") .option( - "--nats-server", - "run a hub that servers standard nats microservices, e.g., LLM's, authentication, etc. There should be at least one of these.", + "--conat-server", + "run a hub that provides a single-core conat server (socketio) as part of its http server. This is needed for dev and small deployments of cocalc.", ) .option( - "--nats-database-server", - "run NATS microservice to provide access (including changefeeds) to the database", + "--conat-microservices", + "run a hub that serves standard conat microservices, e.g., LLM's, authentication, etc. There should be at least one of these.", ) .option( - "--nats-changefeed-server", - "run NATS microservice to provide postgres based changefeeds; there must be AT LEAST one of these.", + "--conat-changefeed-server", + "run Conat microservice to provide postgre/s based changefeeds; there must be AT LEAST one of these.", ) .option("--proxy-server", "run a proxy server in this process") .option( @@ -537,9 +525,9 @@ async function main(): Promise { } if (program.all) { program.websocketServer = - program.natsServer = - program.natsChangefeedServer = - program.natsTieredStorage = + program.conatMicroservices = + program.conatServer = + program.conatChangefeedServer = program.proxyServer = program.nextServer = program.mentions = diff --git a/src/packages/hub/package.json b/src/packages/hub/package.json index 206f507d76..02d6e21ff5 100644 --- a/src/packages/hub/package.json +++ b/src/packages/hub/package.json @@ -10,10 +10,10 @@ "@cocalc/assets": "workspace:*", "@cocalc/backend": "workspace:*", "@cocalc/cdn": "workspace:*", + "@cocalc/conat": "workspace:*", "@cocalc/database": "workspace:*", "@cocalc/frontend": "workspace:*", "@cocalc/hub": "workspace:*", - "@cocalc/nats": "workspace:*", "@cocalc/next": "workspace:*", "@cocalc/server": "workspace:*", "@cocalc/static": "workspace:*", @@ -39,7 +39,7 @@ "escape-html": "^1.0.3", "express": "^4.21.2", "formidable": "^3.5.4", - "http-proxy-3": "^1.20.0", + "http-proxy-3": "^1.20.5", "immutable": "^4.3.0", "jquery": "^3.6.0", "json-stable-stringify": "^1.0.1", @@ -49,7 +49,6 @@ "mime-types": "^2.1.35", "mkdirp": "^1.0.4", "ms": "2.1.2", - "nats": "^2.29.3", "next": "14.2.28", "parse-domain": "^5.0.0", "passport": "^0.6.0", diff --git a/src/packages/hub/proxy/handle-request.ts b/src/packages/hub/proxy/handle-request.ts index 97b39290b0..99c4b01369 100644 --- a/src/packages/hub/proxy/handle-request.ts +++ b/src/packages/hub/proxy/handle-request.ts @@ -10,7 +10,7 @@ import { stripBasePath } from "./util"; import { ProjectControlFunction } from "@cocalc/server/projects/control"; import siteUrl from "@cocalc/database/settings/site-url"; import { parseReq } from "./parse"; -import { readFile as readProjectFile } from "@cocalc/nats/files/read"; +import { readFile as readProjectFile } from "@cocalc/conat/files/read"; import { path_split } from "@cocalc/util/misc"; import { once } from "@cocalc/util/async-utils"; import hasAccess from "./check-for-access-to-project"; @@ -84,7 +84,7 @@ export default function init({ projectControl, isPersonal }: Options) { // TODO: parseReq is called again in getTarget so need to refactor... const { type, project_id } = parsed; if (type == "files") { - dbg("handling the request via nats"); + dbg("handling the request via conat file streaming"); if ( !(await hasAccess({ project_id, @@ -103,7 +103,7 @@ export default function init({ projectControl, isPersonal }: Options) { j = url.length; } const path = decodeURIComponent(url.slice(i + "files/".length, j)); - dbg("NATs: get", { project_id, path, compute_server_id, url }); + dbg("conat: get file", { project_id, path, compute_server_id, url }); const fileName = path_split(path).tail; const contentType = mime.lookup(fileName); if ( diff --git a/src/packages/hub/proxy/handle-upgrade.ts b/src/packages/hub/proxy/handle-upgrade.ts index 1cbfc351a7..ba9cbed02c 100644 --- a/src/packages/hub/proxy/handle-upgrade.ts +++ b/src/packages/hub/proxy/handle-upgrade.ts @@ -8,7 +8,6 @@ import stripRememberMeCookie from "./strip-remember-me-cookie"; import { getTarget } from "./target"; import { stripBasePath } from "./util"; import { versionCheckFails } from "./version"; -import { proxyNatsWebsocket } from "@cocalc/hub/servers/nats"; const logger = getLogger("proxy:handle-upgrade"); @@ -24,24 +23,21 @@ export default function init( const re = new RegExp(proxy_regexp); async function handleProxyUpgradeRequest(req, socket, head): Promise { + if (!req.url.match(re)) { + // something else (e.g., the socket.io server) is handling this websocket; + // we do NOT mess with anything in this case + return; + } + socket.on("error", (err) => { // server will crash sometimes without this: logger.debug("WARNING -- websocket socket error", err); }); + const dbg = (...args) => { logger.silly(req.url, ...args); }; dbg("got upgrade request from url=", req.url); - const url = stripBasePath(req.url); - - if (url == "/nats") { - proxyNatsWebsocket(req, socket, head); - return; - } - - if (!req.url.match(re)) { - throw Error(`url=${req.url} does not support upgrade`); - } // Check that minimum version requirement is satisfied (this is in the header). // This is to have a way to stop buggy clients from causing trouble. It's a purely @@ -61,6 +57,7 @@ export default function init( } dbg("calling getTarget"); + const url = stripBasePath(req.url); const { host, port, internal_url } = await getTarget({ url, isPersonal, @@ -76,7 +73,7 @@ export default function init( } if (cache.has(target)) { dbg("using cache"); - const proxy = cache.get(target); + const proxy = cache.get(target)!; proxy.ws(req, socket, head); return; } @@ -142,6 +139,10 @@ export default function init( // NOTE: I had to do something similar that is in packages/next/lib/init.js, // and is NOT a hack. That technique could probably be used to fix this properly. + // NOTE2: It's May 2025, and I basically don't use HMR anymore and just refresh + // my page, since dealing with this is so painful. Also rspack is superfast and + // refresh is fast, so HMR feels less necessary. Finally, frequently any dev work + // I do requires a page refresh anyways. let listeners: any[] = []; handler = async (req, socket, head) => { diff --git a/src/packages/hub/proxy/strip-remember-me-cookie.ts b/src/packages/hub/proxy/strip-remember-me-cookie.ts index 5e582b9c19..802f30a064 100644 --- a/src/packages/hub/proxy/strip-remember-me-cookie.ts +++ b/src/packages/hub/proxy/strip-remember-me-cookie.ts @@ -16,20 +16,17 @@ export default function stripRememberMeCookie(cookie): { cookie: string; remember_me: string | undefined; // the value of the cookie we just stripped out. api_key: string | undefined; - nats_jwt: string | undefined; } { if (cookie == null) { return { cookie, remember_me: undefined, api_key: undefined, - nats_jwt: undefined, }; } else { const v: string[] = []; let remember_me: string | undefined = undefined; let api_key: string | undefined = undefined; - let nats_jwt: string | undefined = undefined; for (const c of cookie.split(";")) { const z = c.split("="); if (z[0].trim() == REMEMBER_ME_COOKIE_NAME) { @@ -43,6 +40,6 @@ export default function stripRememberMeCookie(cookie): { v.push(c); } } - return { cookie: v.join(";"), remember_me, api_key, nats_jwt }; + return { cookie: v.join(";"), remember_me, api_key }; } } diff --git a/src/packages/hub/servers/app/upload.ts b/src/packages/hub/servers/app/upload.ts index 95df3847cc..9ac22efaa8 100644 --- a/src/packages/hub/servers/app/upload.ts +++ b/src/packages/hub/servers/app/upload.ts @@ -25,7 +25,7 @@ import getAccount from "@cocalc/server/auth/get-account"; import isCollaborator from "@cocalc/server/projects/is-collaborator"; import formidable from "formidable"; import { PassThrough } from "node:stream"; -import { writeFile as writeFileToProject } from "@cocalc/nats/files/write"; +import { writeFile as writeFileToProject } from "@cocalc/conat/files/write"; import { join } from "path"; import { callback } from "awaiting"; diff --git a/src/packages/hub/servers/express-app.ts b/src/packages/hub/servers/express-app.ts index d2776f339d..109b028378 100644 --- a/src/packages/hub/servers/express-app.ts +++ b/src/packages/hub/servers/express-app.ts @@ -32,7 +32,7 @@ import initStats from "./app/stats"; import { database } from "./database"; import initHttpServer from "./http"; import initRobots from "./robots"; -import { initNatsServer } from "./nats"; +import { initConatServer } from "@cocalc/server/conat/socketio"; // Used for longterm caching of files. This should be in units of seconds. const MAX_AGE = Math.round(ms("10 days") / 1000); @@ -43,6 +43,7 @@ interface Options { isPersonal: boolean; nextServer: boolean; proxyServer: boolean; + conatServer: boolean; cert?: string; key?: string; listenersHack: boolean; @@ -129,7 +130,6 @@ export default async function init(opts: Options): Promise<{ initBlobUpload(router); initUpload(router); initSetCookies(router); - initNatsServer(router); initCustomize(router, opts.isPersonal); initStats(router); initAppRedirect(router); @@ -157,6 +157,11 @@ export default async function init(opts: Options): Promise<{ }); } + if (opts.conatServer) { + winston.info(`initializing the Conat Server`); + initConatServer({ httpServer, path: join(basePath, "conat") }); + } + // IMPORTANT: // The nextjs server must be **LAST** (!), since it takes // all routes not otherwise handled above. diff --git a/src/packages/hub/servers/nats.ts b/src/packages/hub/servers/nats.ts deleted file mode 100644 index 2d551d2113..0000000000 --- a/src/packages/hub/servers/nats.ts +++ /dev/null @@ -1,66 +0,0 @@ -/* -NATS WebSocket proxy -- this primarily just directly proxied the nats -websocket server, so outside browsers can connect to it. - -This assumes there is a NATS server. This gets configured in dev mode -automatically and started via: - -$ cd ~/cocalc/src -$ pnpm nats-server - -*/ - -import { createProxyServer, type ProxyServer } from "http-proxy-3"; -import getLogger from "@cocalc/backend/logger"; -import { type Router } from "express"; -import { natsWebsocketServer } from "@cocalc/backend/data"; -import { - versionCheckFails, - init as initVersionCheck, -} from "@cocalc/hub/proxy/version"; -import { delay } from "awaiting"; - -const logger = getLogger("hub:nats"); - -let proxy: ProxyServer | null = null; -export async function proxyNatsWebsocket(req, socket, head) { - const target = natsWebsocketServer; - logger.debug(`nats proxy -- proxying a connection to ${target}`); - // todo -- allowing no cookie, since that's used by projects and compute servers! - // do NOT disable this until compute servers all set a cookie... which could be a long time. - if (versionCheckFails(req)) { - logger.debug("NATS client failed version check -- closing"); - socket.destroy(); - return; - } - if (proxy == null) { - // make the proxy server - proxy = createProxyServer({ - ws: true, - target, - }); - proxy.on("error", (err) => { - logger.debug(`WARNING: nats websocket proxy error -- ${err}`); - }); - } - - // connect the client's socket to nats via the proxy server: - proxy.ws(req, socket, head); - - while (socket.readyState !== socket.CLOSED) { - if (versionCheckFails(req)) { - logger.debug("NATS client failed version check -- closing"); - setTimeout(() => socket.end(), 10 * 1000); - return; - } - await delay(2 * 60 * 1000); - } -} - -// this is immediately upgraded to a websocket -export function initNatsServer(router: Router) { - initVersionCheck(); - router.get("/nats", async (_req, res) => { - res.send(""); - }); -} diff --git a/src/packages/hub/tsconfig.json b/src/packages/hub/tsconfig.json index 4c91795874..9d5854ad40 100644 --- a/src/packages/hub/tsconfig.json +++ b/src/packages/hub/tsconfig.json @@ -14,6 +14,6 @@ { "path": "../server" }, { "path": "../static" }, { "path": "../util" }, - { "path": "../nats" } + { "path": "../conat" } ] } diff --git a/src/packages/jupyter/kernel/nats-service.ts b/src/packages/jupyter/kernel/nats-service.ts index cac33913e7..b3936de0ee 100644 --- a/src/packages/jupyter/kernel/nats-service.ts +++ b/src/packages/jupyter/kernel/nats-service.ts @@ -9,11 +9,11 @@ your terminal running the open files service to interact with anything here from In particular, set global.x = ..., etc. */ -import { createNatsJupyterService } from "@cocalc/nats/service/jupyter"; +import { createNatsJupyterService } from "@cocalc/conat/service/jupyter"; import { get_existing_kernel as getKernel } from "@cocalc/jupyter/kernel"; import { bufferToBase64 } from "@cocalc/util/base64"; -export async function initNatsService({ +export async function initConatService({ path, project_id, }: { diff --git a/src/packages/jupyter/package.json b/src/packages/jupyter/package.json index 582336ef2e..d26b150ff5 100644 --- a/src/packages/jupyter/package.json +++ b/src/packages/jupyter/package.json @@ -30,7 +30,7 @@ "dependencies": { "@cocalc/backend": "workspace:*", "@cocalc/jupyter": "workspace:*", - "@cocalc/nats": "workspace:*", + "@cocalc/conat": "workspace:*", "@cocalc/sync": "workspace:*", "@cocalc/sync-client": "workspace:*", "@cocalc/util": "workspace:*", diff --git a/src/packages/jupyter/redux/actions.ts b/src/packages/jupyter/redux/actions.ts index eb6bf2d355..3c8b87a8c7 100644 --- a/src/packages/jupyter/redux/actions.ts +++ b/src/packages/jupyter/redux/actions.ts @@ -47,8 +47,8 @@ import { import { SyncDB } from "@cocalc/sync/editor/db/sync"; import type { Client } from "@cocalc/sync/client/types"; import latexEnvs from "@cocalc/util/latex-envs"; -import { jupyterApiClient } from "@cocalc/nats/service/jupyter"; -import { type AKV, akv } from "@cocalc/nats/sync/akv"; +import { jupyterApiClient } from "@cocalc/conat/service/jupyter"; +import { type AKV, akv } from "@cocalc/conat/sync/akv"; import { reuseInFlight } from "@cocalc/util/reuse-in-flight"; const { close, required, defaults } = misc; @@ -812,7 +812,7 @@ export abstract class JupyterActions extends Actions { // has processed it! const version = this.syncdb.newestVersion(); try { - await this.api({ timeout: 30000 }).save_ipynb_file({ version }); + await this.api({ timeout: 5 * 60 * 1000 }).save_ipynb_file({ version }); } catch (err) { console.log(err); throw Error( diff --git a/src/packages/jupyter/redux/project-actions.ts b/src/packages/jupyter/redux/project-actions.ts index 2ec9faa850..7a9b337c27 100644 --- a/src/packages/jupyter/redux/project-actions.ts +++ b/src/packages/jupyter/redux/project-actions.ts @@ -30,9 +30,9 @@ import nbconvertChange from "./handle-nbconvert-change"; import type { ClientFs } from "@cocalc/sync/client/types"; import { kernel as createJupyterKernel } from "@cocalc/jupyter/kernel"; import { removeJupyterRedux } from "@cocalc/jupyter/kernel"; -import { initNatsService } from "@cocalc/jupyter/kernel/nats-service"; -import { type DKV, dkv } from "@cocalc/nats/sync/dkv"; -import { computeServerManager } from "@cocalc/nats/compute/manager"; +import { initConatService } from "@cocalc/jupyter/kernel/nats-service"; +import { type DKV, dkv } from "@cocalc/conat/sync/dkv"; +import { computeServerManager } from "@cocalc/conat/compute/manager"; import { reuseInFlight } from "@cocalc/util/reuse-in-flight"; // see https://github.com/sagemathinc/cocalc/issues/8060 @@ -223,7 +223,7 @@ export class JupyterActions extends JupyterActions0 { this.natsService.close(); this.natsService = null; } - const service = (this.natsService = await initNatsService({ + const service = (this.natsService = await initConatService({ project_id: this.project_id, path: this.path, })); diff --git a/src/packages/jupyter/tsconfig.json b/src/packages/jupyter/tsconfig.json index 56e63b5680..831b6fef23 100644 --- a/src/packages/jupyter/tsconfig.json +++ b/src/packages/jupyter/tsconfig.json @@ -10,6 +10,6 @@ { "path": "../sync" }, { "path": "../sync-client" }, { "path": "../util" }, - { "path": "../nats" } + { "path": "../conat" } ] } diff --git a/src/packages/nats/browser-api/index.ts b/src/packages/nats/browser-api/index.ts deleted file mode 100644 index 16c9326856..0000000000 --- a/src/packages/nats/browser-api/index.ts +++ /dev/null @@ -1,48 +0,0 @@ -/* -Request/response API that runs in each browser client. - -DEVELOPMENT: - -Refresh your browser and do this in the console to connect to your own browser: - - > a = cc.client.nats_client.browserApi({sessionId:cc.client.nats_client.sessionId}) - -Then try everything. - -You can also open a second browser tab (with the same account), view the sessionId - - > cc.client.nats_client.sessionId - -then connect from one to the other using that sessionId. This way you can coordinate -between different browsers. -*/ - -import { type System, system } from "./system"; -import { handleErrorMessage } from "@cocalc/nats/util"; - -export interface BrowserApi { - system: System; -} - -const BrowserApiStructure = { - system, -} as const; - -export function initBrowserApi(callBrowserApi): BrowserApi { - const browserApi: any = {}; - for (const group in BrowserApiStructure) { - if (browserApi[group] == null) { - browserApi[group] = {}; - } - for (const functionName in BrowserApiStructure[group]) { - browserApi[group][functionName] = async (...args) => - handleErrorMessage( - await callBrowserApi({ - name: `${group}.${functionName}`, - args, - }), - ); - } - } - return browserApi as BrowserApi; -} diff --git a/src/packages/nats/browser-api/system.ts b/src/packages/nats/browser-api/system.ts deleted file mode 100644 index 155a87e344..0000000000 --- a/src/packages/nats/browser-api/system.ts +++ /dev/null @@ -1,9 +0,0 @@ -export const system = { - version: true, - ping: true, -}; - -export interface System { - version: () => Promise; - ping: () => Promise<{ now: number }>; -} diff --git a/src/packages/nats/files/util.ts b/src/packages/nats/files/util.ts deleted file mode 100644 index 8e46415134..0000000000 --- a/src/packages/nats/files/util.ts +++ /dev/null @@ -1,22 +0,0 @@ -import { delay } from "awaiting"; -import { waitUntilConnected } from "@cocalc/nats/util"; -import { getLogger } from "@cocalc/nats/client"; - -const logger = getLogger("files:util"); - -export async function runLoop({ subs, listen, opts, subject, nc }) { - while (true) { - const sub = nc.subscribe(subject); - subs[subject] = sub; - try { - await listen({ ...opts, sub }); - } catch (err) { - logger.debug(`runLoop: error - ${err}`); - } - if (subs[subject] == null) return; - await delay(3000 + Math.random()); - await waitUntilConnected(); - if (subs[subject] == null) return; - logger.debug(`runLoop: will restart`); - } -} diff --git a/src/packages/nats/service/index.ts b/src/packages/nats/service/index.ts deleted file mode 100644 index 8007ade3ab..0000000000 --- a/src/packages/nats/service/index.ts +++ /dev/null @@ -1,8 +0,0 @@ -export type { - ServiceDescription, - CallNatsServiceFunction, - ServiceCall, - CreateNatsServiceFunction, - NatsService, -} from "./service"; -export { callNatsService, createNatsService } from "./service"; diff --git a/src/packages/nats/service/many.ts b/src/packages/nats/service/many.ts deleted file mode 100644 index 2ebc4b5e16..0000000000 --- a/src/packages/nats/service/many.ts +++ /dev/null @@ -1,77 +0,0 @@ -/* -Support requestMany and respond to requestMany request transparently. - -If the request is sent via the function requestMany below, then: - (1) it contains the HEADER ("requestMany") with value "Empty", - (2) it combines all the responses together (until receiving Empty) and returns that - -On the other side, respondMany looks for HEADER and if it is set, -breaks up the response data into maximum size chunks based on the -server configured max payload size. - -By using this pair of functions the client can control whether or not -request many is used for a particular request. In particular, if the -header isn't set to request many, then no extra messages get sent back. -*/ - -import { Empty, headers } from "@nats-io/nats-core"; -import { getMaxPayload } from "@cocalc/nats/util"; - -export async function respondMany({ mesg, data }) { - if (!hasRequestManyHeader(mesg)) { - // console.log("respondMany: using NORMAL response"); - // header not set, so just send a normal response. - await mesg.respond(data); - return; - } - // console.log("respondMany: using CHUNKED response"); - // header set, so send response as multiple messages broken into - // chunks followed by an Empty message to terminate. - const maxPayload = await getMaxPayload(); - for (let i = 0; i < data.length; i += maxPayload) { - const slice = data.slice(i, i + maxPayload); - await mesg.respond(slice); - } - await mesg.respond(Empty); -} - -export async function requestMany({ - nc, - subject, - data, - maxWait, -}: { - nc; - subject: string; - data; - maxWait?: number; -}): Promise<{ data: Buffer }> { - // set a special header so that server knows to use our respond many protocol. - const h = headers(); - h.append(HEADER, TERMINATE); - const v: any[] = []; - for await (const resp of await nc.requestMany(subject, data, { - maxWait, - headers: h, - })) { - if (resp.data.length == 0) { - break; - } - v.push(resp); - } - const respData = Buffer.concat(v.map((x) => x.data)); - return { data: respData }; -} - -export const HEADER = "requestMany"; -// terminate on empty message -- only protocol we support right now -export const TERMINATE = "Empty"; - -function hasRequestManyHeader(mesg) { - for (const [key, value] of mesg.headers ?? []) { - if (key == HEADER && value == TERMINATE) { - return true; - } - } - return false; -} diff --git a/src/packages/nats/service/service.ts b/src/packages/nats/service/service.ts deleted file mode 100644 index 5b279a9204..0000000000 --- a/src/packages/nats/service/service.ts +++ /dev/null @@ -1,481 +0,0 @@ -/* -Simple to use UI to connect anything in cocalc via request/reply services. - -- callNatsService -- createNatsService - -The input is basically where the service is (account, project, public), -and either what message to send or how to handle messages. -Also if the handler throws an error, the caller will throw -an error too. -*/ - -import { - Svcm, - type ServiceInfo, - type ServiceStats, - type ServiceIdentity, -} from "@nats-io/services"; -import { type Location } from "@cocalc/nats/types"; -import { trunc_middle } from "@cocalc/util/misc"; -import { getEnv, getLogger } from "@cocalc/nats/client"; -import { randomId } from "@cocalc/nats/names"; -import { delay } from "awaiting"; -import { EventEmitter } from "events"; -import { requestMany, respondMany } from "./many"; -import { encodeBase64, waitUntilConnected } from "@cocalc/nats/util"; - -const DEFAULT_TIMEOUT = 10 * 1000; -const MONITOR_INTERVAL = 45 * 1000; - -// switching this is awkward since it would have to be changed in projects -// and frontends or things would hang. I'm making it toggleable just for -// dev purposes so we can benchmark. -// Using the service framework gives us no real gain and cost a massive amount -// in terms of subscriptions -- basically there's a whole bunch for every file, etc. -// **In short: Do NOT enable this by default.** -const ENABLE_SERVICE_FRAMEWORK = false; - -const logger = getLogger("nats:service"); - -export interface ServiceDescription extends Location { - service: string; - - description?: string; - - // if true and multiple servers are setup in same "location", then they ALL get to respond (sender gets first response). - all?: boolean; - - // DEFAULT: ENABLE_SERVICE_FRAMEWORK - enableServiceFramework?: boolean; - - subject?: string; -} - -export interface ServiceCall extends ServiceDescription { - mesg: any; - timeout?: number; - - // if true, call returns the raw response message, with no decoding or error wrapping. - // (do not combine with many:true) - raw?: boolean; - - // if true, uses requestMany so **responses can be arbitrarily large**. - // This MUST be set for both client and server! Don't use this unless - // you need it, since every response involves 2 messages instead of 1 - // (the extra termination message). A good example that uses this is - // the jupyter api, since large output gets returned when you click on - // "Fetch more output". - many?: boolean; - - // if it fails with NatsError, we wait for service to be ready and try again, - // unless this is set -- e.g., when waiting for the service in the first - // place we set this to avoid an infinite loop. - noRetry?: boolean; -} - -export async function callNatsService(opts: ServiceCall): Promise { - // console.log("callNatsService", opts); - const env = await getEnv(); - const { nc, jc } = env; - const subject = serviceSubject(opts); - let resp; - const timeout = opts.timeout ?? DEFAULT_TIMEOUT; - const data = jc.encode(opts.mesg); - - const doRequest = async () => { - await waitUntilConnected(); - if (opts.many) { - resp = await requestMany({ nc, subject, data, maxWait: timeout }); - } else { - resp = await nc.request(subject, data, { - timeout, - }); - } - if (opts.raw) { - return resp; - } - const result = jc.decode(resp.data); - if (result?.error) { - throw Error(result.error); - } - return result; - }; - - // we just try to call the service first - try { - return await doRequest(); - } catch (err) { - //console.log(`request to '${subject}' failed -- ${err}`); - // it failed. - if (err.name == "NatsError" && !opts.noRetry) { - // it's a nats problem - const p = opts.path ? `${trunc_middle(opts.path, 64)}:` : ""; - if (err.code == "503") { - // it's actually just not ready, so - // wait for the service to be ready, then try again - await waitForNatsService({ options: opts, maxWait: timeout }); - try { - return await doRequest(); - } catch (err) { - if (err.code == "503") { - err.message = `Not Available: service ${p}${opts.service} is not available`; - } - throw err; - } - } else if (err.code == "TIMEOUT") { - throw Error( - `Timeout: service ${p}${opts.service} did not respond for ${Math.round(timeout / 1000)} seconds`, - ); - } - } - throw err; - } -} - -export type CallNatsServiceFunction = typeof callNatsService; - -export interface Options extends ServiceDescription { - description?: string; - version?: string; - handler: (mesg) => Promise; - // see corresponding call option above. - many?: boolean; -} - -export function createNatsService(options: Options) { - return new NatsService(options); -} - -export type CreateNatsServiceFunction = typeof createNatsService; - -export function serviceSubject({ - service, - - account_id, - browser_id, - - project_id, - compute_server_id, - - path, - - subject, -}: ServiceDescription): string { - if (subject) { - return subject; - } - let segments; - path = path ? encodeBase64(path) : "_"; - if (!project_id && !account_id) { - segments = ["public", service]; - } else if (account_id) { - segments = [ - "services", - `account-${account_id}`, - browser_id ?? "_", - project_id ?? "_", - path ?? "_", - service, - ]; - } else if (project_id) { - segments = [ - "services", - `project-${project_id}`, - compute_server_id ?? "_", - service, - path, - ]; - } - return segments.join("."); -} - -export function serviceName({ - service, - - account_id, - browser_id, - - project_id, - compute_server_id, -}: ServiceDescription): string { - let segments; - if (!project_id && !account_id) { - segments = [service]; - } else if (account_id) { - segments = [`account-${account_id}`, browser_id ?? "-", service]; - } else if (project_id) { - segments = [`project-${project_id}`, compute_server_id ?? "-", service]; - } - return segments.join("-"); -} - -export function serviceDescription({ - description, - path, -}: ServiceDescription): string { - return [description, path ? `\nPath: ${path}` : ""].join(""); -} - -export class NatsService extends EventEmitter { - private options: Options; - private subject: string; - private api?; - private name: string; - - constructor(options: Options) { - super(); - this.options = options; - this.name = serviceName(this.options); - this.subject = serviceSubject(options); - this.startMonitor(); - this.startMainLoop(); - } - - private log = (...args) => { - logger.debug(`service:'${this.name}' -- `, ...args); - }; - - private startMainLoop = async () => { - while (this.subject) { - await this.runService(); - await delay(5000); - } - }; - - // The service monitor checks every MONITOR_INTERVAL when - // connected that the service is definitely working and - // responding to pings. If not, it calls restartService. - private startMonitor = async () => { - while (this.subject) { - this.log(`serviceMonitor: waiting ${MONITOR_INTERVAL}ms...`); - await delay(MONITOR_INTERVAL); - if (this.subject == null) return; - await waitUntilConnected(); - if (this.subject == null) return; - try { - this.log(`serviceMonitor: ping`); - await callNatsService({ ...this.options, mesg: "ping", timeout: 7500 }); - if (this.subject == null) return; - this.log("serviceMonitor: ping SUCCESS"); - } catch (err) { - if (this.subject == null) return; - this.log(`serviceMonitor: ping FAILED -- ${err}`); - this.restartService(); - } - } - }; - - private restartService = () => { - if (this.api) { - this.api.stop(); - delete this.api; - } - this.runService(); - }; - - // create and run the service until something goes wrong, when this - // willl return. It does not throw an error. - private runService = async () => { - try { - await waitUntilConnected(); - this.emit("starting"); - - this.log("starting service"); - const env = await getEnv(); - - // close any subscriptions by this client to the subject, which might be left from previous runs of this service. - // @ts-ignore - for (const sub of env.nc.protocol.subscriptions.subs) { - if (sub[1].subject == this.subject) { - sub[1].close(); - } - } - - const queue = this.options.all ? randomId() : "0"; - if (this.options.enableServiceFramework ?? ENABLE_SERVICE_FRAMEWORK) { - const svcm = new Svcm(env.nc); - const service = await svcm.add({ - name: this.name, - version: this.options.version ?? "0.0.1", - description: serviceDescription(this.options), - queue, - }); - if (!this.subject) { - return; - } - this.api = service.addEndpoint("api", { subject: this.subject }); - } else { - this.api = env.nc.subscribe(this.subject, { queue }); - } - this.emit("running"); - await this.listen(); - } catch (err) { - this.log(`service stopping due to ${err}`); - } - }; - - private listen = async () => { - const env = await getEnv(); - const jc = env.jc; - for await (const mesg of this.api) { - const request = jc.decode(mesg.data) ?? ({} as any); - - // console.logger.debug("handle nats service call", request); - let resp; - if (request == "ping") { - resp = "pong"; - } else { - try { - resp = await this.options.handler(request); - } catch (err) { - resp = { error: `${err}` }; - } - } - try { - const data = jc.encode(resp); - if (this.options.many) { - await respondMany({ mesg, data }); - } else { - await mesg.respond(data); - } - } catch (err) { - // If, e.g., resp is too big, then the error would be - // "NatsError: MAX_PAYLOAD_EXCEEDED" - // and it is of course very important to make the caller aware that - // there was an error, as opposed to just silently leaving - // them hanging forever. - const data = jc.encode({ error: `${err}` }); - if (this.options.many) { - await respondMany({ mesg, data }); - } else { - await mesg.respond(data); - } - } - } - }; - - close = () => { - if (!this.subject) { - return; - } - this.emit("close"); - this.removeAllListeners(); - this.api?.stop(); - delete this.api; - // @ts-ignore - delete this.subject; - // @ts-ignore - delete this.options; - }; -} - -interface ServiceClientOpts { - options: ServiceDescription; - maxWait?: number; - id?: string; -} - -export async function pingNatsService({ - options, - maxWait = 500, - id, -}: ServiceClientOpts): Promise<(ServiceIdentity | string)[]> { - if (!(options.enableServiceFramework ?? ENABLE_SERVICE_FRAMEWORK)) { - // console.log( - // `pingNatsService: ${options.service}.${options.description ?? ""} -- using fallback ping`, - // ); - const pong = await callNatsService({ - ...options, - mesg: "ping", - timeout: Math.max(3000, maxWait), - // set no-retry to avoid infinite loop - noRetry: true, - }); - // console.log( - // `pingNatsService: ${options.service}.${options.description ?? ""} -- success`, - // ); - return [pong]; - } - const env = await getEnv(); - const svc = new Svcm(env.nc); - const m = svc.client({ maxWait, strategy: "stall" }); - const v: ServiceIdentity[] = []; - for await (const ping of await m.ping(serviceName(options), id)) { - v.push(ping); - } - return v; -} - -export async function natsServiceInfo({ - options, - maxWait = 500, - id, -}: ServiceClientOpts): Promise { - if (!(options.enableServiceFramework ?? ENABLE_SERVICE_FRAMEWORK)) { - throw Error(`service framework not enabled for ${options.service}`); - } - const env = await getEnv(); - const svc = new Svcm(env.nc); - const m = svc.client({ maxWait, strategy: "stall" }); - const v: ServiceInfo[] = []; - for await (const info of await m.info(serviceName(options), id)) { - v.push(info); - } - return v; -} - -export async function natsServiceStats({ - options, - maxWait = 500, - id, -}: ServiceClientOpts): Promise { - if (!(options.enableServiceFramework ?? ENABLE_SERVICE_FRAMEWORK)) { - throw Error(`service framework not enabled for ${options.service}`); - } - const env = await getEnv(); - const svc = new Svcm(env.nc); - const m = svc.client({ maxWait, strategy: "stall" }); - const v: ServiceStats[] = []; - for await (const stats of await m.stats(serviceName(options), id)) { - v.push(stats); - } - return v; -} - -export async function waitForNatsService({ - options, - maxWait = 60000, -}: { - options: ServiceDescription; - maxWait?: number; -}) { - let d = 1000; - let m = 100; - const start = Date.now(); - const getPing = async (m: number) => { - try { - await waitUntilConnected(); - return await pingNatsService({ options, maxWait: m }); - } catch { - // ping can fail, e.g, if not connected to nats at all or the ping - // service isn't up yet. - return [] as ServiceIdentity[]; - } - }; - let ping = await getPing(m); - while (ping.length == 0) { - d = Math.min(10000, d * 1.3); - m = Math.min(1500, m * 1.3); - if (Date.now() - start + d >= maxWait) { - logger.debug( - `timeout waiting for ${serviceName(options)} to start...`, - d, - ); - throw Error("timeout"); - } - await delay(d); - ping = await getPing(m); - } - return ping; -} diff --git a/src/packages/nats/sync/akv.ts b/src/packages/nats/sync/akv.ts deleted file mode 100644 index a5276b9b7a..0000000000 --- a/src/packages/nats/sync/akv.ts +++ /dev/null @@ -1,173 +0,0 @@ -/* -Asynchronous Memory Efficient Access to Key:Value Store - -This provides the same abstraction as dkv, except it doesn't download any -data to the client until you actually call get. The calls to get and -set are thus async. - -Because AsyncKV has no global knowledge of this key:value store, the inventory -is not updated and limits are not enforced. Of course chunking (storing large -values properly) is supported. - -There is no need to close this because it is stateless. - -DEVELOPMENT: - -~/cocalc/src/packages/backend$ node -> t = require("@cocalc/backend/nats/sync").akv({name:'test'}) - -*/ - -import { GeneralKV } from "./general-kv"; -import { getEnv } from "@cocalc/nats/client"; -import { type DKVOptions, getPrefix } from "./dkv"; -import { once } from "@cocalc/util/async-utils"; -import { jsName } from "@cocalc/nats/names"; -import { encodeBase64 } from "@cocalc/nats/util"; - -export class AKV { - private options: DKVOptions; - private prefix: string; - private noChunks?: boolean; - - constructor({ noChunks, ...options }: DKVOptions & { noChunks?: boolean }) { - this.options = options; - this.noChunks = noChunks; - const { name, valueType = "json" } = options; - this.prefix = getPrefix({ - name, - valueType, - options, - }); - } - - private encodeKey = (key) => { - if (typeof key != "string") { - key = `${key}`; - } - return key ? `${this.prefix}.${encodeBase64(key)}` : this.prefix; - }; - - private getGeneralKVForOneKey = async ( - key: string, - { noWatch = true }: { noWatch?: boolean } = {}, - ): Promise> => { - const { valueType = "json", limits, account_id, project_id } = this.options; - const filter = this.encodeKey(key); - const kv = new GeneralKV({ - name: jsName({ account_id, project_id }), - env: await getEnv(), - // IMPORTANT: need both filter and .> to get CHUNKS in case of chunked data! - filter: [filter, filter + ".>"], - limits, - valueType, - noWatch, - noGet: noWatch && this.noChunks, - }); - await kv.init(); - return kv; - }; - - // Just get one value asynchronously, rather than the entire dkv. - // If the timeout option is given and the value of key is not set, - // will wait until that many ms for the key to get - get = async (key: string, { timeout }: { timeout?: number } = {}) => { - const start = Date.now(); - let noWatch = true; - if (timeout) { - // there's a timeout so in this unusual nondefault case we will watch: - noWatch = false; - } - const kv = await this.getGeneralKVForOneKey(key, { noWatch }); - const filter = this.encodeKey(key); - if (noWatch && this.noChunks) { - const x = await kv.getDirect(filter); - await kv.close(); - return x; - } - try { - let value = kv.get(filter); - if (!timeout) { - return value; - } - while (value === undefined && Date.now() - start <= timeout) { - try { - await once(kv, "change", timeout - (Date.now() - start)); - } catch { - // failed due to timeout -- result is undefined since key isn't set - return undefined; - } - value = kv.get(filter); - } - return value; - } finally { - await kv.close(); - } - }; - - headers = async (key: string) => { - const kv = await this.getGeneralKVForOneKey(key); - const filter = this.encodeKey(key); - if (this.noChunks) { - const x = await kv.getDirect(filter); - if (x === undefined) { - return; - } - } - const h = kv.headers(filter); - await kv.close(); - return h; - }; - - time = async (key: string) => { - const kv = await this.getGeneralKVForOneKey(key); - const filter = this.encodeKey(key); - if (this.noChunks) { - const x = await kv.getDirect(filter); - if (x === undefined) { - return; - } - } - const t = kv.time(filter); - await kv.close(); - return t; - }; - - delete = async (key: string) => { - const kv = await this.getGeneralKVForOneKey(key); - const filter = this.encodeKey(key); - await kv.delete(filter); - await kv.close(); - }; - - // NOTE: set does NOT update the inventory or apply limits, since this - // has no global knowledge of the kv. - set = async ( - key: string, - value: T, - options?: { headers?: { [key: string]: string }; previousSeq?: number }, - ) => { - const kv = await this.getGeneralKVForOneKey(key); - const filter = this.encodeKey(key); - await kv.set(filter, value, { - ...options, - headers: { ...options?.headers }, - }); - }; - - seq = async (key: string) => { - const kv = await this.getGeneralKVForOneKey(key); - const filter = this.encodeKey(key); - if (this.noChunks) { - const x = await kv.getDirect(filter); - if (x === undefined) { - return; - } - } - return kv.seq(filter); - }; -} - -export function akv(opts: DKVOptions) { - return new AKV(opts); -} diff --git a/src/packages/nats/sync/dkv.ts b/src/packages/nats/sync/dkv.ts deleted file mode 100644 index bfc12ba104..0000000000 --- a/src/packages/nats/sync/dkv.ts +++ /dev/null @@ -1,504 +0,0 @@ -/* -Eventually Consistent Distributed Key Value Store - -Supports: - - - automatic value chunking to store arbitrarily large values - - type checking the values - - arbitrary name - - arbitrary keys - - limits - -This downloads ALL data for the key:value store to the client, -then keeps it is in sync with NATS. - -For an alternative space efficient async interface to the EXACT SAME DATA, -see akv.ts. - -IMPORTANT: When you set data in a dkv, it is NOT guaranteed to be saved remotely -forever immediately, obviously. The dkv attempts to save to NATS in the background. -**There are reasons whey saving might fail or data you have set could disappear!** -E.g., if you set the max_msg_size limit, and try to set a value that is too -large, then it is removed during save and a 'reject' event is fired. -The other limits will silently delete data for other reasons as well (e.g., too old, -too many messages). - -EVENTS: - -- 'change', {key:string, value?:T, prev:T} -- there is a change. - if value===undefined, that means that key is deleted and the value used to be prev. - -- 'reject', {key, value} -- data you set is rejected when trying to save, e.g., if too large - -- 'stable' -- there are no unsaved changes and all saved changes have been - echoed back from server. - -- 'closed' -- the dkv is now closed. Note that close's are reference counted, e.g., you can - grab the same dkv in multiple places in your code, close it when do with each, and it - is freed when the number of closes equals the number of objects you created. - -Merge conflicts are handled by your custom merge function, and no event is fired. - -DEVELOPMENT: - -From node.js - - ~/cocalc/src/packages/backend$ n - Welcome to Node.js v18.17.1. - Type ".help" for more information. - > t = await require("@cocalc/backend/nats/sync").dkv({name:'test'}) - -From the browser: - -If you want a persistent distributed key:value store in the browser, -which shares state to all browser clients for a given **account_id**, -do this in the dev console: - - > a = await cc.client.nats_client.dkv({name:'test', account_id:cc.client.account_id}) - -Then do the same thing in another dev console in another browser window: - - > a = await cc.client.nats_client.dkv({name:'test', account_id:cc.client.account_id}) - -Now do this in one: - - > a.x = 10 - -and - - > a.x - 10 - -in the other. Yes, it's that easy to have a persistent distributed eventually consistent -synchronized key-value store! - -For library code, replace cc.client by webapp_client, which you get via: - - import { webapp_client } from "@cocalc/frontend/webapp-client" - -If instead you need to share state with a project (or compute server), use - -> b = await cc.client.nats_client.dkv({name:'test', project_id:'...'}) - - -UNIT TESTS: See backend/nats/test/ - -They aren't right here, because this module doesn't have info to connect to NATS. - -*/ - -import { EventEmitter } from "events"; -import { reuseInFlight } from "@cocalc/util/reuse-in-flight"; -import { GeneralDKV, TOMBSTONE, type MergeFunction } from "./general-dkv"; -import { jsName } from "@cocalc/nats/names"; -import { userKvKey, type KVOptions } from "./kv"; -import { localLocationName } from "@cocalc/nats/names"; -import refCache from "@cocalc/util/refcache"; -import { getEnv } from "@cocalc/nats/client"; -import { - inventory, - INVENTORY_NAME, - THROTTLE_MS, - type Inventory, -} from "./inventory"; -import { asyncThrottle } from "@cocalc/util/async-utils"; -import { delay } from "awaiting"; -import { decodeBase64, encodeBase64 } from "@cocalc/nats/util"; -import { getLogger } from "@cocalc/nats/client"; -import { waitUntilConnected } from "@cocalc/nats/util"; - -const logger = getLogger("dkv"); - -export interface DKVOptions extends KVOptions { - merge?: MergeFunction; - noAutosave?: boolean; - noInventory?: boolean; -} - -export class DKV extends EventEmitter { - generalDKV?: GeneralDKV; - name: string; - private prefix: string; - private opts; - private keys: { [encodedKey: string]: string } = {}; - - constructor(options: DKVOptions) { - super(); - const { - name, - account_id, - project_id, - merge, - env, - noAutosave, - limits, - noInventory, - desc, - valueType = "json", - } = options; - if (env == null) { - throw Error("env must not be null"); - } - if ( - noInventory || - (process.env.COCALC_TEST_MODE && noInventory == null) || - name == INVENTORY_NAME - ) { - // @ts-ignore - this.updateInventory = () => {}; - } - // name of the jetstream key:value store. - this.name = name; - - this.prefix = getPrefix({ name, valueType, options }); - - this.opts = { - location: { account_id, project_id }, - name: jsName({ account_id, project_id }), - desc, - noInventory, - filter: [this.prefix, `${this.prefix}.>`], - env, - merge, - noAutosave, - limits, - valueType, - }; - - this.init(); - return new Proxy(this, { - deleteProperty(target, prop) { - if (typeof prop == "string") { - target.delete(prop); - } - return true; - }, - set(target, prop, value) { - prop = String(prop); - if (prop == "_eventsCount" || prop == "_events" || prop == "close") { - target[prop] = value; - return true; - } - if (target[prop] != null) { - throw Error(`method name '${prop}' is read only`); - } - target.set(prop, value); - return true; - }, - get(target, prop) { - return target[String(prop)] ?? target.get(String(prop)); - }, - }); - } - - init = reuseInFlight(async () => { - if (this.generalDKV != null) { - return; - } - // the merge conflict algorithm must be adapted since we encode - // the key in the header. - const merge = (opts) => { - // here is what the input might look like: - // opts = { - // key: '71d7616250fed4dc27b70ee3b934178a3b196bbb.11f6ad8ec52a2984abaafd7c3b516503785c2072', - // remote: { key: 'x', value: 10 }, - // local: { key: 'x', value: 5 }, - // prev: { key: 'x', value: 3 } - // } - const key = this.getKey(opts.key); - if (key == null) { - console.warn("BUG in merge conflict resolution", opts); - throw Error("local key must be defined"); - } - const { local, remote, prev } = opts; - try { - return this.opts.merge?.({ key, local, remote, prev }) ?? local; - } catch (err) { - console.warn("exception in merge conflict resolution", err); - return local; - } - }; - this.generalDKV = new GeneralDKV({ - ...this.opts, - merge, - desc: `${this.name} ${this.opts.desc ?? ""}`, - }); - this.generalDKV.on("change", ({ key, value, prev }) => { - if (this.generalDKV == null) { - return; - } - let decodedKey; - try { - decodedKey = this.getKey(key); - } catch (err) { - // key is missing so at this point there is no knowledge of it and - // nothing we can alert on. - // TODO: may remove this when/if we completely understand why - // this ever happens - // console.log("WARNING: missing key for -- ", { key, err }); - return; - } - if (value !== undefined && value !== TOMBSTONE) { - this.emit("change", { - key: decodedKey, - value, - prev, - }); - } else { - // value is undefined or TOMBSTONE, so it's a delete, so do not set value here - this.emit("change", { key: decodedKey, prev }); - } - }); - this.generalDKV.on("reject", ({ key, value }) => { - if (this.generalDKV == null) { - return; - } - if (value != null) { - this.emit("reject", { key: this.getKey(key), value }); - } - }); - this.generalDKV.on("stable", () => this.emit("stable")); - await this.generalDKV.init(); - this.updateInventory(); - }); - - close = async () => { - const generalDKV = this.generalDKV; - if (generalDKV == null) { - return; - } - delete this.generalDKV; - await generalDKV.close(); - // @ts-ignore - delete this.opts; - this.emit("closed"); - this.removeAllListeners(); - }; - - delete = (key: string) => { - if (this.generalDKV == null) { - throw Error("closed"); - } - this.generalDKV.delete(this.encodeKey(key)); - this.updateInventory(); - }; - - clear = () => { - if (this.generalDKV == null) { - throw Error("closed"); - } - this.generalDKV.clear(); - this.updateInventory(); - }; - - // server assigned time - time = (key?: string): Date | undefined | { [key: string]: Date } => { - if (this.generalDKV == null) { - throw Error("closed"); - } - const times = this.generalDKV.time(key ? this.encodeKey(key) : undefined); - if (key != null || times == null) { - return times; - } - const obj = this.generalDKV.getAll(); - const x: any = {}; - for (const k in obj) { - const { key } = obj[k]; - x[key] = times[k]; - } - return x; - }; - - // WARNING: (1) DO NOT CHANGE THIS or all stored data will become invalid. - // (2) This definition is used implicitly in akv.ts also! - // The encoded key which we actually store in NATS. It has to have - // a restricted form, and a specified prefix, which - // is why the hashing, etc. This allows arbitrary keys. - // We have to monkey patch nats to accept even base64 keys!) - // There are NOT issues with key length though. This same strategy of encoding - // keys using base64 is used by Nats object store: - // https://github.com/nats-io/nats-architecture-and-design/blob/main/adr/ADR-20.md#object-name - private encodeKey = (key) => { - if (typeof key != "string") { - key = `${key}`; - } - return key ? `${this.prefix}.${encodeBase64(key)}` : this.prefix; - }; - - // decodeKey is the inverse of encodeKey - private decodeKey = (encodedKey) => { - const v = encodedKey.split("."); - return v[1] ? decodeBase64(v[1]) : ""; - }; - - has = (key: string): boolean => { - if (this.generalDKV == null) { - throw Error("closed"); - } - return this.generalDKV.has(this.encodeKey(key)); - }; - - get = (key: string): T | undefined => { - if (this.generalDKV == null) { - throw Error("closed"); - } - return this.generalDKV.get(this.encodeKey(key)); - }; - - getAll = (): { [key: string]: T } => { - if (this.generalDKV == null) { - throw Error("closed"); - } - const obj = this.generalDKV.getAll(); - const x: any = {}; - for (const k in obj) { - const key = this.getKey(k); - x[key] = obj[k]; - } - return x; - }; - - private getKey = (k) => { - if (this.keys[k] != null) { - return this.keys[k]; - } - return this.decodeKey(k); - }; - - headers = (key: string) => { - return this.generalDKV?.headers(this.encodeKey(key)); - }; - - get length(): number { - if (this.generalDKV == null) { - throw Error("closed"); - } - return this.generalDKV.length; - } - - set = ( - key: string, - value: T, - // NOTE: if you call this.headers(n) it is NOT visible until the publish is confirmed. - // This could be changed with more work if it matters. - options?: { headers?: { [key: string]: string } }, - ): void => { - if (this.generalDKV == null) { - throw Error("closed"); - } - if (value === undefined) { - // undefined can't be JSON encoded, so we can't possibly represent it, and this - // *must* be treated as a delete. - // NOTE that jc.encode encodes null and undefined the same, so supporting this - // as a value is just begging for misery. - this.delete(key); - this.updateInventory(); - return; - } - const encodedKey = this.encodeKey(key); - this.keys[encodedKey] = key; - this.generalDKV.set(encodedKey, value, { - headers: { ...options?.headers }, - }); - this.updateInventory(); - }; - - hasUnsavedChanges = (): boolean => { - if (this.generalDKV == null) { - return false; - } - return this.generalDKV.hasUnsavedChanges(); - }; - - unsavedChanges = (): string[] => { - const generalDKV = this.generalDKV; - if (generalDKV == null) { - return []; - } - return generalDKV.unsavedChanges().map((key) => this.getKey(key)); - }; - - isStable = () => { - return this.generalDKV?.isStable(); - }; - - save = async () => { - return await this.generalDKV?.save(); - }; - - private updateInventory = asyncThrottle( - async () => { - if (this.generalDKV == null || this.opts.noInventory) { - return; - } - await delay(500); - if (this.generalDKV == null) { - return; - } - const { valueType } = this.opts; - const name = this.name; - let inv: null | Inventory = null; - - try { - await waitUntilConnected(); - inv = await inventory(this.opts.location); - if (this.generalDKV == null) { - return; - } - if (!inv.needsUpdate({ name, type: "kv", valueType })) { - return; - } - const stats = this.generalDKV.stats(); - if (stats == null) { - return; - } - const { count, bytes } = stats; - inv.set({ - type: "kv", - name, - count, - bytes, - desc: this.opts.desc, - valueType: this.opts.valueType, - limits: this.opts.limits, - }); - } catch (err) { - logger.debug( - `WARNING: unable to update inventory for ${this.opts?.name} -- ${err}`, - ); - } finally { - await inv?.close(); - } - }, - THROTTLE_MS, - { leading: true, trailing: true }, - ); -} - -// *** WARNING: THIS CAN NEVER BE CHANGE! ** -// The recipe for 'this.prefix' must never be changed, because -// it determines where the data is actually stored. If you change -// it, then every user's data vanishes. -export function getPrefix({ name, valueType, options }) { - return encodeBase64( - JSON.stringify([name, valueType, localLocationName(options)]), - ); -} - -export const cache = refCache({ - name: "dkv", - createKey: userKvKey, - createObject: async (opts) => { - await waitUntilConnected(); - if (opts.env == null) { - opts.env = await getEnv(); - } - const k = new DKV(opts); - await k.init(); - return k; - }, -}); - -export async function dkv(options: DKVOptions): Promise> { - return await cache(options); -} diff --git a/src/packages/nats/sync/ephemeral-stream.ts b/src/packages/nats/sync/ephemeral-stream.ts deleted file mode 100644 index 24b47e2e02..0000000000 --- a/src/packages/nats/sync/ephemeral-stream.ts +++ /dev/null @@ -1,729 +0,0 @@ -/* -An Ephemeral Stream - -DEVELOPMENT: - -~/cocalc/src/packages/backend$ node - - - require('@cocalc/backend/nats'); a = require('@cocalc/nats/sync/ephemeral-stream'); s = await a.estream({name:'test', leader:true}) - - -Testing two at once (a leader and non-leader): - - require('@cocalc/backend/nats'); s = await require('@cocalc/backend/nats/sync').dstream({ephemeral:true,name:'test', leader:1, noAutosave:true}); t = await require('@cocalc/backend/nats/sync').dstream({ephemeral:true,name:'test', leader:0,noAutosave:true}) - -*/ - -import { - type FilteredStreamLimitOptions, - last, - enforceLimits, - enforceRateLimits, - headersFromRawMessages, -} from "./stream"; -import { type NatsEnv, type ValueType } from "@cocalc/nats/types"; -import { EventEmitter } from "events"; -import { Empty, type Msg, type Subscription } from "@nats-io/nats-core"; -import { isNumericString } from "@cocalc/util/misc"; -import type { JSONValue } from "@cocalc/util/types"; -import { - // getMaxPayload, - waitUntilConnected, - encodeBase64, -} from "@cocalc/nats/util"; -import refCache from "@cocalc/util/refcache"; -import { streamSubject } from "@cocalc/nats/names"; -import { getEnv } from "@cocalc/nats/client"; -import { headers as createHeaders } from "@nats-io/nats-core"; -import { reuseInFlight } from "@cocalc/util/reuse-in-flight"; -import { throttle } from "lodash"; -import { once } from "@cocalc/util/async-utils"; -import { callback, delay } from "awaiting"; -import { randomId } from "@cocalc/nats/names"; - -export interface RawMsg extends Msg { - timestamp: number; - seq: number; - sessionId: string; -} - -export const ENFORCE_LIMITS_THROTTLE_MS = process.env.COCALC_TEST_MODE - ? 100 - : 45000; - -const HEADER_PREFIX = "CoCalc-"; - -const COCALC_SEQUENCE_HEADER = `${HEADER_PREFIX}Seq`; -const COCALC_TIMESTAMP_HEADER = `${HEADER_PREFIX}Timestamp`; -const COCALC_OPTIONS_HEADER = `${HEADER_PREFIX}Options`; -const COCALC_SESSION_ID_HEADER = `${HEADER_PREFIX}Session-Id`; -export const COCALC_MESSAGE_ID_HEADER = `${HEADER_PREFIX}Msg-Id`; - -const PUBLISH_TIMEOUT = 7500; - -const DEFAULT_HEARTBEAT_INTERVAL = 30 * 1000; - -export interface EphemeralStreamOptions { - // what it's called - name: string; - // where it is located - account_id?: string; - project_id?: string; - limits?: Partial; - // only load historic messages starting at the given seq number. - start_seq?: number; - desc?: JSONValue; - valueType?: ValueType; - leader?: boolean; - - noCache?: boolean; - heartbeatInterval?: number; -} - -export class EphemeralStream extends EventEmitter { - public readonly name: string; - private readonly subject: string; - private readonly limits: FilteredStreamLimitOptions; - private _start_seq?: number; - public readonly valueType: ValueType; - // don't do "this.raw=" or "this.messages=" anywhere in this class!!! - public readonly raw: RawMsg[][] = []; - public readonly messages: T[] = []; - private readonly msgIDs = new Set(); - - private env?: NatsEnv; - private sub?: Subscription; - private leader: boolean; - private server?: Subscription; - // seq used by the *leader* only to assign sequence numbers - private seq: number = 1; - private lastHeartbeat: number = 0; - private heartbeatInterval: number; - // lastSeq used by clients to keep track of what they have received; if one - // is skipped they reconnect starting with the last one they didn't miss. - private lastSeq: number = 0; - private sendQueue: { data; options?; seq: number; cb: Function }[] = []; - private bytesSent: { [time: number]: number } = {}; - - private sessionId?: string; - - constructor({ - name, - project_id, - account_id, - limits, - start_seq, - valueType = "json", - leader = false, - heartbeatInterval = DEFAULT_HEARTBEAT_INTERVAL, - }: EphemeralStreamOptions) { - super(); - this.valueType = valueType; - this.heartbeatInterval = heartbeatInterval; - this.name = name; - this.leader = !!leader; - const subjects = streamSubject({ account_id, project_id, ephemeral: true }); - this.subject = subjects.replace(">", encodeBase64(name)); - this._start_seq = start_seq; - this.limits = { - max_msgs: -1, - max_age: 0, - max_bytes: -1, - max_msg_size: -1, - max_bytes_per_second: -1, - max_msgs_per_second: -1, - ...limits, - }; - return new Proxy(this, { - get(target, prop) { - return typeof prop == "string" && isNumericString(prop) - ? target.get(parseInt(prop)) - : target[String(prop)]; - }, - }); - } - - init = async () => { - this.env = await getEnv(); - if (!this.leader) { - // try to get current data from a leader - await this.getAllFromLeader({ - start_seq: this._start_seq ?? 0, - noEmit: true, - }); - } else { - // start listening on the subject for new data - this.serve(); - } - // NOTE: if we miss a message between getAllFromLeader and when we start listening, - // then the sequence number will have a gap, and we'll immediately reconnect, starting - // at the right point. So no data can possibly be lost. - this.listen(); - if (!this.leader) { - this.heartbeatMonitor(); - } - if (this.env?.nc?.on != null) { - this.env.nc.on("reconnect", this.reconnect); - } - }; - - private resetState = () => { - delete this.sessionId; - this.bytesSent = {}; - this.msgIDs.clear(); - this.raw.length = 0; - this.messages.length = 0; - this.seq = 0; - this.sendQueue.length = 0; - this.lastSeq = 0; - delete this._start_seq; - this.emit("reset"); - }; - - private reset = async () => { - this.resetState(); - await this.reconnect(); - }; - - close = () => { - if (this.env?.nc?.removeListener != null) { - this.env.nc.removeListener("reconnect", this.reconnect); - } - delete this.env; - this.removeAllListeners(); - // @ts-ignore - this.sub?.close(); - delete this.sub; - // @ts-ignore - this.server?.close(); - delete this.server; - }; - - private getAllFromLeader = async ({ - maxWait = 30000, - start_seq = 0, - noEmit, - }: { maxWait?: number; start_seq?: number; noEmit?: boolean } = {}) => { - if (this.leader) { - throw Error("this is the leader"); - } - let d = 1000; - while (this.env != null) { - await waitUntilConnected(); - if (this.env == null) { - return; - } - // console.log("getAllFromLeader", { start_seq }); - try { - for await (const raw0 of await this.env.nc.requestMany( - this.subject + ".all", - this.env.jc.encode({ start_seq }), - { maxWait }, - )) { - this.lastHeartbeat = Date.now(); - if (raw0.data.length == 0) { - // done - return; - } - const raw = getRawMsg(raw0); - if ( - !this.leader && - this.sessionId && - this.sessionId != raw.sessionId - ) { - await this.reset(); - return; - } else if (this.lastSeq && raw.seq > this.lastSeq + 1) { - // console.log("skipped a sequence number - reconnecting"); - await this.reconnect(); - return; - } else if (raw.seq <= this.lastSeq) { - // already saw this - continue; - } - if (!this.sessionId) { - this.sessionId = raw.sessionId; - } - this.lastSeq = raw.seq; - const mesg = this.decodeValue(raw.data); - this.messages.push(mesg); - this.raw.push([raw]); - if (!noEmit) { - this.emit("change", mesg, [raw]); - } - } - return; - } catch (err) { - // console.log(`err connecting -- ${err}`); - if (err.code == "503") { - // leader just isn't ready yet - d = Math.min(15000, d * 1.3); - await delay(d); - continue; - } else { - throw err; - } - } - } - }; - - private serve = async () => { - if (this.env == null) { - throw Error("closed"); - } - this.sessionId = randomId(); - this.sendHeartbeats(); - this.server = this.env.nc.subscribe(this.subject + ".>"); - for await (const raw of this.server) { - if (raw.subject.endsWith(".all")) { - const { start_seq = 0 } = this.env.jc.decode(raw.data) ?? {}; - for (const [m] of this.raw) { - if (m.seq >= start_seq) { - raw.respond(m.data, { headers: m.headers }); - } - } - raw.respond(Empty); - continue; - } else if (raw.subject.endsWith(".send")) { - let options: any = undefined; - if (raw.headers) { - for (const [key, value] of raw.headers) { - if (key == COCALC_OPTIONS_HEADER) { - options = JSON.parse(value[0]); - break; - } - } - } - let resp; - try { - resp = await this.sendAsLeader(raw.data, options); - } catch (err) { - raw.respond(this.env.jc.encode({ error: `${err}` })); - return; - } - raw.respond(this.env.jc.encode(resp)); - continue; - } - } - }; - - private sendHeartbeats = async () => { - while (this.env != null) { - await waitUntilConnected(); - const now = Date.now(); - const wait = this.heartbeatInterval - (now - this.lastHeartbeat); - if (wait > 100) { - await delay(wait); - } else { - const now = Date.now(); - this.env.nc.publish(this.subject, Empty); - this.lastHeartbeat = now; - await delay(this.heartbeatInterval); - } - } - }; - - private heartbeatMonitor = async () => { - while (this.env != null) { - if (Date.now() - this.lastHeartbeat >= 2.1 * this.heartbeatInterval) { - try { - // console.log("skipped a heartbeat -- reconnecting"); - await this.reconnect(); - } catch {} - } - await delay(this.heartbeatInterval); - } - }; - - private listen = async () => { - await waitUntilConnected(); - if (this.env == null) { - return; - } - while (this.env != null) { - // @ts-ignore - this.sub?.close(); - this.sub = this.env.nc.subscribe(this.subject); - try { - for await (const raw0 of this.sub) { - if (!this.leader) { - this.lastHeartbeat = Date.now(); - } - if (raw0.data.length == 0 && raw0.headers == null) { - // console.log("received heartbeat"); - // it's a heartbeat probe - continue; - } - const raw = getRawMsg(raw0); - if ( - !this.leader && - this.sessionId && - this.sessionId != raw.sessionId - ) { - await this.reset(); - return; - } else if ( - !this.leader && - this.lastSeq && - raw.seq > this.lastSeq + 1 - ) { - // console.log("skipped a sequence number - reconnecting"); - await this.reconnect(); - return; - } else if (raw.seq <= this.lastSeq) { - // already saw this - continue; - } - if (!this.sessionId) { - this.sessionId = raw.sessionId; - } - // move sequence number forward one and record the data - this.lastSeq = raw.seq; - const mesg = this.decodeValue(raw.data); - this.messages.push(mesg); - this.raw.push([raw]); - this.lastSeq = raw.seq; - this.emit("change", mesg, [raw]); - } - } catch (err) { - console.log(`Error listening -- ${err}`); - } - await delay(3000); - } - this.enforceLimits(); - }; - - private reconnect = reuseInFlight(async () => { - if (this.leader) { - // leader doesn't have a notion of reconnect -- it is the one that - // gets connected to - return; - } - // @ts-ignore - this.sub?.close(); - delete this.sub; - await this.getAllFromLeader({ start_seq: this.lastSeq + 1, noEmit: false }); - this.listen(); - }); - - private encodeValue = (value: T) => { - if (this.env == null) { - throw Error("closed"); - } - return this.valueType == "json" ? this.env.jc.encode(value) : value; - }; - - private decodeValue = (value): T => { - if (this.env == null) { - throw Error("closed"); - } - return this.valueType == "json" ? this.env.jc.decode(value) : value; - }; - - publish = async ( - mesg: T, - options?: { headers?: { [key: string]: string }; msgID?: string }, - ) => { - const data = this.encodeValue(mesg); - - // this may throw an exception: - enforceRateLimits({ - limits: this.limits, - bytesSent: this.bytesSent, - subject: this.subject, - data, - mesg, - }); - - if (this.leader) { - // sending from leader -- so assign seq, timestamp and sent it out. - return await this.sendAsLeader(data, options); - } else { - const timeout = 15000; // todo - // sending as non-leader -- ask leader to send it. - let headers; - if (options != null && Object.keys(options).length > 0) { - headers = createHeaders(); - headers.append(COCALC_OPTIONS_HEADER, JSON.stringify(options)); - } else { - headers = undefined; - } - await waitUntilConnected(); - if (this.env == null) { - throw Error("closed"); - } - const resp = await this.env.nc.request(this.subject + ".send", data, { - headers, - timeout, - }); - const r = this.env.jc.decode(resp.data); - if (r.error) { - throw Error(r.error); - } - return resp; - } - }; - - private sendAsLeader = async (data, options?): Promise<{ seq: number }> => { - if (!this.leader) { - throw Error("must be the leader"); - } - const seq = this.seq; - this.seq += 1; - const f = (cb) => { - this.sendQueue.push({ data, options, seq, cb }); - this.processQueue(); - }; - await callback(f); - return { seq }; - }; - - private processQueue = reuseInFlight(async () => { - if (!this.leader) { - throw Error("must be the leader"); - } - const { sessionId } = this; - while ( - this.sendQueue.length > 0 && - this.env != null && - this.sessionId == sessionId - ) { - const x = this.sendQueue.shift(); - if (x == null) { - continue; - } - const { data, options, seq, cb } = x; - if (options?.msgID && this.msgIDs.has(options?.msgID)) { - // it's a dup of one already successfully sent before -- dedup by ignoring. - cb(); - continue; - } - await waitUntilConnected(); - if (this.env == null) { - cb("closed"); - return; - } - const timestamp = Date.now(); - const headers = createHeaders(); - if (options?.headers) { - for (const k in options.headers) { - headers.append(k, `${options.headers[k]}`); - } - } - headers.append(COCALC_SEQUENCE_HEADER, `${seq}`); - headers.append(COCALC_TIMESTAMP_HEADER, `${timestamp}`); - if (!this.sessionId) { - throw Error("sessionId must be set"); - } - headers.append(COCALC_SESSION_ID_HEADER, this.sessionId); - if (options?.msgID) { - headers.append(COCALC_MESSAGE_ID_HEADER, options.msgID); - } - // we publish it until we get it as a change event, and only - // then do we respond, being sure it was sent. - const now = Date.now(); - while (this.env != null && this.sessionId == sessionId) { - this.env.nc.publish(this.subject, data, { headers }); - const start = Date.now(); - let done = false; - try { - while ( - Date.now() - start <= PUBLISH_TIMEOUT && - this.sessionId == sessionId - ) { - const [_, raw] = await once(this, "change", PUBLISH_TIMEOUT); - if (last(raw)?.seq == seq) { - done = true; - break; - } - } - if (done && options?.msgID) { - this.msgIDs.add(options.msgID); - } - cb(done ? undefined : "timeout"); - break; - } catch (err) { - console.warn(`Error processing sendQueue -- ${err}`); - cb(`${err}`); - break; - } - } - if (now > this.lastHeartbeat) { - this.lastHeartbeat = now; - } - } - }); - - get = (n?): T | T[] => { - if (n == null) { - return this.getAll(); - } else { - return this.messages[n]; - } - }; - - getAll = () => { - return [...this.messages]; - }; - - get length(): number { - return this.messages.length; - } - - get start_seq(): number | undefined { - return this._start_seq; - } - - headers = (n: number): { [key: string]: string } | undefined => { - return headersFromRawMessages(this.raw[n]); - }; - - // load older messages starting at start_seq - load = async ({ - start_seq, - noEmit, - }: { - start_seq: number; - noEmit?: boolean; - }) => { - if (this._start_seq == null || this._start_seq <= 1 || this.leader) { - // we already loaded everything on initialization; there can't be anything older; - // or we are leader, so we are the full source of truth. - return; - } - // this is NOT efficient - it just discards everything and starts over. - const n = this.messages.length; - this.resetState(); - this._start_seq = start_seq; - this.lastSeq = start_seq - 1; - await this.reconnect(); - if (!noEmit) { - for (let i = 0; i < this.raw.length - n; i++) { - this.emit("change", this.messages[i], this.raw[i]); - } - } - }; - - // get server assigned time of n-th message in stream - time = (n: number): Date | undefined => { - const r = last(this.raw[n]); - if (r == null) { - return; - } - return new Date(r.timestamp); - }; - - times = () => { - const v: (Date | undefined)[] = []; - for (let i = 0; i < this.length; i++) { - v.push(this.time(i)); - } - return v; - }; - - stats = ({ - start_seq = 1, - }: { - start_seq?: number; - }): { count: number; bytes: number } | undefined => { - if (this.raw == null) { - return; - } - let count = 0; - let bytes = 0; - for (const raw of this.raw) { - const seq = last(raw)?.seq; - if (seq == null) { - continue; - } - if (seq < start_seq) { - continue; - } - count += 1; - for (const r of raw) { - bytes += r.data.length; - } - } - return { count, bytes }; - }; - - // delete all messages up to and including the - // one at position index, i.e., this.messages[index] - // is deleted. - // NOTE: other clients will NOT see the result of a purge. - purge = async ({ index = -1 }: { index?: number } = {}) => { - if (index >= this.raw.length - 1 || index == -1) { - index = this.raw.length - 1; - } - this.messages.splice(0, index + 1); - this.raw.splice(0, index + 1); - }; - - private enforceLimitsNow = reuseInFlight(async () => { - const index = enforceLimits({ - messages: this.messages, - raw: this.raw, - limits: this.limits, - }); - if (index > -1) { - try { - // console.log("imposing limit via purge ", { index }); - await this.purge({ index }); - } catch (err) { - if (err.code != "TIMEOUT") { - console.log(`WARNING: purging old messages - ${err}`); - } - } - } - }); - - private enforceLimits = throttle( - this.enforceLimitsNow, - ENFORCE_LIMITS_THROTTLE_MS, - { leading: false, trailing: true }, - ); -} - -export const cache = refCache({ - name: "ephemeral-stream", - createObject: async (options: EphemeralStreamOptions) => { - const estream = new EphemeralStream(options); - await estream.init(); - return estream; - }, -}); -export async function estream( - options: EphemeralStreamOptions, -): Promise> { - return await cache(options); -} - -function getRawMsg(raw: Msg): RawMsg { - let seq = 0, - timestamp = 0, - sessionId = ""; - for (const [key, value] of raw.headers ?? []) { - if (key == COCALC_SEQUENCE_HEADER) { - seq = parseInt(value[0]); - } else if (key == COCALC_TIMESTAMP_HEADER) { - timestamp = parseFloat(value[0]); - } else if (key == COCALC_SESSION_ID_HEADER) { - sessionId = value[0]; - } - } - if (!seq) { - throw Error("missing seq header"); - } - if (!timestamp) { - throw Error("missing timestamp header"); - } - // @ts-ignore - raw.seq = seq; - // @ts-ignore - raw.timestamp = timestamp; - // @ts-ignore - raw.sessionId = sessionId; - // @ts-ignore - return raw; -} diff --git a/src/packages/nats/sync/general-kv.ts b/src/packages/nats/sync/general-kv.ts deleted file mode 100644 index 2dfd9c07c4..0000000000 --- a/src/packages/nats/sync/general-kv.ts +++ /dev/null @@ -1,1199 +0,0 @@ -/* -Async Consistent Centralized Key Value Store - -- You give one or more subjects and this provides an asynchronous but consistent - way to work with the KV store of keys matching any of those subjects, - inside of the named KV store. -- The get operation is sync. (It can of course be slightly out of date, but that is detected - if you try to immediately write it.) -- The set will fail if the local cached value (returned by get) turns out to be out of date. -- Also delete and set will fail if the NATS connection is down or times out. -- For an eventually consistent sync wrapper around this, use DKV, defined in the sibling file dkv.ts. - -WARNING: Nats itself actually currently seems to have no model for consistency, especially -with multiple nodes. See https://github.com/nats-io/nats-server/issues/6557 - -This is a simple KV wrapper around NATS's KV, for small KV stores. Each client holds a local cache -of all data, which is used to ensure set's are a no-op if there is no change. Also, this automates -ensuring that if you do a read-modify-write, this will succeed only if nobody else makes a change -before you. - -- You must explicitly call "await store.init()" to initialize it before using it. - -- The store emits an event ('change', key, newValue, previousValue) whenever anything changes - -- Calling "store.get()" provides ALL the data and is synchronous. It uses various API tricks to - ensure this is fast and is updated when there is any change from upstream. Use "store.get(key)" - to get the value of one key. - -- Use "await store.set(key,value)" or "await store.set({key:value, key2:value2, ...})" to set data, - with the following semantics: - - - set ONLY makes a change if our local version ("store.get(key)") of the value is different from - what you're trying to set the value to, where different is defined by lodash isEqual. - - - if our local version this.get(key) was not the most recent version in NATS, then the set will - definitely throw an exception! This is fantastic because it means you can modify and save what - is in the local cache on multiple nodes at once anywhere, and be 100% certain to never overwrite - data in complicated objects. Of course, you have to assume "await store.set(...)" WILL - sometimes fail. - - - Set with multiple keys "pipelines" in that MAX_PARALLEL key/value pairs are set at once, without - waiting for every single individual set to get ACK'd from the server before doing more sets. - This makes this **massively** faster, but means that if "await store.set(...)" fails, you don't - immediately know which keys were successfully set and which failed, though all keys worked will get - updated soon and reflected in store.get(). - -- Use "await store.expire(ageMs)" to delete every key that was last changed at least ageMs - milliseconds in the past. - - TODO/WARNING: the timestamps are defined by NATS (and its clock), but - the definition of "ageMs in the past" is defined by the client where this is called. Thus - if the client's clock is off, that would be a huge problem. An obvious solution is to - get the current time from NATS, and use that. I don't know a "good" way to get the current - time except maybe publishing a message to myself...? - - -CHUNKING: - - -Similar to streams, unlike NATS itself, hwere we allow storing arbitrarily large -values, in particular, values that could be much larger than the configured message -size. When doing a set if the value exceeds the limit, we store the part of -the value that fits, and store a *header* that describes where the rest of the -values are stored. For a given key, the extra chunks are stored with keys: - - ${key}.${i}.chunk - -When receiving changes, these extra chunks are temporarily kept separately, -then used to compute the value for key. All other paramaters, e.g., sequence -numbers, last time, etc., use the main key. - -TODO: - -- [ ] maybe expose some functionality related to versions/history? - -DEVELOPMENT: - -(See packages/backend/nats/test/sync/general-kv.test.ts for a unit tested version of what is below that -actually works.) - -~/cocalc/src/packages/server$ n -Welcome to Node.js v18.17.1. -Type ".help" for more information. -> env = await require("@cocalc/backend/nats/env").getEnv(); a = require("@cocalc/nats/sync/general-kv"); s = new a.GeneralKV({name:'test',env,filter:['foo.>']}); await s.init(); - -> await s.set("foo.x", 10) -> s.getAll() -{ 'foo.x': 10 } -> await s.delete("foo.x") -undefined -> s.getAll() -{} -> await s.set("foo.x", 10) - -// Since the filters are disjoint these are totally different: - -> t = new a.GeneralKV({name:'test2',env,filter:['bar.>']}); await t.init(); -> await t.getAll() -{} -> await t.set("bar.abc", 10) -undefined -> await t.getAll() -{ 'bar.abc': 10} -> await s.getAll() -{ 'foo.x': 10 } - -// The union: -> u = new a.GeneralKV({name:'test3',env,filter:['bar.>', 'foo.>']}); await u.init(); -> u.getAll() -{ 'foo.x': 10, 'bar.abc': 10 } -> await s.set('foo.x', 999) -undefined -> u.getAll() -{ 'bar.abc': 10, 'foo.x': 999} -*/ - -import { EventEmitter } from "events"; -import { type NatsEnv } from "@cocalc/nats/types"; -import { Kvm } from "@nats-io/kv"; -import { getAllFromKv, matchesPattern, getMaxPayload } from "@cocalc/nats/util"; -import { isEqual } from "lodash"; -import { reuseInFlight } from "@cocalc/util/reuse-in-flight"; -import { map as awaitMap } from "awaiting"; -import { throttle } from "lodash"; -import { delay } from "awaiting"; -import { headers as createHeaders } from "@nats-io/nats-core"; -import type { MsgHdrs } from "@nats-io/nats-core"; -import type { ValueType } from "@cocalc/nats/types"; -import { isConnected, waitUntilConnected } from "@cocalc/nats/util"; -import { ENFORCE_LIMITS_THROTTLE_MS } from "./stream"; -import { asyncDebounce } from "@cocalc/util/async-utils"; -import { waitUntilReady } from "@cocalc/nats/tiered-storage/client"; - -const PUBLISH_TIMEOUT = 15000; - -class RejectError extends Error { - code: string; - key: string; -} - -const MAX_PARALLEL = 250; - -const CONNECTION_CHECK_INTERVAL = 5000; - -// Note that the limit options are named in exactly the same was as for streams, -// which is convenient for consistency. This is not consistent with NATS's -// own KV store limit naming. - -export interface KVLimits { - // How many keys may be in the KV store. Oldest keys will be removed - // if the key-value store exceeds this size. -1 for unlimited. - max_msgs: number; - - // Maximum age of any key, expressed in milliseconds. 0 for unlimited. - // Age is updated whenever value of the key is changed. - max_age: number; - - // The maximum number of bytes to store in this KV, which means - // the total of the bytes used to store everything. Since we store - // the key with each value (to have arbitrary keys), this includes - // the size of the keys. - max_bytes: number; - - // The maximum size of any single value, including the key. - max_msg_size: number; -} - -export class GeneralKV extends EventEmitter { - public readonly name: string; - private options?; - private filter?: string[]; - private env: NatsEnv; - private kv?; - private watch?; - private all?: { [key: string]: T }; - private revisions?: { [key: string]: number }; - private chunkCounts: { [key: string]: number } = {}; - private times?: { [key: string]: Date }; - private sizes?: { [key: string]: number }; - private allHeaders: { [key: string]: MsgHdrs } = {}; - private limits: KVLimits; - private revision: number = 0; - public readonly valueType: ValueType; - private noWatch: boolean; - private noGet: boolean; - - constructor({ - name, - env, - filter, - options, - limits, - valueType, - noWatch, - noGet, - }: { - name: string; - // filter: optionally restrict to subset of named kv store matching these subjects. - // NOTE: any key name that you *set or delete* should match one of these - filter?: string | string[]; - env: NatsEnv; - options?; - limits?: Partial; - valueType?: ValueType; - noWatch?: boolean; - noGet?: boolean; - }) { - super(); - this.limits = { - max_msgs: -1, - max_age: 0, - max_bytes: -1, - max_msg_size: -1, - ...limits, - }; - - this.noWatch = !!noWatch; - this.noGet = !!noGet; - this.env = env; - this.name = name; - this.options = options; - this.filter = typeof filter == "string" ? [filter] : filter; - this.valueType = valueType ?? "json"; - if (this.valueType != "json" && this.valueType != "binary") { - throw Error("valueType must be 'json' or 'binary'"); - } - } - - init = reuseInFlight(async () => { - if (this.all != null) { - return; - } - await waitUntilReady(this.name); - const kvm = new Kvm(this.env.nc); - await waitUntilConnected(); - this.kv = await kvm.create(this.name, { - compression: true, - ...this.options, - }); - this.kv.validateKey = validateKey; - this.kv.validateSearchKey = validateSearchKey; - if (this.noGet) { - this.times = {}; - this.revisions = {}; - this.allHeaders = {}; - this.chunkCounts = {}; - this.sizes = {}; - this.all = {}; - this.revision = 0; - return; - } - - await waitUntilConnected(); - const { all, revisions, times, headers } = await getAllFromKv({ - kv: this.kv, - key: this.filter, - }); - this.revisions = revisions; - this.times = times; - this.allHeaders = {}; - this.chunkCounts = {}; - this.sizes = {}; - const usedKeys = new Set(); - const all0: { [key: string]: T } = {}; - const chunkData: { - [key: string]: { chunkCount?: number; chunks: Buffer[] }; - } = {}; - for (const key in all) { - let value: Buffer | null = null; - const chunkCount = getChunkCount(headers[key]); - let chunkKey: string = ""; - let key0 = ""; - if (chunkCount) { - if (chunkData[key] == null) { - chunkData[key] = { chunkCount, chunks: [all[key]] }; - } else { - chunkData[key].chunkCount = chunkCount; - chunkData[key].chunks[0] = all[key]; - } - chunkKey = key; - this.allHeaders[key] = headers[key]; - } else if (isChunkedKey(key)) { - delete this.times[key]; - delete this.revisions[key]; - const { key: ckey, index } = parseChunkedKey(key); - chunkKey = ckey; - if (chunkData[chunkKey] == null) { - chunkData[chunkKey] = { chunks: [] }; - } - chunkData[chunkKey].chunks[index] = all[key]; - } else { - key0 = key; - value = all[key]; - usedKeys.add(key0); - this.allHeaders[key] = headers[key]; - } - - if (chunkKey && chunkData[chunkKey].chunkCount != null) { - let i = 0; - for (const chunk of chunkData[chunkKey].chunks) { - if (chunk !== undefined) { - i += 1; - } - } - const { chunkCount } = chunkData[chunkKey]; - if (i >= chunkCount!) { - // nextjs prod complains about this... - // @ts-ignore - value = Buffer.concat(chunkData[chunkKey].chunks); - key0 = chunkKey; - this.chunkCounts[key0] = chunkCount!; - delete chunkData[chunkKey]; - usedKeys.add(chunkKey); - for (let chunk = 1; chunk < chunkCount!; chunk++) { - usedKeys.add(chunkedKey({ key: chunkKey, chunk })); - } - } - } - - if (value == null) { - continue; - } - this.sizes[key0] = value.length; - try { - all0[key0] = this.decode(value); - } catch (err) { - // invalid json -- corruption. I hit this ONLY when doing development - // and explicitly putting bad data in. This isn't normal. But it's - // help to make this a warning, in order to not make all data not accessible. - console.warn(`WARNING: unable to read ${key0} -- ${err}`); - } - } - this.all = all0; - this.revision = Math.max(0, ...Object.values(this.revisions)); - this.emit("connected"); - if (!this.noWatch) { - this.startWatchLoop(); - this.monitorWatch(); - } - - // Also anything left at this point is garbage that needs to be freed: - for (const key in all) { - if (!usedKeys.has(key)) { - await this.kv.delete(key); - } - } - }); - - private encode = (value) => { - return this.valueType == "json" ? this.env.jc.encode(value) : value; - }; - - private decode = (value) => { - return this.valueType == "json" ? this.env.jc.decode(value) : value; - }; - - private restartWatch = () => { - // this triggers the end of the "for await (const x of this.watch) {" - // loop in startWatch, which results in another watch starting, - // assuming the object isn't closed. - this.watch?.stop(); - }; - - private startWatchLoop = async () => { - let d = 1000; - let lastTime = 0; - while (this.all != null) { - if (Date.now() - lastTime > 60 * 1000) { - // reset delay if it has been a while -- delay is only to prevent frequent bursts - d = 1000; - } - try { - await waitUntilConnected(); - if (this.all == null) { - return; - } - const resumeFromRevision = this.revision - ? this.revision + 1 - : undefined; - await this.startWatch({ resumeFromRevision }); - d = 1000; - } catch (_err) { - // expected to happen sometimes, e.g., when the connection closes temporarily - // if (err.code != "CONNECTION_CLOSED") { - // console.log(`WARNING: getting watch on kv...`); - // } - } - if (this.all == null) { - // closed - return; - } - d = Math.min(20000, d * 1.25) + Math.random(); - // console.log(`waiting ${d}ms then reconnecting`); - await delay(d); - } - }; - - private startWatch = async ({ - resumeFromRevision, - }: { resumeFromRevision?: number } = {}) => { - // watch for changes, starting AFTER the last revision we retrieved - this.watch = await this.kv.watch({ - ignoreDeletes: false, - include: "updates", - key: this.filter, - resumeFromRevision, - }); - const chunkData: { - [key: string]: { - chunkCount?: number; - chunks: Buffer[]; - revision?: number; - }; - } = {}; - for await (const x of this.watch) { - const { revision, key, value, sm } = x; - this.revision = revision; - if ( - this.revisions == null || - this.all == null || - this.times == null || - this.sizes == null - ) { - return; - } - - let value0: Buffer | null = null; - const chunkCount = getChunkCount(sm.headers); - let chunkKey: string = ""; - let key0 = ""; - let revision0 = 0; - if (chunkCount) { - if (chunkData[key] == null) { - chunkData[key] = { chunkCount, chunks: [value], revision }; - } else { - chunkData[key].chunkCount = chunkCount; - chunkData[key].chunks[0] = value; - chunkData[key].revision = revision; - } - chunkKey = key; - this.allHeaders[key] = sm.headers; - } else if (isChunkedKey(key)) { - const { key: ckey, index } = parseChunkedKey(key); - chunkKey = ckey; - if (chunkData[chunkKey] == null) { - chunkData[chunkKey] = { chunks: [] }; - } - chunkData[chunkKey].chunks[index] = value; - } else { - key0 = key; - value0 = value; - revision0 = revision; - if (value.length != 0) { - // NOTE: we *only* set the header to remote when not deleting the key. Deleting - // it would delete the header, which contains the actual non-hashed key. - this.allHeaders[key] = sm.headers; - } - delete this.chunkCounts[key0]; - } - - if (chunkKey && chunkData[chunkKey].chunkCount != null) { - let i = 0; - for (const chunk of chunkData[chunkKey].chunks) { - if (chunk !== undefined) { - i += 1; - } - } - const { chunkCount } = chunkData[chunkKey]; - if (i >= chunkCount!) { - // @ts-ignore (for nextjs prod build) - value0 = Buffer.concat(chunkData[chunkKey].chunks); - key0 = chunkKey; - const r = chunkData[chunkKey].revision; - if (r == null) { - throw Error("bug"); - } - revision0 = r; - this.chunkCounts[chunkKey] = chunkCount!; - delete chunkData[chunkKey]; - } - } - - if (value0 == null) { - continue; - } - this.revisions[key0] = revision0; - const prev = this.all[key0]; - if (value0.length == 0) { - // delete - delete this.all[key0]; - delete this.times[key0]; - delete this.sizes[key0]; - delete this.chunkCounts[key0]; - } else { - this.all[key0] = this.decode(value0); - this.times[key0] = sm.time; - this.sizes[key0] = value0.length; - } - this.emit("change", { key: key0, value: this.all[key0], prev }); - this.enforceLimits(); - } - }; - - private monitorWatch = async () => { - if (this.env.nc.on != null) { - this.env.nc.on("reconnect", this.restartWatch); - this.env.nc.on("status", ({ type }) => { - if (type == "reconnect") { - this.ensureWatchIsValid(); - } - }); - } else { - this.checkWatchOnReconnect(); - } - while (this.revisions != null) { - if (!(await isConnected())) { - await waitUntilConnected(); - //console.log("monitorWatch: not connected so restart", this.name); - await this.restartWatch(); - } - //console.log("monitorWatch: wait", this.name); - await delay(CONNECTION_CHECK_INTERVAL); - } - }; - - private ensureWatchIsValid = asyncDebounce( - async () => { - await waitUntilConnected(); - await delay(2000); - const isValid = await this.isWatchStillValid(); - if (!isValid) { - if (this.kv == null) { - return; - } - console.log(`nats kv: ${this.name} -- watch not valid, so recreating`); - await this.restartWatch(); - } - }, - 3000, - { leading: false, trailing: true }, - ); - - private isWatchStillValid = async () => { - await waitUntilConnected(); - if (this.kv == null || this.watch == null) { - return false; - } - try { - await this.watch._data.info(); - return true; - } catch (err) { - console.log(`nats: watch info error -- ${err}`); - return false; - } - }; - - private checkWatchOnReconnect = async () => { - while (this.kv != null) { - try { - for await (const { type } of await this.env.nc.status()) { - if (type == "reconnect") { - await this.ensureWatchIsValid(); - } - } - } catch { - await delay(15000); - await this.ensureWatchIsValid(); - } - } - }; - - close = () => { - if (this.revisions == null) { - // already closed - return; - } - delete this.all; - - this.watch?.stop(); - delete this.watch; - - delete this.times; - delete this.revisions; - delete this.sizes; - delete this.kv; - // @ts-ignore - delete this.allHeaders; - this.emit("closed"); - this.removeAllListeners(); - this.env.nc.removeListener?.("reconnect", this.restartWatch); - }; - - headers = (key: string): { [key: string]: string } | undefined => { - const headers = this.allHeaders?.[key]; - if (headers == null) { - return; - } - const x: { [key: string]: string } = {}; - for (const [key, value] of headers) { - if (key != CHUNKS_HEADER) { - x[key] = value[0]; - } - } - return x; - }; - - // do not use cached this.all - // this is NOT implemented yet if there are chunks! - getDirect = async (key: string): Promise => { - if ( - this.all == null || - this.revisions == null || - this.times == null || - this.sizes == null || - this.allHeaders == null - ) { - throw Error("not initialized"); - } - const x = await this.kv.get(key); - if (x == null) { - return; - } - const { value, revision, sm } = x; - if (value.length == 0) { - return undefined; - } - const v = this.env.jc.decode(value); - this.all[key] = v; - this.revisions[key] = revision; - if (revision > this.revision) { - this.revision = revision; - } - this.times[key] = sm.time; - this.sizes[key] = value.length; - this.allHeaders[key] = sm.headers; - return v; - }; - - get = (key: string): T => { - if (this.all == null) { - throw Error("not initialized"); - } - return this.all[key]; - }; - - getAll = (): { [key: string]: T } => { - if (this.all == null) { - throw Error("not initialized"); - } - return { ...this.all }; - }; - - get length(): number { - if (this.all == null) { - throw Error("not initialized"); - } - return Object.keys(this.all).length; - } - - has = (key: string): boolean => { - return this.all?.[key] !== undefined; - }; - - time = (key?: string): { [key: string]: Date } | Date | undefined => { - if (key == null) { - return this.times; - } else { - return this.times?.[key]; - } - }; - - assertValidKey = (key: string): void => { - if (!this.isValidKey(key)) { - throw Error( - `delete: key (=${key}) must match the filter: ${JSON.stringify(this.filter)}`, - ); - } - }; - - isValidKey = (key: string): boolean => { - if (this.filter == null) { - return true; - } - for (const pattern of this.filter) { - if (matchesPattern({ pattern, subject: key })) { - return true; - } - } - return false; - }; - - seq = (key) => { - if (this.revisions == null) { - throw Error("not ready"); - } - return this.revisions[key]; - }; - - delete = async (key: string, revision?: number) => { - this.assertValidKey(key); - if ( - this.all == null || - this.revisions == null || - this.times == null || - this.sizes == null - ) { - throw Error("not ready"); - } - if (this.all[key] !== undefined || this.noGet || this.noWatch) { - const cur = this.all[key]; - try { - const newRevision = await this.kv.delete(key, { - previousSeq: revision ?? this.revisions[key], - }); - this.revisions[key] = newRevision; - delete this.all[key]; - } catch (err) { - this.all[key] = cur; - throw err; - } - if (this.chunkCounts[key]) { - // garbage collect the extra chunks - for (let chunk = 1; chunk < this.chunkCounts[key]; chunk++) { - await this.kv.delete(chunkedKey({ key, chunk })); - } - delete this.chunkCounts[key]; - } - } - }; - - // delete everything matching the filter that hasn't been set - // in the given amount of ms. Returns number of deleted records. - // NOTE: This could throw an exception if something that would expire - // were changed right when this is run then it would get expired - // but shouldn't. In that case, run it again. - expire = async ({ - cutoff, - ageMs, - }: { - cutoff?: Date; - ageMs?: number; - }): Promise => { - if (!ageMs && !cutoff) { - throw Error("one of ageMs or cutoff must be set"); - } - if (ageMs && cutoff) { - throw Error("exactly one of ageMs or cutoff must be set"); - } - if (this.times == null || this.all == null) { - throw Error("not initialized"); - } - if (ageMs && !cutoff) { - cutoff = new Date(Date.now() - ageMs); - } - if (cutoff == null) { - throw Error("impossible"); - } - // make copy of revisions *before* we start deleting so that - // if a key is changed exactly while deleting we get an error - // and don't accidently delete it! - const revisions = { ...this.revisions }; - const toDelete = Object.keys(this.all).filter( - (key) => this.times?.[key] != null && this.times[key] <= cutoff, - ); - if (toDelete.length > 0) { - await awaitMap(toDelete, MAX_PARALLEL, async (key) => { - await this.delete(key, revisions[key]); - }); - } - return toDelete.length; - }; - - // delete all that we know about - clear = async () => { - if (this.all == null) { - throw Error("not initialized"); - } - await awaitMap(Object.keys(this.all), MAX_PARALLEL, this.delete); - }; - - setMany = async ( - obj: { [key: string]: T }, - headers?: { [key: string]: { [name: string]: string } }, - ) => { - await awaitMap( - Object.keys(obj), - MAX_PARALLEL, - async (key) => await this.set(key, obj[key], headers?.[key]), - ); - }; - - set = async ( - key: string, - value: T, - options?: { - headers?: { [name: string]: string | null }; - previousSeq?: number; - }, - ) => { - await this._set(key, value, options); - if (this.all != null) { - this.all[key] = value; - } - }; - - private _set = async ( - key: string, - value: T, - options?: { - headers?: { [name: string]: string | null }; - previousSeq?: number; - }, - ) => { - if (!this.isValidKey(key)) { - throw Error( - `set: key (=${key}) must match the filter: ${JSON.stringify(this.filter)}`, - ); - } - if (this.all == null || this.revisions == null) { - throw Error("not ready"); - } - if (isEqual(this.all[key], value)) { - // values equal. What about headers? - - if ( - options?.headers == null || - Object.keys(options.headers).length == 0 - ) { - return; - } - const { headers } = options; - // maybe trying to change headers - let changeHeaders = false; - if (this.allHeaders[key] == null) { - // this is null but headers isn't, so definitely trying to change - changeHeaders = true; - } else { - // look to see if any header is explicitly being changed - const keys = new Set(Object.keys(headers)); - for (const [k, v] of this.allHeaders[key]) { - keys.delete(k); - if (headers[k] !== undefined && headers[k] != v[0]) { - changeHeaders = true; - break; - } - } - if (keys.size > 0) { - changeHeaders = true; - } - } - if (!changeHeaders) { - // not changing any header - return; - } - } - if (value === undefined) { - return await this.delete(key); - } - const revision = options?.previousSeq ?? this.revisions[key]; - let val = this.encode(value); - if ( - this.limits.max_msg_size > -1 && - val.length > this.limits.max_msg_size - ) { - // we reject due to our own size reasons - const err = new RejectError( - `message key:value size (=${val.length}) exceeds max_msg_size=${this.limits.max_msg_size} bytes`, - ); - err.code = "REJECT"; - err.key = key; - throw err; - } - - const maxMessageSize = (await getMaxPayload()) - 10000; - // const maxMessageSize = 100; - - if (val.length > maxMessageSize) { - // chunking - let val0 = val; - const chunks: Buffer[] = []; - while (val0.length > 0) { - chunks.push(val0.slice(0, maxMessageSize)); - val0 = val0.slice(maxMessageSize); - } - val = chunks[0]; - let allHeaders = createHeaders(); - allHeaders.append(CHUNKS_HEADER, `${chunks.length}`); - if (options?.headers) { - const { headers } = options; - for (const k in headers) { - const v = headers[k]; - if (v == null) { - continue; - } - allHeaders.append(k, v); - } - } - await jetstreamPut(this.kv, key, val, { - previousSeq: revision, - headers: allHeaders, - timeout: PUBLISH_TIMEOUT, - }); - // now save the other chunks somewhere. - for (let i = 1; i < chunks.length; i++) { - await jetstreamPut(this.kv, chunkedKey({ key, chunk: i }), chunks[i], { - timeout: PUBLISH_TIMEOUT, - }); - } - if (chunks.length < (this.chunkCounts[key] ?? 0)) { - // value previously had even more chunks, so we get rid of the extra chunks. - for ( - let chunk = chunks.length; - chunk < this.chunkCounts[key]; - chunk++ - ) { - await this.kv.delete(chunkedKey({ key, chunk })); - } - } - - this.chunkCounts[key] = chunks.length; - } else { - // not chunking - try { - let allHeaders; - if (options?.headers) { - const { headers } = options; - allHeaders = createHeaders(); - for (const k in headers) { - const v = headers[k]; - if (v == null) { - continue; - } - allHeaders.append(k, v); - } - } else { - allHeaders = undefined; - } - await jetstreamPut(this.kv, key, val, { - previousSeq: revision, - headers: allHeaders, - timeout: PUBLISH_TIMEOUT, - }); - } catch (err) { - if (err.code == "MAX_PAYLOAD_EXCEEDED") { - // nats rejects due to payload size - const err2 = new RejectError(`${err}`); - err2.code = "REJECT"; - err2.key = key; - throw err2; - } else { - throw err; - } - } - if (this.chunkCounts[key]) { - // it was chunked, so get rid of chunks - for (let chunk = 1; chunk < this.chunkCounts[key]; chunk++) { - await this.kv.delete(chunkedKey({ key, chunk })); - } - delete this.chunkCounts[key]; - } - } - }; - - stats = (): { count: number; bytes: number } | undefined => { - if (this.sizes == null) { - return; - } - let count = 0; - let bytes = 0; - for (const key in this.sizes) { - count += 1; - bytes += this.sizes[key]; - } - return { count, bytes }; - }; - - // separated out from throttled version so it's easy to call directly for unit testing. - private enforceLimitsNow = reuseInFlight(async () => { - if (this.all == null || this.times == null || this.sizes == null) { - return; - } - const { max_msgs, max_age, max_bytes } = this.limits; - let times: { time: Date; key: string }[] | null = null; - const getTimes = (): { time: Date; key: string }[] => { - if (times == null) { - // this is potentially a little worrisome regarding performance, but - // it has to be done, or we have to do something elsewhere to maintain - // this info. The intention with these kv's is they are small and all - // in memory. - const v: { time: Date; key: string }[] = []; - for (const key in this.times) { - v.push({ time: this.times[key], key }); - } - v.sort((a, b) => (a.time < b.time ? -1 : a.time > b.time ? 1 : 0)); - times = v; - } - return times!; - }; - - // we check with each defined limit if some old messages - // should be dropped, and if so move limit forward. If - // it is above -1 at the end, we do the drop. - let index = -1; - const setIndex = (i, _limit) => { - // console.log("setIndex", { i, _limit }); - index = Math.max(i, index); - }; - - //max_msgs = max number of keys - const v = Object.keys(this.all); - // console.log("enforceLimitsNow", this.limits, v, getTimes()); - if (max_msgs > -1 && v.length > max_msgs) { - // ensure there are at most this.limits.max_msgs messages - // by deleting the oldest ones up to a specified point. - const i = v.length - max_msgs; - if (i > 0) { - setIndex(i - 1, "max_msgs"); - } - } - - // max_age - if (max_age > 0) { - const times = getTimes(); - if (times.length > 1) { - // expire messages older than max_age nanoseconds - // to avoid potential clock skew, we define *now* as the time of the most - // recent message. For us, this should be fine, since we only impose limits - // when writing new messages, and none of these limits are guaranteed. - const now = times[times.length - 1].time.valueOf(); - const cutoff = new Date(now - max_age); - for (let i = times.length - 2; i >= 0; i--) { - if (times[i].time < cutoff) { - // it just went over the limit. Everything before - // and including the i-th message should be deleted. - setIndex(i, "max_age"); - break; - } - } - } - } - - // max_bytes - if (max_bytes >= 0) { - let t = 0; - const times = getTimes(); - for (let i = times.length - 1; i >= 0; i--) { - t += this.sizes[times[i].key]; - if (t > max_bytes) { - // it just went over the limit. Everything before - // and including the i-th message must be deleted. - setIndex(i, "max_bytes"); - break; - } - } - } - - if (index > -1 && this.times != null) { - try { - // console.log("enforceLimits: deleting ", { index }); - const times = getTimes(); - const toDelete = times.slice(0, index + 1).map(({ key }) => key); - if (toDelete.length > 0) { - // console.log("enforceLImits: deleting ", toDelete.length, " keys"); - const revisions = { ...this.revisions }; - await awaitMap(toDelete, MAX_PARALLEL, async (key) => { - await this.delete(key, revisions[key]); - }); - } - } catch (err) { - // code 10071 is for "JetStreamApiError: wrong last sequence", which is - // expected when there are multiple clients, since all of them try to impose - // limits up at once. - if (err.code != "TIMEOUT" && err.code != 10071) { - console.log(`WARNING: expiring old messages - ${err}`); - } - } - } - }); - - // ensure any limits are satisfied, always by deleting old keys - private enforceLimits = throttle( - this.enforceLimitsNow, - ENFORCE_LIMITS_THROTTLE_MS, - { leading: false, trailing: true }, - ); -} - -// Support for value chunking below - -// **WARNING: Do not change these constants ever, or it will silently break -// all chunked kv and stream data that has ever been stored!!!** - -const CHUNK = "chunk"; -export const CHUNKS_HEADER = "CoCalc-Chunks"; - -function chunkedKey({ key, chunk }: { key: string; chunk?: number }) { - return `${key}.${chunk}.${CHUNK}`; -} - -function isChunkedKey(key: string) { - return key.endsWith("." + CHUNK); -} - -function getChunkCount(headers) { - if (headers == null) { - return 0; - } - for (const [key, value] of headers) { - if (key == CHUNKS_HEADER) { - return parseInt(value[0]); - } - } - return 0; -} - -function parseChunkedKey(key: string): { - key: string; - index: number; -} { - if (!isChunkedKey(key)) { - return { key, index: 0 }; - } - const v = key.split("."); - return { - key: v.slice(0, v.length - 2).join("."), - index: parseInt(v[v.length - 2]), - }; -} - -// The put function built into jetstream doesn't support -// setting headers, but we set headers for doing chunking. -// So we have to rewrite their put. I attempted to upstream this: -// https://github.com/nats-io/nats.js/issues/217 -// This was explicitly soundly rejected by the NATS developers. -// It's thus important that we unit test this, which is done in -// packages/backend/nats/test/sync/chunk.test.ts -// right now. I think it is highly unlikely NATS will break using -// headers in some future version, based on how KV is implemented -// on top of lower level primitives. However, if they do, we will -// fork whatever part of NATS that does, and maintain it. The code -// is easy to work with and understand. - -// Second, the put function in nats.js doesn't support setting a timeout, -// so that's another thing done below. Upstream: -// https://github.com/nats-io/nats.js/issues/268 -async function jetstreamPut( - kv, - k: string, - data, - opts: any = {}, -): Promise { - const ek = kv.encodeKey(k); - kv.validateKey(ek); - - const o = { timeout: opts.timeout } as any; - if (opts.previousSeq !== undefined) { - const h = createHeaders(); - o.headers = h; - // PubHeaders.ExpectedLastSubjectSequenceHdr is 'Nats-Expected-Last-Subject-Sequence', but - // PubHeaders is defined only internally to jetstream, so I copy/pasted this here. - h.set("Nats-Expected-Last-Subject-Sequence", `${opts.previousSeq}`); - } - if (opts.headers !== undefined) { - for (const [key, value] of opts.headers) { - if (o.headers == null) { - o.headers = createHeaders(); - } - o.headers.set(key, value[0]); - } - } - try { - await waitUntilConnected(); - const pa = await kv.js.publish(kv.subjectForKey(ek, true), data, o); - return pa.seq; - } catch (err) { - return Promise.reject(err); - } -} - -// see https://github.com/nats-io/nats.js/issues/246 -// In particular, we need this just to be able to support -// base64 encoded keys! - -// upstream is: /^[-/=.\w]+$/; - -const validKeyRe = /^[^\u0000\s*>]+$/; -function validateKey(k: string) { - if (k.startsWith(".") || k.endsWith(".") || !validKeyRe.test(k)) { - throw new Error(`invalid key: ${k}`); - } -} - -// upstream is: /^[-/=.>*\w]+$/; -const validSearchKey = /^[^\u0000\s]+$/; -export function validateSearchKey(k: string) { - if (k.startsWith(".") || k.endsWith(".") || !validSearchKey.test(k)) { - throw new Error(`invalid key: ${k}`); - } -} diff --git a/src/packages/nats/sync/kv.ts b/src/packages/nats/sync/kv.ts deleted file mode 100644 index 62ce949bc0..0000000000 --- a/src/packages/nats/sync/kv.ts +++ /dev/null @@ -1,186 +0,0 @@ -/* -Async Consistent Centralized Key Value Store - -NOTE: I think this isn't used by anything actually. Note it doesn't emit -change events. Maybe we should delete this? - -DEVELOPMENT: - -~/cocalc/src/packages/backend$ n -Welcome to Node.js v18.17.1. -Type ".help" for more information. -> t = await require("@cocalc/backend/nats/sync").kv({name:'test'}) - -*/ - -import { EventEmitter } from "events"; -import { type NatsEnv, type Location } from "@cocalc/nats/types"; -import { reuseInFlight } from "@cocalc/util/reuse-in-flight"; -import { GeneralKV, type KVLimits } from "./general-kv"; -import { jsName, localLocationName } from "@cocalc/nats/names"; -import refCache from "@cocalc/util/refcache"; -import { getEnv } from "@cocalc/nats/client"; -import type { JSONValue } from "@cocalc/util/types"; -import type { ValueType } from "@cocalc/nats/types"; -import { decodeBase64, encodeBase64 } from "@cocalc/nats/util"; -import jsonStableStringify from "json-stable-stringify"; - -export interface KVOptions extends Location { - name: string; - env?: NatsEnv; - limits?: Partial; - noCache?: boolean; - desc?: JSONValue; - valueType?: ValueType; -} - -export class KV extends EventEmitter { - generalKV?: GeneralKV; - name: string; - private prefix: string; - - constructor(options: KVOptions) { - super(); - const { name, account_id, project_id, env, limits, valueType } = options; - // name of the jetstream key:value store. - const kvname = jsName({ account_id, project_id }); - this.name = name + localLocationName(options); - if (env == null) { - throw Error("env must be defined"); - } - this.prefix = encodeBase64(this.name); - this.generalKV = new GeneralKV({ - name: kvname, - filter: `${this.prefix}.>`, - env, - limits, - valueType, - }); - this.init(); - return new Proxy(this, { - deleteProperty(target, prop) { - if (typeof prop == "string") { - target.delete(prop); - return true; - } else { - return false; - } - }, - set(target, prop, value) { - prop = String(prop); - if (prop == "_eventsCount" || prop == "_events" || prop == "close") { - target[prop] = value; - return true; - } - if (target[prop] != null) { - throw Error(`method name '${prop}' is read only`); - } - target.set(prop, value); - return true; - }, - get(target, prop) { - return target[String(prop)] ?? target.get(String(prop)); - }, - }); - } - - init = reuseInFlight(async () => { - if (this.generalKV == null) { - throw Error("closed"); - } - await this.generalKV.init(); - }); - - close = () => { - if (this.generalKV == null) { - return; - } - this.generalKV.close(); - delete this.generalKV; - this.emit("closed"); - this.removeAllListeners(); - }; - - delete = async (key: string) => { - if (this.generalKV == null) { - throw Error("closed"); - } - await this.generalKV.delete(`${this.prefix}.${encodeBase64(key)}`); - }; - - // delete everything - clear = async () => { - if (this.generalKV == null) { - throw Error("closed"); - } - await this.generalKV.clear(); - }; - - // server assigned time - time = (key?: string): { [key: string]: Date } | Date | undefined => { - if (this.generalKV == null) { - throw Error("closed"); - } - return this.generalKV.time( - key ? `${this.prefix}.${encodeBase64(key)}` : undefined, - ); - }; - - get = (key: string): T | undefined => { - if (this.generalKV == null) { - throw Error("closed"); - } - return this.generalKV.get(`${this.prefix}.${encodeBase64(key)}`); - }; - - getAll = (): { [key: string]: T } => { - if (this.generalKV == null) { - throw Error("closed"); - } - const obj = this.generalKV.getAll(); - const x: any = {}; - for (const k in obj) { - const h = this.generalKV.headers(k); - if (h?.key == null) { - throw Error(`missing header for key ${k}`); - } - const key = decodeBase64(h.key); - x[key] = obj[k]; - } - return x; - }; - - set = async (key: string, value: T) => { - if (this.generalKV == null) { - throw Error("closed"); - } - await this.generalKV.set(`${this.prefix}.${encodeBase64(key)}`, value, { - headers: { key: encodeBase64(key) }, - }); - }; -} - -export function userKvKey(options: KVOptions) { - if (!options.name) { - throw Error("name must be specified"); - } - const { env, ...x } = options; - return jsonStableStringify(x); -} - -export const cache = refCache({ - name: "kv", - createKey: userKvKey, - createObject: async (opts) => { - if (opts.env == null) { - opts.env = await getEnv(); - } - const k = new KV(opts); - await k.init(); - return k; - }, -}); - -export async function kv(options: KVOptions): Promise> { - return await cache(options); -} diff --git a/src/packages/nats/sync/stream.ts b/src/packages/nats/sync/stream.ts deleted file mode 100644 index 027ac0d681..0000000000 --- a/src/packages/nats/sync/stream.ts +++ /dev/null @@ -1,1111 +0,0 @@ -/* -Consistent Centralized Event Stream = ordered list of messages - -DEVELOPMENT: - -# note the package directory!! -~/cocalc/src/packages/backend n -Welcome to Node.js v18.17.1. -Type ".help" for more information. -> s = await require("@cocalc/backend/nats/sync").stream({name:'test'}) - - -> env = await require("@cocalc/backend/nats/env").getEnv(); a = require("@cocalc/nats/sync/stream"); s = new a.Stream({name:'test',env,subjects:'foo',filter:'foo'}); await s.init(); - - -With browser client using a project: - -# in browser -> s = await cc.client.nats_client.stream({project_id:cc.current().project_id,name:'foo'}) - -# in node: -> env = await require("@cocalc/backend/nats/env").getEnv(); a = require("@cocalc/nats/sync/stream"); s = await a.stream({project_id:cc.current().project_id,name:'foo', env}) - - -# Involving limits: - -> env = await require("@cocalc/backend/nats/env").getEnv(); a = require("@cocalc/nats/sync/stream"); s = await a.stream({project_id:cc.current().project_id,name:'foo', env, limits:{max_msgs:5,max_age:1000*15,max_bytes:10000,max_msg_size:1000}}) -> s.getAll() - -In browser: -> s = await cc.client.nats_client.stream({project_id:cc.current().project_id, name:'foo',limits:{max_msgs:5,max_age:1000*15,max_bytes:10000,max_msg_size:1000}}) - -TODO: - - maybe the limits and other config should be stored in a KV store so - they are sync'd between clients automatically. That's what NATS surely - does internally. - - -*/ - -import { EventEmitter } from "events"; -import { type NatsEnv, type ValueType } from "@cocalc/nats/types"; -import { - jetstreamManager, - jetstream, - type JetStreamPublishOptions, - AckPolicy, -} from "@nats-io/jetstream"; -import { reuseInFlight } from "@cocalc/util/reuse-in-flight"; -import { jsName, streamSubject } from "@cocalc/nats/names"; -import { - getMaxPayload, - waitUntilConnected, - isConnected, - millis, - encodeBase64, - nanos, -} from "@cocalc/nats/util"; -import { delay } from "awaiting"; -import { throttle } from "lodash"; -import { isNumericString } from "@cocalc/util/misc"; -import { map as awaitMap } from "awaiting"; -import refCache from "@cocalc/util/refcache"; -import { type JsMsg } from "@nats-io/jetstream"; -import { getEnv } from "@cocalc/nats/client"; -import type { JSONValue } from "@cocalc/util/types"; -import { headers as createHeaders } from "@nats-io/nats-core"; -import { CHUNKS_HEADER } from "./general-kv"; -import jsonStableStringify from "json-stable-stringify"; -import { asyncDebounce } from "@cocalc/util/async-utils"; -import { waitUntilReady } from "@cocalc/nats/tiered-storage/client"; -import { COCALC_MESSAGE_ID_HEADER, type RawMsg } from "./ephemeral-stream"; - -const PUBLISH_TIMEOUT = 15000; - -class PublishRejectError extends Error { - code: string; - mesg: any; - subject?: string; - limit?: string; -} - -const MAX_PARALLEL = 50; - -const CONNECTION_CHECK_INTERVAL = 5000; - -// Making this too LONG is very dangerous since it increases load on the server. -// Making it too short means it has to get recreated whenever the network connection drops. -const EPHEMERAL_CONSUMER_THRESH = 45 * 1000; - -//console.log("!!! ALERT: USING VERY SHORT CONSUMERS FOR TESTING!"); -//const EPHEMERAL_CONSUMER_THRESH = 3 * 1000; - -// We re-implement exactly the same stream-wide limits that NATS has, -// but instead, these are for the stream **with the given filter**. -// Limits are enforced by all clients *client side* within ENFORCE_LIMITS_THROTTLE_MS -// of any client making changes. It is important to significantly throttle -// this, as it can be expensive to the server. -// Also, obviously the true limit is the minimum of the full NATS stream limits and -// these limits. - -// Significant throttling is VERY, VERY important, since purging old messages frequently -// seems to put a very significant load on NATS! -export const ENFORCE_LIMITS_THROTTLE_MS = process.env.COCALC_TEST_MODE - ? 100 - : 45000; - -export interface FilteredStreamLimitOptions { - // How many messages may be in a Stream, oldest messages will be removed - // if the Stream exceeds this size. -1 for unlimited. - max_msgs: number; - // Maximum age of any message in the stream matching the filter, - // expressed in milliseconds. 0 for unlimited. - // **Note that max_age is in milliseoncds, NOT nanoseconds like in Nats!!!** - max_age: number; - // How big the Stream may be, when the combined stream size matching the filter - // exceeds this old messages are removed. -1 for unlimited. - // This is enforced only on write, so if you change it, it only applies - // to future messages. - max_bytes: number; - // The largest message that will be accepted by the Stream. -1 for unlimited. - max_msg_size: number; - - // Attempting to publish a message that causes this to be exceeded - // throws an exception instead. -1 (or 0) for unlimited - // For dstream, the messages are explicitly rejected and the client - // gets a "reject" event emitted. E.g., the terminal running in the project - // writes [...] when it gets these rejects, indicating that data was - // dropped. - max_bytes_per_second: number; - max_msgs_per_second: number; -} - -export interface StreamOptions { - // what it's called by us - name: string; - // actually name of the jetstream in NATS - jsname: string; - // subject = default subject used for publishing; defaults to filter if filter doesn't have any wildcard - subject?: string; - subjects: string | string[]; - filter?: string; - env?: NatsEnv; - natsStreamOptions?; - limits?: Partial; - // only load historic messages starting at the given seq number. - start_seq?: number; - desc?: JSONValue; - valueType?: ValueType; -} - -export class Stream extends EventEmitter { - public readonly name: string; - public readonly jsname: string; - private natsStreamOptions?; - private limits: FilteredStreamLimitOptions; - private bytesSent: { [time: number]: number } = {}; - private subjects: string | string[]; - private filter?: string; - private subject?: string; - private env: NatsEnv; - private _start_seq?: number; - private js; - private jsm; - private stream?; - private watch?; - public readonly valueType: ValueType; - // seq = the last sequence number of a message on this stream that we received - // from NATS. This is used only for resuming without missing anything. - private last_seq: number = 1; - - // don't do "this.raw=" or "this.messages=" anywhere in this class!!! - public readonly raw: JsMsg[][] = []; - public readonly messages: T[] = []; - private consumer?; - - constructor({ - name, - jsname, - env, - subject, - subjects, - filter, - natsStreamOptions, - limits, - start_seq, - valueType = "json", - }: StreamOptions) { - super(); - - this.valueType = valueType; - if (env == null) { - throw Error("bug: env must be specified"); - } - this.env = env; - // create a jetstream client so we can publish to the stream - this.js = jetstream(env.nc); - this.name = name; - this.jsname = jsname; - this.natsStreamOptions = natsStreamOptions; - if ( - subject == null && - filter != null && - !filter.includes("*") && - !filter.includes(">") - ) { - subject = filter; - } - this.subject = subject; - this.subjects = typeof subjects == "string" ? [subjects] : subjects; - if (this.subjects.length == 0) { - throw Error("subjects must be at least one string"); - } - this.filter = filter; - this._start_seq = start_seq; - this.limits = { - max_msgs: -1, - max_age: 0, - max_bytes: -1, - max_msg_size: -1, - max_bytes_per_second: -1, - max_msgs_per_second: -1, - ...limits, - }; - return new Proxy(this, { - get(target, prop) { - return typeof prop == "string" && isNumericString(prop) - ? target.get(parseInt(prop)) - : target[String(prop)]; - }, - }); - } - - init = reuseInFlight(async () => { - if (this.stream != null) { - return; - } - this.jsm = await jetstreamManager(this.env.nc); - const options = { - subjects: this.subjects, - compression: "s2", - // our streams are relatively small so a longer duplicate window than 2 minutes seems ok. - duplicate_window: nanos(1000 * 60 * 5), - ...this.natsStreamOptions, - }; - await waitUntilReady(this.jsname); - try { - this.stream = await this.jsm.streams.add({ - name: this.jsname, - ...options, - }); - } catch (err) { - // probably already exists, so try to modify to have the requested properties. - this.stream = await this.jsm.streams.update(this.jsname, options); - } - await this.fetchInitialData({ - // do not broadcast initial load - noEmit: true, - }); - if (this.stream == null) { - // closed *during* initial load - return; - } - this.ensureConnected(); - this.watchForNewData(); - }); - - private ensureConnected = async () => { - if (this.env.nc.on != null) { - this.env.nc.on("reconnect", this.restartConsumer); - this.env.nc.on("status", ({ type }) => { - if (type == "reconnect") { - this.ensureConsumerIsValid(); - } - }); - } else { - this.checkConsumerOnReconnect(); - } - while (this.stream != null) { - if (!(await isConnected())) { - await this.restartConsumer(); - } - await delay(CONNECTION_CHECK_INTERVAL); - } - }; - - // We can't do this all the time due to efficiency - // (see https://www.synadia.com/blog/jetstream-design-patterns-for-scale) - // but we **MUST do it around connection events** - // no matter what the docs say or otherwise!!!! - // At least with the current nats.js drivers. - // Often nats does recreate the consumer, but sometimes it doesn't. - // I can't nail down which is which. - private ensureConsumerIsValid = asyncDebounce( - async () => { - await waitUntilConnected(); - await delay(2000); - const isValid = await this.isConsumerStillValid(); - if (!isValid) { - if (this.stream == null) { - return; - } - console.log( - `nats stream: ${this.name} -- consumer not valid, so recreating`, - ); - await this.restartConsumer(); - } - }, - 3000, - { leading: false, trailing: true }, - ); - - private checkConsumerOnReconnect = async () => { - while (this.stream != null) { - try { - for await (const { type } of await this.env.nc.status()) { - if (type == "reconnect") { - await this.ensureConsumerIsValid(); - } - } - } catch { - await delay(15000); - await this.ensureConsumerIsValid(); - } - } - }; - - private isConsumerStillValid = async () => { - await waitUntilConnected(); - if (this.consumer == null || this.stream == null) { - return false; - } - try { - await this.consumer.info(); - return true; - } catch (err) { - console.log(`nats: consumer.info error -- ${err}`); - return false; - } - }; - - get = (n?): T | T[] => { - if (this.js == null) { - throw Error("closed"); - } - if (n == null) { - return this.getAll(); - } else { - return this.messages[n]; - } - }; - - getAll = (): T[] => { - if (this.js == null) { - throw Error("closed"); - } - return [...this.messages]; - }; - - headers = (n: number): { [key: string]: string } | undefined => { - return headersFromRawMessages(this.raw[n]); - }; - - // get server assigned global sequence number of n-th message in stream - seq = (n: number): number | undefined => { - return last(this.raw[n])?.seq; - }; - - // get server assigned time of n-th message in stream - time = (n: number): Date | undefined => { - const r = last(this.raw[n]); - if (r == null) { - return; - } - return new Date(millis(r?.info.timestampNanos)); - }; - - times = (): (Date | undefined)[] => { - const v: (Date | undefined)[] = []; - for (let i = 0; i < this.length; i++) { - v.push(this.time(i)); - } - return v; - }; - - get length(): number { - return this.messages.length; - } - - get start_seq(): number | undefined { - return this._start_seq; - } - - // WARNING: if you push multiple values at once here, then the order is NOT guaranteed - push = async (...args: T[]) => { - await awaitMap(args, MAX_PARALLEL, this.publish); - }; - - private encodeValue = (value) => { - return this.valueType == "json" ? this.env.jc.encode(value) : value; - }; - - private decodeValue = (value) => { - return this.valueType == "json" ? this.env.jc.decode(value) : value; - }; - - publish = async ( - mesg: T, - options?: Partial< - JetStreamPublishOptions & { headers: { [key: string]: string } } - >, - ) => { - if (this.js == null) { - throw Error("closed"); - } - const data = this.encodeValue(mesg); - if ( - this.limits.max_msg_size > -1 && - data.length > this.limits.max_msg_size - ) { - const err = new PublishRejectError( - `message size (=${data.length}) exceeds max_msg_size=${this.limits.max_msg_size} bytes`, - ); - err.code = "REJECT"; - err.mesg = mesg; - err.subject = this.subject; - err.limit = "max_msg_size"; - throw err; - } - this.enforceLimits(); - if (options?.msgID) { - if (options.headers) { - // also put it here so can be used to clear this.local by dstream: - options.headers[COCALC_MESSAGE_ID_HEADER] = options.msgID; - } else { - options.headers = { [COCALC_MESSAGE_ID_HEADER]: options.msgID }; - } - } - let resp; - const chunks: Buffer[] = []; - const headers: ReturnType[] = []; - // we subtract off from max_payload to leave space for headers (technically, 10 is enough) - await waitUntilConnected(); - const maxMessageSize = (await getMaxPayload()) - 1000; - //const maxMessageSize = 20; // DEV ONLY!!! - - // this may throw an exception: - enforceRateLimits({ - limits: this.limits, - bytesSent: this.bytesSent, - subject: this.subject, - data, - mesg, - }); - - if (data.length > maxMessageSize) { - // we chunk the message into blocks of size maxMessageSize, - // to fit NATS message size limits. We include a header - // so we can re-assemble the chunks later. - let data0 = data; - while (data0.length > 0) { - chunks.push(data0.slice(0, maxMessageSize)); - data0 = data0.slice(maxMessageSize); - } - const last = chunks.length; - for (let i = 1; i <= last; i++) { - const h = createHeaders(); - if (i == 1 && options?.headers != null) { - // also include custom user headers - for (const k in options.headers) { - h.append(k, `${options.headers[k]}`); - } - } - h.append(CHUNKS_HEADER, `${i}/${last}`); - headers.push(h); - } - } else { - // trivial chunk and no header needed. - chunks.push(data); - if (options?.headers != null) { - const h = createHeaders(); - for (const k in options.headers) { - h.append(k, `${options.headers[k]}`); - } - headers.push(h); - } - } - - for (let i = 0; i < chunks.length; i++) { - try { - await waitUntilConnected(); - resp = await this.js.publish(this.subject, chunks[i], { - timeout: PUBLISH_TIMEOUT, - ...options, - // if options contains a msgID, we must make it different for each chunk; - // otherwise, all but the first chunk is discarded! - ...(options?.msgID == null - ? undefined - : { msgID: `${options.msgID}-${i}` }), - headers: headers[i], - }); - // NOTE: the resp we get back contains a sequence number and GUARANTEES that the - // data has been written to disk by the nats server. - } catch (err) { - if (err.code == "MAX_PAYLOAD_EXCEEDED") { - // nats rejects due to payload size - const err2 = new PublishRejectError(`${err}`); - err2.code = "REJECT"; - err2.mesg = mesg; - err2.subject = this.subject; - throw err2; - } else { - throw err; - } - } - } - this.enforceLimits(); - return resp; - }; - - private getConsumer = async ({ start_seq }: { start_seq?: number } = {}) => { - // NOTE: do not cache or modify this in this function getConsumer, - // since it is also called by load and when reconnecting. - const js = jetstream(this.env.nc); - const jsm = await jetstreamManager(this.env.nc); - // making an ephemeral consumer, which is automatically destroyed by NATS - // after inactive_threshold. At that point we MUST reset state. - const options = { - filter_subject: this.filter, - ack_policy: AckPolicy.Explicit, - inactive_threshold: nanos(EPHEMERAL_CONSUMER_THRESH), - }; - let startOptions; - if (start_seq == null && this._start_seq != null) { - start_seq = this._start_seq; - } - if (start_seq != null) { - startOptions = { - deliver_policy: "by_start_sequence", - opt_start_seq: start_seq, - }; - } else { - startOptions = {}; - } - const { name } = await jsm.consumers.add(this.jsname, { - ...options, - ...startOptions, - }); - if (this.consumer != null) { - try { - await this.consumer.delete(); - } catch { - // this absolutely *can* throw an error if the consumer was already deleted - // automatically on the server for some reason! - } - delete this.consumer; - } - this.consumer = await js.consumers.get(this.jsname, name); - return this.consumer; - }; - - private fetchInitialData = async ({ - options, - noEmit, - }: { - options?; - noEmit: boolean; - }) => { - const consumer = await this.getConsumer(options); - // grab the messages. This should be very efficient since it - // internally grabs them in batches. - // This code seems exactly necessary and efficient, and most - // other things I tried ended too soon or hung. See also - // comment in getAllFromKv about permissions. - while (true) { - // https://www.synadia.com/blog/jetstream-design-patterns-for-scale says - // "Consumer info is also frequently misused as a method for clients to check - // for pending messages. Instead, get this metadata from the last message - // fetched to avoid the unnecessary overhead of consumer info." - const info = await consumer.info(); - if (info.num_pending == 0) { - return consumer; - } - const fetch = await consumer.fetch({ max_messages: 1000 }); - this.watch = fetch; - let chunks: JsMsg[] = []; - for await (const mesg of fetch) { - mesg.ack(); - let isChunked = false; - // chunked? - if (mesg.headers != null) { - for (const [key, value] of mesg.headers) { - if (key == CHUNKS_HEADER) { - isChunked = true; - const v = value[0].split("/"); - if (v[0] == "1") { - // first chunk - chunks = [mesg]; - } else { - chunks.push(mesg); - } - if (v[0] == v[1]) { - // have all the chunks - this.handle(chunks, noEmit); - this.enforceLimits(); - } - } - } - } - if (!isChunked) { - // not chunked - this.handle([mesg], noEmit); - this.enforceLimits(); - } - const pending = mesg.info.pending; - if (pending <= 0) { - return consumer; - } - } - } - }; - - private watchForNewData = async () => { - if (this.stream == null) { - // closed *during* initial load - return; - } - // STAGE 2: Watch for new mesg. It's the same consumer though, - // so we are **guaranteed** not to miss anything. - this.enforceLimits(); - this.emit("connected"); - const consume = await this.consumer.consume(); - this.watch = consume; - let chunks: JsMsg[] = []; - for await (const mesg of consume) { - mesg.ack(); - let isChunked = false; - // chunked? - for (const [key, value] of mesg.headers ?? []) { - if (key == CHUNKS_HEADER) { - isChunked = true; - const v = value[0].split("/"); - if (v[0] == "1") { - // first chunk - chunks = [mesg]; - } else { - chunks.push(mesg); - } - if (v[0] == v[1]) { - // have all the chunks - this.handle(chunks, false); - this.enforceLimits(); - } - } - } - if (!isChunked) { - // not chunked - this.handle([mesg], false); - this.enforceLimits(); - } - } - }; - - // this does not throw an exception -- it keeps trying until success. - private restartConsumer = reuseInFlight(async (): Promise => { - await waitUntilConnected(); - if (this.stream == null) { - return; - } - // make a new consumer, starting AFTER the last event we retrieved - let d = 250; - while (true) { - this.watch?.stop(); // stop current watch (if any) - // make new one: - const start_seq = this.last_seq + 1; - try { - // noEmit = false since we DO want to emit an event for any changes at this point!! - this.consumer = await this.fetchInitialData({ - options: { start_seq }, - noEmit: false, - }); - if (this.stream == null) { - // closed - return; - } - this.watchForNewData(); - return; - } catch (err) { - d = Math.min(30000, d * 1.3) + Math.random(); - await delay(d); - } - } - }); - - private decode = (raw: JsMsg[]) => { - if (raw.length == 0) { - throw Error("must be at least one chunk"); - } - const data = - raw.length == 1 - ? raw[0].data - : // @ts-ignore -- for nextjs prod - Buffer.concat(raw.map((mesg) => mesg.data)); - - try { - return this.decodeValue(data); - } catch (_err) { - // console.log("WARNING: issue decoding nats stream data", { data, _err }); - // better than crashing: - return data; - } - }; - - private handle = (raw: JsMsg[], noEmit: boolean) => { - const mesg = this.decode(raw); - this.messages.push(mesg); - this.raw.push(raw); - for (const { seq } of raw) { - this.last_seq = Math.max(this.last_seq, seq); - } - if (!noEmit) { - this.emit("change", mesg, raw); - } - }; - - close = () => { - if (this.watch == null) { - return; - } - (async () => { - try { - await this.consumer?.delete(); - delete this.consumer; - } catch { - // this absolutely *can* throw an error if the consumer was already deleted - // automatically on the server for some reason! - } - })(); - this.watch.stop(); - delete this.watch; - delete this.stream; - delete this.jsm; - delete this.js; - this.emit("closed"); - this.removeAllListeners(); - this.env.nc.removeListener?.("reconnect", this.restartConsumer); - }; - - // delete all messages up to and including the - // one at position index, i.e., this.messages[index] - // is deleted. - // NOTE: other clients will NOT see the result of a purge, - // except when done implicitly via limits, since all clients - // truncate this.raw and this.messages directly. - purge = async ({ index = -1 }: { index?: number } = {}) => { - // console.log("purge", { index }); - if (index >= this.raw.length - 1 || index == -1) { - index = this.raw.length - 1; - // everything - // console.log("purge everything"); - await this.jsm.streams.purge(this.jsname, { - filter: this.filter, - }); - } else { - const { seq } = last(this.raw[index + 1]); - await this.jsm.streams.purge(this.jsname, { - filter: this.filter, - seq, - }); - } - this.messages.splice(0, index + 1); - this.raw.splice(0, index + 1); - }; - - // get stats for this stream using data we have already downloaded, BUT - // only considering messages with sequence >= start_seq. - stats = ({ - start_seq = 1, - }: { - start_seq?: number; - }): { count: number; bytes: number } | undefined => { - if (this.raw == null) { - return; - } - let count = 0; - let bytes = 0; - for (const raw of this.raw) { - const seq = last(raw)?.seq; - if (seq == null) { - continue; - } - if (seq < start_seq) { - continue; - } - count += 1; - for (const r of raw) { - bytes += r.data.length; - } - } - return { count, bytes }; - }; - - private enforceLimitsNow = reuseInFlight(async () => { - if (this.jsm == null) { - return; - } - const index = enforceLimits({ - messages: this.messages, - raw: this.raw, - limits: this.limits, - }); - // console.log("enforceLImits", { index }); - if (index > -1) { - try { - // console.log("imposing limit via purge ", { index }); - await this.purge({ index }); - } catch (err) { - if (err.code != "TIMEOUT") { - console.log(`WARNING: purging old messages - ${err}`); - } - } - } - }); - - // ensure any limits are satisfied, i.e., delete old messages. - private enforceLimits = throttle( - this.enforceLimitsNow, - ENFORCE_LIMITS_THROTTLE_MS, - { leading: false, trailing: true }, - ); - - // load older messages starting at start_seq - load = async ({ - start_seq, - noEmit, - }: { - start_seq: number; - noEmit?: boolean; - }) => { - if (this._start_seq == null || this._start_seq <= 1) { - // we already loaded everything on initialization; there can't be anything older. - return; - } - const consumer = await this.getConsumer({ start_seq }); - // https://www.synadia.com/blog/jetstream-design-patterns-for-scale says - // "Consumer info is also frequently misused as a method for clients to check - // for pending messages. Instead, get this metadata from the last message - // fetched to avoid the unnecessary overhead of consumer info." - const info = await consumer.info(); - const fetch = await consumer.fetch(); - let i = 0; - // grab the messages. This should be very efficient since it - // internally grabs them in batches. - const raw: JsMsg[][] = []; - const messages: T[] = []; - const cur = last(this.raw[0])?.seq; - let chunks: JsMsg[] = []; - for await (const mesg of fetch) { - if (cur != null && mesg.seq >= cur) { - break; - } - - let isChunked = false; - // chunked? - for (const [key, value] of mesg.headers ?? []) { - if (key == CHUNKS_HEADER) { - isChunked = true; - const v = value[0].split("/"); - if (v[0] == "0") { - // first chunk - chunks = [mesg]; - } else { - chunks.push(mesg); - } - if (v[0] == v[1]) { - // have all the chunks - raw.push(chunks); - messages.push(this.decodeValue(chunks)); - } - } - } - if (!isChunked) { - // not chunked - raw.push([mesg]); - messages.push(this.decode([mesg])); - } - i += 1; - if (i >= info.num_pending) { - break; - } - } - // mutate the array this.raw and this.messages by splicing in - // raw and messages at the beginning: - this.raw.unshift(...raw); - this.messages.unshift(...messages); - if (!noEmit) { - for (let i = 0; i < raw.length; i++) { - this.emit("change", messages[i], raw[i]); - } - } - this._start_seq = start_seq; - }; -} - -// One stream for each account and one for each project. -// Use the filters to restrict, e.g., to message about a particular file. - -export interface UserStreamOptions { - name: string; - env?: NatsEnv; - account_id?: string; - project_id?: string; - limits?: Partial; - start_seq?: number; - noCache?: boolean; - desc?: JSONValue; - valueType?: ValueType; -} - -export function userStreamOptionsKey(options: UserStreamOptions) { - if (!options.name) { - throw Error("name must be specified"); - } - const { env, ...x } = options; - return jsonStableStringify(x); -} - -export const cache = refCache({ - name: "stream", - createKey: userStreamOptionsKey, - createObject: async (options) => { - if (options.env == null) { - options.env = await getEnv(); - } - const { account_id, project_id, name } = options; - const jsname = jsName({ account_id, project_id }); - const subjects = streamSubject({ account_id, project_id }); - const filter = subjects.replace(">", encodeBase64(name)); - const stream = new Stream({ - ...options, - name, - jsname, - subjects, - subject: filter, - filter, - }); - await stream.init(); - return stream; - }, -}); - -export async function stream( - options: UserStreamOptions, -): Promise> { - return await cache(options); -} - -export function last(v: any[] | undefined) { - if (v === undefined) { - return v; - } - return v[v.length - 1]; -} - -export function enforceLimits({ - messages, - raw, - limits, -}: { - messages: any[]; - raw: (JsMsg | RawMsg)[][]; - limits: FilteredStreamLimitOptions; -}) { - const { max_msgs, max_age, max_bytes } = limits; - // we check with each defined limit if some old messages - // should be dropped, and if so move limit forward. If - // it is above -1 at the end, we do the drop. - let index = -1; - const setIndex = (i, _limit) => { - // console.log("setIndex", { i, _limit }); - index = Math.max(i, index); - }; - // max_msgs - // console.log({ max_msgs, l: messages.length, messages }); - if (max_msgs > -1 && messages.length > max_msgs) { - // ensure there are at most limits.max_msgs messages - // by deleting the oldest ones up to a specified point. - const i = messages.length - max_msgs; - if (i > 0) { - setIndex(i - 1, "max_msgs"); - } - } - - // max_age - if (max_age > 0) { - // expire messages older than max_age nanoseconds - const recent = raw[raw.length - 1]; - if (recent != null) { - // to avoid potential clock skew, we define *now* as the time of the most - // recent message. For us, this should be fine, since we only impose limits - // when writing new messages, and none of these limits are guaranteed. - const nanos = last(recent).info?.timestampNanos; - const now = nanos ? nanos / 10 ** 6 : last(recent).timestamp; - if (now) { - const cutoff = now - max_age; - for (let i = raw.length - 1; i >= 0; i--) { - const nanos = last(raw[i]).info?.timestampNanos; - const t = nanos ? nanos / 10 ** 6 : last(raw[i]).timestamp; - if (t < cutoff) { - // it just went over the limit. Everything before - // and including the i-th message must be deleted. - setIndex(i, "max_age"); - break; - } - } - } - } - } - - // max_bytes - if (max_bytes >= 0) { - let t = 0; - for (let i = raw.length - 1; i >= 0; i--) { - for (const r of raw[i]) { - t += r.data.length; - } - if (t > max_bytes) { - // it just went over the limit. Everything before - // and including the i-th message must be deleted. - setIndex(i, "max_bytes"); - break; - } - } - } - - return index; -} - -export function enforceRateLimits({ - limits, - bytesSent, - subject, - data, - mesg, -}: { - limits: { max_bytes_per_second: number; max_msgs_per_second: number }; - bytesSent: { [time: number]: number }; - subject?: string; - data; - mesg; -}) { - const now = Date.now(); - if (!(limits.max_bytes_per_second > 0) && !(limits.max_msgs_per_second > 0)) { - return; - } - - const cutoff = now - 1000; - let bytes = 0, - msgs = 0; - for (const t in bytesSent) { - if (parseInt(t) < cutoff) { - delete bytesSent[t]; - } else { - bytes += bytesSent[t]; - msgs += 1; - } - } - if ( - limits.max_bytes_per_second > 0 && - bytes + data.length > limits.max_bytes_per_second - ) { - const err = new PublishRejectError( - `bytes per second limit of ${limits.max_bytes_per_second} exceeded`, - ); - err.code = "REJECT"; - err.mesg = mesg; - err.subject = subject; - err.limit = "max_bytes_per_second"; - throw err; - } - if (limits.max_msgs_per_second > 0 && msgs > limits.max_msgs_per_second) { - const err = new PublishRejectError( - `messages per second limit of ${limits.max_msgs_per_second} exceeded`, - ); - err.code = "REJECT"; - err.mesg = mesg; - err.subject = subject; - err.limit = "max_msgs_per_second"; - throw err; - } - bytesSent[now] = data.length; -} - -export function headersFromRawMessages(messages?: (JsMsg | RawMsg)[]) { - if (messages == null) { - return undefined; - } - const x: { [key: string]: string } = {}; - let hasHeaders = false; - for (const raw of messages) { - const { headers } = raw; - if (headers == null) { - continue; - } - for (const [key, value] of headers) { - x[key] = value[0]; - hasHeaders = true; - } - } - return hasHeaders ? x : undefined; -} diff --git a/src/packages/nats/system.ts b/src/packages/nats/system.ts deleted file mode 100644 index 73b7b94a6f..0000000000 --- a/src/packages/nats/system.ts +++ /dev/null @@ -1,28 +0,0 @@ -/* -This is a key:value store that hubs can write to and all -users of cocalc can read from. It contains: - -- recent system-wide notifications that haven't been canceled - system.notifications.{random} - -- the customize data: what used to be the /customize http endpoint - this makes it so clients get notified whenever anything changes, e.g., when the - recommended or required version changes, and can act accordingly. The UI - can also change. - -Development: - -~/cocalc/src/packages/server$ n -Welcome to Node.js v18.17.1. -Type ".help" for more information. -> env = await require("@cocalc/backend/nats/env").getEnv(); a = require("@cocalc/nats/system"); s = new a.SystemKv(env); await s.init(); - -*/ - -import { GeneralKV } from "@cocalc/nats/sync/general-kv"; - -export class SystemKv extends GeneralKV { - constructor(env) { - super({ env, name: "system" }); - } -} diff --git a/src/packages/nats/tiered-storage/client.ts b/src/packages/nats/tiered-storage/client.ts deleted file mode 100644 index 3144246c54..0000000000 --- a/src/packages/nats/tiered-storage/client.ts +++ /dev/null @@ -1,156 +0,0 @@ -/* -Client for the tiered server. -*/ - -import type { Info, Command } from "./server"; -import { tieredStorageSubject } from "./server"; -import { getEnv, getLogger } from "@cocalc/nats/client"; -import { type Location } from "@cocalc/nats/types"; -import { waitUntilConnected } from "@cocalc/nats/util"; -import { reuseInFlight } from "@cocalc/util/reuse-in-flight"; -import { delay } from "awaiting"; - -const logger = getLogger("tiered-storage:client"); - -const TIMEOUT = { - restore: 90 * 1000, - backup: 90 * 1000, - archive: 90 * 1000, - info: 15 * 1000, -}; - -// Server will never ever archive anything that was active -// in less than this time, no matter what. Usually, it's much longer. -// This is what clients get to assume to reduce load. -export const MIN_ARCHIVE_TIME = 6 * 60 * 1000 * 60; // 6 hours - -const readyUntilAtLeast: { [location: string]: number } = {}; - -function toTime(s): number { - if (s == null) { - return 0; - } - return new Date(s).valueOf(); -} - -// 0 = never active -function lastActive(info: Info): number { - return Math.max( - toTime(info.nats.stream?.state.last_ts), - toTime(info.nats.kv?.state.last_ts), - ); -} - -// 0 = never backed up -// function lastBackup(info: Info): number { -// if (info.backup.stream == null) { -// return toTime(info.backup.kv?.ts); -// } -// if (info.backup.kv == null) { -// return toTime(info.backup.stream?.ts); -// } -// return Math.min(toTime(info.backup.stream?.ts), toTime(info.backup.kv?.ts)); -// } - -function stringToLocation(s: string): Location | null { - if (s.startsWith("account-")) { - return { account_id: s.slice("account-".length) }; - } else if (s.startsWith("project-")) { - return { project_id: s.slice("project-".length) }; - } - return null; -} - -export const waitUntilReady = reuseInFlight( - async (location: Location | string | null): Promise => { - if(location == null) { - return; - } - if (typeof location == "string") { - location = stringToLocation(location); - if (location == null) { - return; - } - } - if (process.env.COCALC_TEST_MODE) { - // no tiered storage in test mode - return; - } - const key = tieredStorageSubject(location); - if (readyUntilAtLeast[key] >= Date.now()) { - // definitely available - return; - } - logger.debug("waitUntilReady", location); - let d = 1000; - while (true) { - await waitUntilConnected(); - const locationInfo = await info(location); - const active = lastActive(locationInfo); - if (locationInfo.nats.kv != null || locationInfo.nats.stream != null) { - // it's live -- only question is how long is it guaranteed - readyUntilAtLeast[key] = MIN_ARCHIVE_TIME + active; - return; - } - // it's NOT live or it never existed - if ( - locationInfo.backup.kv == null && - locationInfo.backup.stream == null - ) { - // never existed, so will get created in the future - readyUntilAtLeast[key] = MIN_ARCHIVE_TIME + Date.now(); - return; - } - try { - // we have to restore - await restore(location); - } catch (err) { - // it may just be that two clients tried to restore at the same time and - // one wins. - d = Math.min(30000, d * 1.25 + Math.random()); - logger.debug( - `waitUntilReady -- WARNING: problem restoring archived nats data -- will retry in ${d}ms -- ${err}`, - ); - await delay(d); - continue; - } - // success - readyUntilAtLeast[key] = MIN_ARCHIVE_TIME + Date.now(); - return; - } - }, -); - -export async function restore(location: Location): Promise { - logger.debug("restore", location); - return (await call("restore", location)) as Info; -} - -export async function archive(location: Location): Promise { - logger.debug("archive", location); - return (await call("archive", location)) as Info; -} - -export async function backup(location: Location): Promise { - logger.debug("backup", location); - return (await call("backup", location)) as Info; -} - -export async function info(location: Location): Promise { - logger.debug("info", location); - return (await call("info", location)) as Info; -} - -async function call(command: Command, location: Location) { - const subject = tieredStorageSubject(location); - const { nc, jc } = await getEnv(); - const resp = await nc.request(subject, jc.encode({ command }), { - timeout: TIMEOUT[command], - }); - const x = jc.decode(resp.data); - if (x?.error) { - throw Error(x.error); - } else { - return x; - } -} diff --git a/src/packages/nats/tiered-storage/server.ts b/src/packages/nats/tiered-storage/server.ts deleted file mode 100644 index 6dffd5685b..0000000000 --- a/src/packages/nats/tiered-storage/server.ts +++ /dev/null @@ -1,166 +0,0 @@ -/* -NATS service that provides tiered storage of data. - -This is pure javascript and sets the basic interface, -behavior and types for both client and server. - -See also @cocalc/server/nats/tiered-storage. -*/ - -import { getEnv, getLogger } from "@cocalc/nats/client"; -import { type Subscription } from "@nats-io/nats-core"; -import { isValidUUID } from "@cocalc/util/misc"; -import { type Location } from "@cocalc/nats/types"; -import { delay } from "awaiting"; -import { type StreamInfo } from "@nats-io/jetstream"; - -const logger = getLogger("tiered-storage:server"); - -export type State = "archived" | "restoring" | "ready"; - -export interface Info { - nats: { stream: null | StreamInfo; kv: null | StreamInfo }; - backup: { stream: null | StreamInfo; kv: null | StreamInfo }; - location: Location; -} - -export const SUBJECT = "tiered-storage"; - -export interface TieredStorage { - info: (location: Location) => Promise; - restore: (location: Location) => Promise; - archive: (location: Location) => Promise; - backup: (location: Location) => Promise; - - // shut it down - close: () => Promise; -} - -export type Command = "restore" | "archive" | "backup" | "info"; - -export function tieredStorageSubject({ account_id, project_id }: Location) { - if (account_id) { - if (project_id) { - throw Error( - "location for tiered storage must specify exactly one of account_id or project_id, but it specifies both", - ); - } - if (!isValidUUID(account_id)) { - throw Error("invalid account_id"); - } - return `${SUBJECT}.account-${account_id}.api`; - } else if (project_id) { - if (!isValidUUID(project_id)) { - throw Error("invalid project_id"); - } - return `${SUBJECT}.project-${project_id}.api`; - } else { - throw Error( - "location for tiered storage must specify exactly one of account_id or project_id, but it specifies neither", - ); - } -} - -function getLocation(subject: string): Location { - if (subject.startsWith(`${SUBJECT}.account-`)) { - return { - account_id: subject.slice( - `${SUBJECT}.account-`.length, - `${SUBJECT}.account-`.length + 36, - ), - }; - } - if (subject.startsWith(`${SUBJECT}.project-`)) { - return { - project_id: subject.slice( - `${SUBJECT}.project-`.length, - `${SUBJECT}.project-`.length + 36, - ), - }; - } - throw Error(`invalid subject -- ${subject}`); -} - -let tieredStorage: TieredStorage | null = null; -export function init(ts: TieredStorage) { - logger.debug("init"); - if (tieredStorage != null) { - throw Error("tiered-storage: init already called"); - } - tieredStorage = ts; - mainLoop(); -} - -let terminated = false; -export async function terminate() { - logger.debug("terminate"); - if (terminated) { - return; - } - terminated = true; - if (tieredStorage) { - tieredStorage.close(); - } - tieredStorage = null; -} - -async function mainLoop() { - while (!terminated) { - logger.debug("mainLoop: running..."); - try { - await run(); - } catch (err) { - const DELAY = 5000; - logger.debug(`WARNING: run error (will restart in ${DELAY}ms) -- ${err}`); - await delay(DELAY); - } - } -} - -let sub: Subscription | null = null; -export async function run() { - const { nc } = await getEnv(); - const subject = `${SUBJECT}.*.api`; - logger.debug(`run: listening on '${subject}'`); - sub = nc.subscribe(subject, { queue: "0" }); - await listen(sub); -} - -async function listen(sub) { - logger.debug("listen"); - for await (const mesg of sub) { - if (tieredStorage == null) { - throw Error("tiered storage not available"); - } - handleMessage(mesg); - } -} - -async function handleMessage(mesg) { - let resp; - const { jc } = await getEnv(); - const location = getLocation(mesg.subject); - const { command } = jc.decode(mesg.data); - - try { - if (tieredStorage == null) { - throw Error("tiered storage not available"); - } - logger.debug("handleMessage", { location, command }); - if (command == "restore") { - resp = await tieredStorage.restore(location); - } else if (command == "archive") { - resp = await tieredStorage.archive(location); - } else if (command == "backup") { - resp = await tieredStorage.backup(location); - } else if (command == "info") { - resp = await tieredStorage.info(location); - } else { - throw Error(`unknown command '${command}'`); - } - } catch (err) { - resp = { error: `${err}` }; - } - //logger.debug("handleMessage -- resp", { location, command, resp }); - mesg.respond(jc.encode(resp)); -} diff --git a/src/packages/nats/util.ts b/src/packages/nats/util.ts deleted file mode 100644 index 5c821271a6..0000000000 --- a/src/packages/nats/util.ts +++ /dev/null @@ -1,167 +0,0 @@ -import jsonStableStringify from "json-stable-stringify"; -import type { MsgHdrs } from "@nats-io/nats-core"; -import { is_array } from "@cocalc/util/misc"; -import { encode as encodeBase64, decode as decodeBase64 } from "js-base64"; -export { encodeBase64, decodeBase64 }; -import { reuseInFlight } from "@cocalc/util/reuse-in-flight"; -import { getConnection, getConnectionSync } from "./client"; -import { delay } from "awaiting"; - -// Get the number of NON-deleted keys in a nats kv store, matching a given subject: -// export async function numKeys(kv, x: string | string[] = ">"): Promise { -// let num = 0; -// for await (const _ of await kv.keys(x)) { -// num += 1; -// } -// return num; -// } - -// get everything from a KV store matching a subject pattern. -export async function getAllFromKv({ - kv, - key = ">", -}: { - kv; - key?: string | string[]; -}): Promise<{ - all: { [key: string]: any }; - revisions: { [key: string]: number }; - times: { [key: string]: Date }; - headers: { [key: string]: MsgHdrs }; -}> { - // const t = Date.now(); - // console.log("start getAllFromKv", key); - let all: any = {}; - let revisions: { [key: string]: number } = {}; - let times: { [key: string]: Date } = {}; - let headers: { [key: string]: MsgHdrs } = {}; - - if (is_array(key) && key.length > 1) { - // do all separately and combine... otherwise it hangs. - for (const k of key) { - const x = await getAllFromKv({ kv, key: k }); - all = { ...all, ...x.all }; - revisions = { ...revisions, ...x.revisions }; - times = { ...times, ...x.times }; - headers = { ...headers, ...x.headers }; - } - return { all, revisions, times, headers }; - } - - const watch = await kv.watch({ key, ignoreDeletes: false }); - if (watch._data._info.num_pending > 0) { - for await (const { key: key0, value, revision, sm } of watch) { - if (value.length > 0) { - // we MUST check value.length because we do NOT ignoreDeletes. - // we do NOT ignore deletes so that sm.di.pending goes down to 0. - // Otherwise, there is no way in general to know when we are done. - all[key0] = value; - revisions[key0] = revision; - times[key0] = sm.time; - headers[key0] = sm.headers; - } - if (sm.di.pending <= 0) { - // **NOTE! This will hang and never get hit if you don't have the $JC.FC.... auth enabled!!!!** - break; - } - } - } - watch.stop(); - // console.log("finished getAllFromKv", key, (Date.now() - t) / 1000, "seconds"); - return { all, revisions, times, headers }; -} - -export function handleErrorMessage(mesg) { - if (mesg?.error) { - if (mesg.error.startsWith("Error: ")) { - throw Error(mesg.error.slice("Error: ".length)); - } else { - throw Error(mesg.error); - } - } - return mesg; -} - -// Returns true if the subject matches the NATS pattern. -export function matchesPattern({ - pattern, - subject, -}: { - pattern: string; - subject: string; -}): boolean { - const subParts = subject.split("."); - const patParts = pattern.split("."); - let i = 0, - j = 0; - while (i < subParts.length && j < patParts.length) { - if (patParts[j] === ">") return true; - if (patParts[j] !== "*" && patParts[j] !== subParts[i]) return false; - i++; - j++; - } - - return i === subParts.length && j === patParts.length; -} - -// Converts the specified millis into Nanos -export type Nanos = number; -export function nanos(millis: number): Nanos { - return millis * 1000000; -} - -// Convert the specified Nanos into millis -export function millis(ns: Nanos): number { - return Math.floor(ns / 1000000); -} - -export function toKey(x): string | undefined { - if (x === undefined) { - return undefined; - } else if (typeof x === "object") { - return jsonStableStringify(x); - } else { - return `${x}`; - } -} - -// returns false if not connected or there is no connection yet. -export function isConnectedSync(): boolean { - const nc = getConnectionSync(); - // @ts-ignore - return !!nc?.protocol?.connected; -} - -export async function isConnected(nc?): Promise { - nc = nc ?? (await getConnection()); - // At least if this changes, things will be so broken, we'll quickly notice, hopefully. - // @ts-ignore - return !!nc.protocol?.connected; -} - -// Returns the max payload size for messages for the NATS server -// that we are connected to. This is used for chunking by the kv -// and stream to support arbitrarily large values. -export const getMaxPayload = reuseInFlight(async () => { - const nc = await getConnection(); - while (true) { - if (nc.info == null) { - await waitUntilConnected(); - await delay(100); - } else { - return nc.info.max_payload; - } - } -}); - -export const waitUntilConnected = reuseInFlight(async () => { - const nc = (await getConnection()) as any; - if (nc.protocol?.connected) { - return; - } - console.log("NATS waitUntilConnected: waiting..."); - while (!nc.protocol?.connected) { - await delay(500); - } - console.log("NATS waitUntilConnected: connected"); -}); diff --git a/src/packages/next/tsconfig.json b/src/packages/next/tsconfig.json index d863adeded..da863e598f 100644 --- a/src/packages/next/tsconfig.json +++ b/src/packages/next/tsconfig.json @@ -38,7 +38,7 @@ { "path": "../backend" }, { "path": "../database" }, { "path": "../frontend" }, - { "path": "../nats" }, + { "path": "../conat" }, { "path": "../server" }, { "path": "../util" } ] diff --git a/src/packages/package.json b/src/packages/package.json index ce79fc3b40..78375f00eb 100644 --- a/src/packages/package.json +++ b/src/packages/package.json @@ -28,6 +28,12 @@ "nanoid@<3.3.8": "^3.3.8", "tar-fs@2.1.1": "2.1.2" }, - "onlyBuiltDependencies": ["websocket-sftp", "websocketfs", "zeromq"] + "onlyBuiltDependencies": [ + "better-sqlite3", + "websocket-sftp", + "websocketfs", + "zeromq", + "zstd-napi" + ] } } diff --git a/src/packages/pnpm-lock.yaml b/src/packages/pnpm-lock.yaml index d670859223..cef023fad0 100644 --- a/src/packages/pnpm-lock.yaml +++ b/src/packages/pnpm-lock.yaml @@ -26,19 +26,19 @@ importers: devDependencies: '@types/jest': specifier: ^29.5.12 - version: 29.5.13 + version: 29.5.14 check-dependency-version-consistency: specifier: ^5.0.0 version: 5.0.0 jest: specifier: ^29.7.0 - version: 29.7.0(@types/node@18.19.86) + version: 29.7.0(@types/node@22.15.21) ts-jest: specifier: ^29.2.3 - version: 29.2.5(@babel/core@7.26.9)(@jest/transform@29.7.0)(@jest/types@29.6.3)(babel-jest@29.7.0(@babel/core@7.26.9))(jest@29.7.0(@types/node@18.19.86))(typescript@5.8.2) + version: 29.3.4(@babel/core@7.26.9)(@jest/transform@29.7.0)(@jest/types@29.6.3)(babel-jest@29.7.0(@babel/core@7.26.9))(jest@29.7.0(@types/node@22.15.21))(typescript@5.8.3) typescript: specifier: ^5.7.3 - version: 5.8.2 + version: 5.8.3 api-client: dependencies: @@ -51,7 +51,7 @@ importers: devDependencies: '@types/node': specifier: ^18.16.14 - version: 18.19.86 + version: 18.19.103 assets: dependencies: @@ -74,15 +74,12 @@ importers: '@cocalc/backend': specifier: workspace:* version: 'link:' - '@cocalc/nats': + '@cocalc/conat': specifier: workspace:* - version: link:../nats + version: link:../conat '@cocalc/util': specifier: workspace:* version: link:../util - '@nats-io/nkeys': - specifier: ^2.0.3 - version: 2.0.3 '@types/debug': specifier: ^4.1.12 version: 4.1.12 @@ -95,12 +92,15 @@ importers: awaiting: specifier: ^3.0.0 version: 3.0.0 + better-sqlite3: + specifier: ^11.10.0 + version: 11.10.0 chokidar: specifier: ^3.6.0 version: 3.6.0 debug: specifier: ^4.4.0 - version: 4.4.0(supports-color@9.4.0) + version: 4.4.1(supports-color@9.4.0) fs-extra: specifier: ^11.2.0 version: 11.3.0 @@ -110,12 +110,6 @@ importers: lru-cache: specifier: ^7.18.3 version: 7.18.3 - nats: - specifier: ^2.29.3 - version: 2.29.3 - nats.ws: - specifier: ^1.30.2 - version: 1.30.2 password-hash: specifier: ^1.2.2 version: 1.2.2 @@ -128,6 +122,12 @@ importers: shell-escape: specifier: ^0.2.0 version: 0.2.0 + socket.io: + specifier: ^4.8.1 + version: 4.8.1(supports-color@9.4.0) + socket.io-client: + specifier: ^4.8.1 + version: 4.8.1(supports-color@9.4.0) supports-color: specifier: ^9.0.2 version: 9.4.0 @@ -139,11 +139,14 @@ importers: version: 1.13.7 ws: specifier: ^8.18.0 - version: 8.18.1 + version: 8.18.2 + zstd-napi: + specifier: ^0.0.10 + version: 0.0.10 devDependencies: '@types/node': specifier: ^18.16.14 - version: 18.19.86 + version: 18.19.103 expect: specifier: ^26.6.2 version: 26.6.2 @@ -174,25 +177,80 @@ importers: devDependencies: '@types/node': specifier: ^18.16.14 - version: 18.19.86 + version: 18.19.103 + + conat: + dependencies: + '@cocalc/comm': + specifier: workspace:* + version: link:../comm + '@cocalc/conat': + specifier: workspace:* + version: 'link:' + '@cocalc/util': + specifier: workspace:* + version: link:../util + '@isaacs/ttlcache': + specifier: ^1.4.1 + version: 1.4.1 + '@msgpack/msgpack': + specifier: ^3.1.1 + version: 3.1.1 + '@socket.io/redis-streams-adapter': + specifier: ^0.2.2 + version: 0.2.2(socket.io-adapter@2.5.5) + awaiting: + specifier: ^3.0.0 + version: 3.0.0 + events: + specifier: 3.3.0 + version: 3.3.0 + immutable: + specifier: ^4.3.0 + version: 4.3.7 + iovalkey: + specifier: ^0.3.1 + version: 0.3.1 + js-base64: + specifier: ^3.7.7 + version: 3.7.7 + json-stable-stringify: + specifier: ^1.0.1 + version: 1.3.0 + lodash: + specifier: ^4.17.21 + version: 4.17.21 + socket.io-client: + specifier: ^4.8.1 + version: 4.8.1(supports-color@9.4.0) + devDependencies: + '@types/better-sqlite3': + specifier: ^7.6.13 + version: 7.6.13 + '@types/json-stable-stringify': + specifier: ^1.0.32 + version: 1.2.0 + '@types/lodash': + specifier: ^4.14.202 + version: 4.17.17 + '@types/node': + specifier: ^18.16.14 + version: 18.19.103 database: dependencies: '@cocalc/backend': specifier: workspace:* version: link:../backend + '@cocalc/conat': + specifier: workspace:* + version: link:../conat '@cocalc/database': specifier: workspace:* version: 'link:' - '@cocalc/nats': - specifier: workspace:* - version: link:../nats '@cocalc/util': specifier: workspace:* version: link:../util - '@nats-io/services': - specifier: 3.0.0 - version: 3.0.0 async: specifier: ^1.5.2 version: 1.5.2 @@ -201,28 +259,25 @@ importers: version: 3.0.0 debug: specifier: ^4.4.0 - version: 4.4.0(supports-color@8.1.1) + version: 4.4.1(supports-color@8.1.1) immutable: specifier: ^4.3.0 version: 4.3.7 json-stable-stringify: specifier: ^1.0.1 - version: 1.1.1 + version: 1.3.0 lodash: specifier: ^4.17.21 version: 4.17.21 lru-cache: specifier: ^7.18.3 version: 7.18.3 - nats: - specifier: ^2.29.3 - version: 2.29.3 node-fetch: specifier: 2.6.7 version: 2.6.7(encoding@0.1.13) pg: specifier: ^8.7.1 - version: 8.14.1 + version: 8.16.0 random-key: specifier: ^0.3.2 version: 0.3.2 @@ -241,13 +296,13 @@ importers: devDependencies: '@types/lodash': specifier: ^4.14.202 - version: 4.17.9 + version: 4.17.17 '@types/node': specifier: ^18.16.14 - version: 18.19.86 + version: 18.19.103 '@types/pg': specifier: ^8.6.1 - version: 8.11.11 + version: 8.15.2 '@types/uuid': specifier: ^8.3.1 version: 8.3.4 @@ -260,12 +315,12 @@ importers: '@cocalc/backend': specifier: workspace:* version: link:../backend + '@cocalc/conat': + specifier: workspace:* + version: link:../conat '@cocalc/file-server': specifier: workspace:* version: 'link:' - '@cocalc/nats': - specifier: workspace:* - version: link:../nats '@cocalc/util': specifier: workspace:* version: link:../util @@ -273,18 +328,18 @@ importers: specifier: ^3.0.0 version: 3.0.0 better-sqlite3: - specifier: ^11.8.1 - version: 11.9.1 + specifier: ^11.10.0 + version: 11.10.0 lodash: specifier: ^4.17.21 version: 4.17.21 devDependencies: '@types/better-sqlite3': - specifier: ^7.6.12 + specifier: ^7.6.13 version: 7.6.13 '@types/lodash': specifier: ^4.14.202 - version: 4.17.9 + version: 4.17.17 frontend: dependencies: @@ -293,7 +348,7 @@ importers: version: 6.0.0 '@ant-design/compatible': specifier: ^5.1.1 - version: 5.1.4(antd@5.24.7(date-fns@2.30.0)(moment@2.30.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(prop-types@15.8.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + version: 5.1.4(antd@5.25.2(date-fns@2.30.0)(moment@2.30.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(prop-types@15.8.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) '@ant-design/icons': specifier: ^6.0.0 version: 6.0.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1) @@ -306,6 +361,9 @@ importers: '@cocalc/comm': specifier: workspace:* version: link:../comm + '@cocalc/conat': + specifier: workspace:* + version: link:../conat '@cocalc/frontend': specifier: workspace:* version: 'link:' @@ -315,9 +373,6 @@ importers: '@cocalc/local-storage-lru': specifier: ^2.4.3 version: 2.5.0 - '@cocalc/nats': - specifier: workspace:* - version: link:../nats '@cocalc/sync': specifier: workspace:* version: link:../sync @@ -356,19 +411,7 @@ importers: version: 1.37.2(crypto@1.0.1) '@microlink/react-json-view': specifier: ^1.23.3 - version: 1.26.1(@types/react@18.3.10)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@nats-io/jetstream': - specifier: 3.0.0 - version: 3.0.0 - '@nats-io/kv': - specifier: 3.0.0 - version: 3.0.0 - '@nats-io/nats-core': - specifier: 3.0.0 - version: 3.0.0 - '@nats-io/services': - specifier: 3.0.0 - version: 3.0.0 + version: 1.26.2(@types/react@18.3.22)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) '@orama/orama': specifier: 3.0.0-rc-3 version: 3.0.0-rc-3 @@ -383,7 +426,7 @@ importers: version: 1.2.7 '@stripe/react-stripe-js': specifier: ^3.1.1 - version: 3.6.0(@stripe/stripe-js@5.10.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + version: 3.7.0(@stripe/stripe-js@5.10.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) '@stripe/stripe-js': specifier: ^5.5.0 version: 5.10.0 @@ -392,7 +435,7 @@ importers: version: 4.1.12 '@uiw/react-textarea-code-editor': specifier: ^2.1.1 - version: 2.1.9(@babel/runtime@7.27.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + version: 2.1.9(@babel/runtime@7.27.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) '@use-gesture/react': specifier: ^10.2.24 version: 10.3.1(react@18.3.1) @@ -404,10 +447,10 @@ importers: version: 2.3.2 antd: specifier: ^5.24.7 - version: 5.24.7(date-fns@2.30.0)(moment@2.30.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + version: 5.25.2(date-fns@2.30.0)(moment@2.30.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) antd-img-crop: specifier: ^4.21.0 - version: 4.24.0(antd@5.24.7(date-fns@2.30.0)(moment@2.30.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + version: 4.25.0(antd@5.25.2(date-fns@2.30.0)(moment@2.30.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1) async: specifier: ^2.6.3 version: 2.6.4 @@ -464,7 +507,7 @@ importers: version: 1.11.13 debug: specifier: ^4.4.0 - version: 4.4.0(supports-color@8.1.1) + version: 4.4.1(supports-color@8.1.1) direction: specifier: ^1.0.4 version: 1.0.4 @@ -542,7 +585,7 @@ importers: version: 2.2.1 json-stable-stringify: specifier: ^1.0.1 - version: 1.1.1 + version: 1.3.0 jsonic: specifier: ^1.0.1 version: 1.0.1 @@ -579,9 +622,6 @@ importers: mermaid: specifier: ^11.4.1 version: 11.6.0 - nats.ws: - specifier: ^1.30.2 - version: 1.30.2 node-forge: specifier: ^1.0.0 version: 1.3.1 @@ -602,7 +642,7 @@ importers: version: 7.1.1 plotly.js: specifier: ^2.29.1 - version: 2.35.3(@rspack/core@1.3.4(@swc/helpers@0.5.5))(mapbox-gl@1.13.3)(webpack@5.99.5) + version: 2.35.3(@rspack/core@1.3.11(@swc/helpers@0.5.5))(mapbox-gl@1.13.3)(webpack@5.99.5) project-name-generator: specifier: ^2.1.6 version: 2.1.9 @@ -644,19 +684,19 @@ importers: version: 1.1.5(react@18.3.1) react-intl: specifier: ^7.1.11 - version: 7.1.11(react@18.3.1)(typescript@5.8.2) + version: 7.1.11(react@18.3.1)(typescript@5.8.3) react-plotly.js: specifier: ^2.6.0 - version: 2.6.0(plotly.js@2.35.3(@rspack/core@1.3.4(@swc/helpers@0.5.5))(mapbox-gl@1.13.3)(webpack@5.99.5))(react@18.3.1) + version: 2.6.0(plotly.js@2.35.3(@rspack/core@1.3.11(@swc/helpers@0.5.5))(mapbox-gl@1.13.3)(webpack@5.99.5))(react@18.3.1) react-redux: specifier: ^8.0.5 - version: 8.1.3(@types/react-dom@18.3.6(@types/react@18.3.10))(@types/react@18.3.10)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(redux@4.2.1) + version: 8.1.3(@types/react-dom@18.3.7(@types/react@18.3.22))(@types/react@18.3.22)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(redux@4.2.1) react-timeago: specifier: ^7.2.0 version: 7.2.0(react@18.3.1) react-virtuoso: specifier: ^4.9.0 - version: 4.12.6(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + version: 4.12.7(react-dom@18.3.1(react@18.3.1))(react@18.3.1) shallowequal: specifier: ^1.1.0 version: 1.1.0 @@ -726,19 +766,19 @@ importers: version: 3.2.1 '@formatjs/cli': specifier: ^6.2.12 - version: 6.6.3 + version: 6.7.1 '@types/codemirror': specifier: ^5.60.15 version: 5.60.15 '@types/jquery': specifier: ^3.5.5 - version: 3.5.30 + version: 3.5.32 '@types/katex': specifier: ^0.16.7 version: 0.16.7 '@types/lodash': specifier: ^4.14.202 - version: 4.17.9 + version: 4.17.17 '@types/markdown-it': specifier: 12.2.3 version: 12.2.3 @@ -753,10 +793,10 @@ importers: version: 5.1.3 '@types/react': specifier: ^18.3.10 - version: 18.3.10 + version: 18.3.22 '@types/react-dom': specifier: ^18.3.0 - version: 18.3.6(@types/react@18.3.10) + version: 18.3.7(@types/react@18.3.22) '@types/react-redux': specifier: ^7.1.25 version: 7.1.34 @@ -765,7 +805,7 @@ importers: version: 2.7.0 cspell: specifier: ^8.17.2 - version: 8.18.1 + version: 8.19.4 mocha: specifier: ^10.0.0 version: 10.8.2 @@ -787,6 +827,9 @@ importers: '@cocalc/cdn': specifier: workspace:* version: link:../cdn + '@cocalc/conat': + specifier: workspace:* + version: link:../conat '@cocalc/database': specifier: workspace:* version: link:../database @@ -796,9 +839,6 @@ importers: '@cocalc/hub': specifier: workspace:* version: 'link:' - '@cocalc/nats': - specifier: workspace:* - version: link:../nats '@cocalc/next': specifier: workspace:* version: link:../next @@ -828,7 +868,7 @@ importers: version: 7.3.9 '@types/react': specifier: ^18.3.10 - version: 18.3.10 + version: 18.3.22 '@types/uuid': specifier: ^8.3.1 version: 8.3.4 @@ -852,7 +892,7 @@ importers: version: 7.2.0 compression: specifier: ^1.7.4 - version: 1.7.4 + version: 1.8.0 cookie-parser: specifier: ^1.4.3 version: 1.4.7 @@ -864,7 +904,7 @@ importers: version: 2.8.5 debug: specifier: ^4.4.0 - version: 4.4.0(supports-color@8.1.1) + version: 4.4.1(supports-color@8.1.1) escape-html: specifier: ^1.0.3 version: 1.0.3 @@ -875,8 +915,8 @@ importers: specifier: ^3.5.4 version: 3.5.4 http-proxy-3: - specifier: ^1.20.0 - version: 1.20.0 + specifier: ^1.20.5 + version: 1.20.5 immutable: specifier: ^4.3.0 version: 4.3.7 @@ -885,7 +925,7 @@ importers: version: 3.7.1 json-stable-stringify: specifier: ^1.0.1 - version: 1.1.1 + version: 1.3.0 lodash: specifier: ^4.17.21 version: 4.17.21 @@ -904,12 +944,9 @@ importers: ms: specifier: 2.1.2 version: 2.1.2 - nats: - specifier: ^2.29.3 - version: 2.29.3 next: specifier: 14.2.28 - version: 14.2.28(@babel/core@7.26.9)(@playwright/test@1.51.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.86.3) + version: 14.2.28(@babel/core@7.26.9)(@playwright/test@1.51.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.89.0) parse-domain: specifier: ^5.0.0 version: 5.0.0(encoding@0.1.13) @@ -972,20 +1009,20 @@ importers: version: 2.26.1 ws: specifier: ^8.18.0 - version: 8.18.1 + version: 8.18.2 devDependencies: '@types/express': specifier: ^4.17.21 - version: 4.17.21 + version: 4.17.22 '@types/node': specifier: ^18.16.14 - version: 18.19.86 + version: 18.19.103 '@types/passport': specifier: ^1.0.9 version: 1.0.17 '@types/react-dom': specifier: ^18.3.0 - version: 18.3.6(@types/react@18.3.10) + version: 18.3.7(@types/react@18.3.22) coffeescript: specifier: ^2.5.1 version: 2.7.0 @@ -1007,12 +1044,12 @@ importers: '@cocalc/backend': specifier: workspace:* version: link:../backend + '@cocalc/conat': + specifier: workspace:* + version: link:../conat '@cocalc/jupyter': specifier: workspace:* version: 'link:' - '@cocalc/nats': - specifier: workspace:* - version: link:../nats '@cocalc/sync': specifier: workspace:* version: link:../sync @@ -1027,7 +1064,7 @@ importers: version: 7.0.20 '@types/json-stable-stringify': specifier: ^1.0.32 - version: 1.0.36 + version: 1.2.0 '@types/node-cleanup': specifier: ^2.1.2 version: 2.1.5 @@ -1036,7 +1073,7 @@ importers: version: 3.0.0 debug: specifier: ^4.4.0 - version: 4.4.0(supports-color@8.1.1) + version: 4.4.1(supports-color@8.1.1) enchannel-zmq-backend: specifier: ^9.1.23 version: 9.1.23(rxjs@7.8.2) @@ -1054,7 +1091,7 @@ importers: version: 4.3.7 json-stable-stringify: specifier: ^1.0.1 - version: 1.1.1 + version: 1.3.0 jsonfile: specifier: ^6.1.0 version: 6.1.0 @@ -1078,7 +1115,7 @@ importers: version: 2.1.2 portfinder: specifier: ^1.0.32 - version: 1.0.32 + version: 1.0.37 shell-escape: specifier: ^0.2.0 version: 0.2.0 @@ -1091,59 +1128,7 @@ importers: devDependencies: '@types/node': specifier: ^18.16.14 - version: 18.19.86 - - nats: - dependencies: - '@cocalc/comm': - specifier: workspace:* - version: link:../comm - '@cocalc/nats': - specifier: workspace:* - version: 'link:' - '@cocalc/util': - specifier: workspace:* - version: link:../util - '@nats-io/jetstream': - specifier: 3.0.0 - version: 3.0.0 - '@nats-io/kv': - specifier: 3.0.0 - version: 3.0.0 - '@nats-io/nats-core': - specifier: 3.0.0 - version: 3.0.0 - '@nats-io/services': - specifier: 3.0.0 - version: 3.0.0 - awaiting: - specifier: ^3.0.0 - version: 3.0.0 - events: - specifier: 3.3.0 - version: 3.3.0 - immutable: - specifier: ^4.3.0 - version: 4.3.7 - js-base64: - specifier: ^3.7.7 - version: 3.7.7 - json-stable-stringify: - specifier: ^1.0.1 - version: 1.1.1 - lodash: - specifier: ^4.17.21 - version: 4.17.21 - devDependencies: - '@types/json-stable-stringify': - specifier: ^1.0.32 - version: 1.0.36 - '@types/lodash': - specifier: ^4.14.202 - version: 4.17.9 - '@types/node': - specifier: ^18.16.14 - version: 18.19.86 + version: 18.19.103 next: dependencies: @@ -1173,22 +1158,22 @@ importers: version: link:../util '@openapitools/openapi-generator-cli': specifier: ^2.19.1 - version: 2.19.1(encoding@0.1.13) + version: 2.20.2(encoding@0.1.13) '@types/react': specifier: ^18.3.10 - version: 18.3.10 + version: 18.3.22 '@types/react-dom': specifier: ^18.3.0 - version: 18.3.6(@types/react@18.3.10) + version: 18.3.7(@types/react@18.3.22) '@vscode/vscode-languagedetection': specifier: ^1.0.22 version: 1.0.22 antd: specifier: ^5.24.7 - version: 5.24.7(date-fns@2.30.0)(moment@2.30.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + version: 5.25.2(date-fns@2.30.0)(moment@2.30.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) antd-img-crop: specifier: ^4.21.0 - version: 4.24.0(antd@5.24.7(date-fns@2.30.0)(moment@2.30.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + version: 4.25.0(antd@5.25.2(date-fns@2.30.0)(moment@2.30.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1) awaiting: specifier: ^3.0.0 version: 3.0.0 @@ -1221,22 +1206,22 @@ importers: version: 2.1.2 next: specifier: 14.2.28 - version: 14.2.28(@babel/core@7.26.9)(@playwright/test@1.51.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.86.3) + version: 14.2.28(@babel/core@7.26.9)(@playwright/test@1.51.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.89.0) next-remove-imports: specifier: ^1.0.11 version: 1.0.12(webpack@5.99.5) next-rest-framework: specifier: 6.0.0-beta.4 - version: 6.0.0-beta.4(zod@3.24.2) + version: 6.0.0-beta.4(zod@3.25.17) next-translate: specifier: ^2.6.2 - version: 2.6.2(next@14.2.28(@babel/core@7.26.9)(@playwright/test@1.51.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.86.3))(react@18.3.1) + version: 2.6.2(next@14.2.28(@babel/core@7.26.9)(@playwright/test@1.51.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.89.0))(react@18.3.1) password-hash: specifier: ^1.2.2 version: 1.2.2 pg: specifier: ^8.7.1 - version: 8.14.1 + version: 8.16.0 react: specifier: ^18.3.1 version: 18.3.1 @@ -1251,7 +1236,7 @@ importers: version: 1.10.1(react-dom@18.3.1(react@18.3.1))(react@18.3.1) react-intl: specifier: ^7.1.11 - version: 7.1.11(react@18.3.1)(typescript@5.8.2) + version: 7.1.11(react@18.3.1)(typescript@5.8.3) serve-index: specifier: ^1.9.1 version: 1.9.1 @@ -1275,20 +1260,20 @@ importers: version: 3.1.1 zod: specifier: ^3.23.5 - version: 3.24.2 + version: 3.25.17 devDependencies: '@babel/preset-typescript': specifier: ^7.23.3 - version: 7.27.0(@babel/core@7.26.9) + version: 7.27.1(@babel/core@7.26.9) '@types/express': specifier: ^4.17.21 - version: 4.17.21 + version: 4.17.22 '@types/node': specifier: ^18.16.14 - version: 18.19.86 + version: 18.19.103 node-mocks-http: specifier: ^1.14.1 - version: 1.16.2(@types/express@4.17.21)(@types/node@18.19.86) + version: 1.17.2(@types/express@4.17.22)(@types/node@18.19.103) react-test-renderer: specifier: ^18.2.0 version: 18.3.1(react@18.3.1) @@ -1301,12 +1286,12 @@ importers: '@cocalc/comm': specifier: workspace:* version: link:../comm + '@cocalc/conat': + specifier: workspace:* + version: link:../conat '@cocalc/jupyter': specifier: workspace:* version: link:../jupyter - '@cocalc/nats': - specifier: workspace:* - version: link:../nats '@cocalc/primus-multiplex': specifier: ^1.1.0 version: 1.1.0 @@ -1334,15 +1319,6 @@ importers: '@lydell/node-pty': specifier: ^1.1.0 version: 1.1.0 - '@nats-io/jetstream': - specifier: 3.0.0 - version: 3.0.0 - '@nats-io/kv': - specifier: 3.0.0 - version: 3.0.0 - '@nats-io/services': - specifier: 3.0.0 - version: 3.0.0 '@nteract/messaging': specifier: ^7.0.20 version: 7.0.20 @@ -1357,13 +1333,13 @@ importers: version: 7.2.0 compression: specifier: ^1.7.4 - version: 1.7.4 + version: 1.8.0 daemonize-process: specifier: ^3.0.0 version: 3.0.0 debug: specifier: ^4.4.0 - version: 4.4.0(supports-color@8.1.1) + version: 4.4.1(supports-color@8.1.1) diskusage: specifier: ^1.1.3 version: 1.2.0 @@ -1375,7 +1351,7 @@ importers: version: 4.21.2 express-rate-limit: specifier: ^7.4.0 - version: 7.4.0(express@4.21.2) + version: 7.5.0(express@4.21.2) get-port: specifier: ^5.1.1 version: 5.1.1 @@ -1384,7 +1360,7 @@ importers: version: 0.1.0 json-stable-stringify: specifier: ^1.0.1 - version: 1.1.1 + version: 1.3.0 jupyter-paths: specifier: ^2.0.3 version: 2.0.4 @@ -1397,15 +1373,12 @@ importers: lru-cache: specifier: ^7.18.3 version: 7.18.3 - nats: - specifier: ^2.29.3 - version: 2.29.3 pidusage: specifier: ^1.2.0 version: 1.2.0 prettier: specifier: ^3.0.2 - version: 3.3.3 + version: 3.5.3 primus: specifier: ^8.0.9 version: 8.0.9 @@ -1435,7 +1408,7 @@ importers: version: 2.0.2 ws: specifier: ^8.18.0 - version: 8.18.1 + version: 8.18.2 zeromq: specifier: ^5.2.8 version: 5.3.1 @@ -1445,16 +1418,16 @@ importers: version: 1.19.5 '@types/express': specifier: ^4.17.21 - version: 4.17.21 + version: 4.17.22 '@types/jquery': specifier: ^3.5.5 - version: 3.5.30 + version: 3.5.32 '@types/lodash': specifier: ^4.14.202 - version: 4.17.9 + version: 4.17.17 '@types/node': specifier: ^18.16.14 - version: 18.19.86 + version: 18.19.103 '@types/primus': specifier: ^7.3.9 version: 7.3.9 @@ -1467,15 +1440,15 @@ importers: '@cocalc/backend': specifier: workspace:* version: link:../backend + '@cocalc/conat': + specifier: workspace:* + version: link:../conat '@cocalc/database': specifier: workspace:* version: link:../database '@cocalc/gcloud-pricing-calculator': specifier: ^1.16.0 version: 1.16.0 - '@cocalc/nats': - specifier: workspace:* - version: link:../nats '@cocalc/server': specifier: workspace:* version: 'link:' @@ -1484,10 +1457,10 @@ importers: version: link:../util '@google-ai/generativelanguage': specifier: ^3.1.0 - version: 3.1.0 + version: 3.2.0 '@google-cloud/bigquery': specifier: ^7.8.0 - version: 7.9.3(encoding@0.1.13) + version: 7.9.4(encoding@0.1.13) '@google-cloud/compute': specifier: ^4.7.0 version: 4.12.0(encoding@0.1.13) @@ -1508,40 +1481,22 @@ importers: version: 1.4.1 '@langchain/anthropic': specifier: ^0.3.18 - version: 0.3.18(@langchain/core@0.3.46(openai@4.95.1(encoding@0.1.13)(ws@8.18.1)(zod@3.24.2)))(encoding@0.1.13) + version: 0.3.20(@langchain/core@0.3.56(openai@4.102.0(encoding@0.1.13)(ws@8.18.2)(zod@3.24.2)))(encoding@0.1.13) '@langchain/core': specifier: ^0.3.46 - version: 0.3.46(openai@4.95.1(encoding@0.1.13)(ws@8.18.1)(zod@3.24.2)) + version: 0.3.56(openai@4.102.0(encoding@0.1.13)(ws@8.18.2)(zod@3.24.2)) '@langchain/google-genai': specifier: ^0.2.4 - version: 0.2.4(@langchain/core@0.3.46(openai@4.95.1(encoding@0.1.13)(ws@8.18.1)(zod@3.24.2)))(zod@3.24.2) + version: 0.2.9(@langchain/core@0.3.56(openai@4.102.0(encoding@0.1.13)(ws@8.18.2)(zod@3.24.2)))(zod@3.24.2) '@langchain/mistralai': specifier: ^0.2.0 - version: 0.2.0(@langchain/core@0.3.46(openai@4.95.1(encoding@0.1.13)(ws@8.18.1)(zod@3.24.2))) + version: 0.2.0(@langchain/core@0.3.56(openai@4.102.0(encoding@0.1.13)(ws@8.18.2)(zod@3.24.2))) '@langchain/ollama': specifier: ^0.2.0 - version: 0.2.0(@langchain/core@0.3.46(openai@4.95.1(encoding@0.1.13)(ws@8.18.1)(zod@3.24.2))) + version: 0.2.0(@langchain/core@0.3.56(openai@4.102.0(encoding@0.1.13)(ws@8.18.2)(zod@3.24.2))) '@langchain/openai': specifier: ^0.5.5 - version: 0.5.6(@langchain/core@0.3.46(openai@4.95.1(encoding@0.1.13)(ws@8.18.1)(zod@3.24.2)))(encoding@0.1.13)(ws@8.18.1) - '@nats-io/jetstream': - specifier: 3.0.0 - version: 3.0.0 - '@nats-io/jwt': - specifier: 0.0.10-5 - version: 0.0.10-5 - '@nats-io/kv': - specifier: 3.0.0 - version: 3.0.0 - '@nats-io/nats-core': - specifier: 3.0.0 - version: 3.0.0 - '@nats-io/nkeys': - specifier: ^2.0.3 - version: 2.0.3 - '@nats-io/services': - specifier: 3.0.0 - version: 3.0.0 + version: 0.5.10(@langchain/core@0.3.56(openai@4.102.0(encoding@0.1.13)(ws@8.18.2)(zod@3.24.2)))(encoding@0.1.13)(ws@8.18.2) '@node-saml/passport-saml': specifier: ^5.0.1 version: 5.0.1 @@ -1580,7 +1535,7 @@ importers: version: 3.0.0 axios: specifier: ^1.7.5 - version: 1.8.4 + version: 1.9.0 base62: specifier: ^2.0.1 version: 2.0.2 @@ -1593,6 +1548,9 @@ importers: cloudflare: specifier: ^2.9.1 version: 2.9.1 + cookie: + specifier: ^1.0.0 + version: 1.0.2 cookies: specifier: ^0.8.0 version: 0.8.0 @@ -1616,7 +1574,7 @@ importers: version: 1.1.5 json-stable-stringify: specifier: ^1.0.1 - version: 1.1.1 + version: 1.3.0 jwt-decode: specifier: ^3.1.2 version: 3.1.2 @@ -1643,19 +1601,16 @@ importers: version: 2.1.2 nanoid: specifier: ^3.3.8 - version: 3.3.8 - nats: - specifier: ^2.29.3 - version: 2.29.3 + version: 3.3.11 node-zendesk: specifier: ^5.0.13 version: 5.0.15(encoding@0.1.13) nodemailer: specifier: ^6.9.16 - version: 6.10.0 + version: 6.10.1 openai: specifier: ^4.95.1 - version: 4.95.1(encoding@0.1.13)(ws@8.18.1)(zod@3.24.2) + version: 4.102.0(encoding@0.1.13)(ws@8.18.2)(zod@3.24.2) parse-domain: specifier: ^5.0.0 version: 5.0.0(encoding@0.1.13) @@ -1703,7 +1658,10 @@ importers: version: 1.2.0 sanitize-html: specifier: ^2.12.1 - version: 2.15.0 + version: 2.17.0 + socket.io: + specifier: ^4.8.1 + version: 4.8.1(supports-color@9.4.0) stripe: specifier: ^17.5.0 version: 17.7.0 @@ -1722,19 +1680,19 @@ importers: version: 2.1.6 '@types/express': specifier: ^4.17.21 - version: 4.17.21 + version: 4.17.22 '@types/express-session': specifier: ^1.18.0 version: 1.18.1 '@types/lodash': specifier: ^4.14.202 - version: 4.17.9 + version: 4.17.17 '@types/ms': specifier: ^0.7.31 - version: 0.7.31 + version: 0.7.34 '@types/node': specifier: ^18.16.14 - version: 18.19.86 + version: 18.19.103 '@types/node-zendesk': specifier: ^2.0.15 version: 2.0.15 @@ -1749,7 +1707,7 @@ importers: version: 2.0.16 '@types/sanitize-html': specifier: ^2.3.1 - version: 2.15.0 + version: 2.16.0 expect: specifier: ^26.6.2 version: 26.6.2 @@ -1774,31 +1732,31 @@ importers: devDependencies: '@rspack/cli': specifier: ^1.1.1 - version: 1.3.4(@rspack/core@1.3.4(@swc/helpers@0.5.5))(@types/express@4.17.21)(webpack@5.99.5) + version: 1.3.11(@rspack/core@1.3.11(@swc/helpers@0.5.5))(@types/express@4.17.22)(webpack@5.99.5) '@rspack/core': specifier: ^1.1.1 - version: 1.3.4(@swc/helpers@0.5.5) + version: 1.3.11(@swc/helpers@0.5.5) '@rspack/plugin-react-refresh': specifier: ^1.0.0 - version: 1.2.0(react-refresh@0.14.2)(webpack-hot-middleware@2.26.1) + version: 1.4.3(react-refresh@0.14.2)(webpack-hot-middleware@2.26.1) '@types/jquery': specifier: ^3.5.5 - version: 3.5.30 + version: 3.5.32 '@types/node': specifier: ^18.16.14 - version: 18.19.86 + version: 18.19.103 '@types/react': specifier: ^18.3.10 - version: 18.3.10 + version: 18.3.22 '@types/react-dom': specifier: ^18.3.0 - version: 18.3.6(@types/react@18.3.10) + version: 18.3.7(@types/react@18.3.22) '@typescript-eslint/eslint-plugin': specifier: ^6.21.0 - version: 6.21.0(@typescript-eslint/parser@6.21.0(eslint@8.57.1)(typescript@5.8.2))(eslint@8.57.1)(typescript@5.8.2) + version: 6.21.0(@typescript-eslint/parser@6.21.0(eslint@8.57.1)(typescript@5.8.3))(eslint@8.57.1)(typescript@5.8.3) '@typescript-eslint/parser': specifier: ^6.21.0 - version: 6.21.0(eslint@8.57.1)(typescript@5.8.2) + version: 6.21.0(eslint@8.57.1)(typescript@5.8.3) assert: specifier: ^2.0.0 version: 2.1.0 @@ -1834,7 +1792,7 @@ importers: version: 2.7.0 css-loader: specifier: ^7.1.2 - version: 7.1.2(@rspack/core@1.3.4(@swc/helpers@0.5.5))(webpack@5.99.5) + version: 7.1.2(@rspack/core@1.3.11(@swc/helpers@0.5.5))(webpack@5.99.5) entities: specifier: ^2.2.0 version: 2.2.0 @@ -1846,7 +1804,7 @@ importers: version: 9.1.0(eslint@8.57.1) eslint-plugin-prettier: specifier: ^5.1.3 - version: 5.2.6(@types/eslint@9.6.1)(eslint-config-prettier@9.1.0(eslint@8.57.1))(eslint@8.57.1)(prettier@3.3.3) + version: 5.4.0(@types/eslint@9.6.1)(eslint-config-prettier@9.1.0(eslint@8.57.1))(eslint@8.57.1)(prettier@3.5.3) eslint-plugin-react: specifier: ^7.33.2 version: 7.37.5(eslint@8.57.1) @@ -1864,7 +1822,7 @@ importers: version: 2.1.2(webpack@5.99.5) html-webpack-plugin: specifier: ^5.5.3 - version: 5.6.3(@rspack/core@1.3.4(@swc/helpers@0.5.5))(webpack@5.99.5) + version: 5.6.3(@rspack/core@1.3.11(@swc/helpers@0.5.5))(webpack@5.99.5) identity-obj-proxy: specifier: ^3.0.0 version: 3.0.0 @@ -1918,10 +1876,10 @@ importers: version: 18.3.1(react@18.3.1) sass: specifier: ^1.57.1 - version: 1.86.3 + version: 1.89.0 sass-loader: specifier: ^16.0.2 - version: 16.0.5(@rspack/core@1.3.4(@swc/helpers@0.5.5))(sass@1.86.3)(webpack@5.99.5) + version: 16.0.5(@rspack/core@1.3.11(@swc/helpers@0.5.5))(sass@1.89.0)(webpack@5.99.5) script-loader: specifier: ^0.7.2 version: 0.7.2 @@ -1939,7 +1897,7 @@ importers: version: 1.6.7 ts-jest: specifier: ^29.2.3 - version: 29.2.5(@babel/core@7.26.9)(@jest/transform@29.7.0)(@jest/types@29.6.3)(babel-jest@29.7.0(@babel/core@7.26.9))(jest@29.7.0(@types/node@18.19.86))(typescript@5.8.2) + version: 29.3.4(@babel/core@7.26.9)(@jest/transform@29.7.0)(@jest/types@29.6.3)(babel-jest@29.7.0(@babel/core@7.26.9))(jest@29.7.0(@types/node@18.19.103))(typescript@5.8.3) tsd: specifier: ^0.22.0 version: 0.22.0 @@ -1955,9 +1913,9 @@ importers: sync: dependencies: - '@cocalc/nats': + '@cocalc/conat': specifier: workspace:* - version: link:../nats + version: link:../conat '@cocalc/sync': specifier: workspace:* version: 'link:' @@ -1972,7 +1930,7 @@ importers: version: 3.0.0 debug: specifier: ^4.4.0 - version: 4.4.0(supports-color@8.1.1) + version: 4.4.1(supports-color@8.1.1) events: specifier: 3.3.0 version: 3.3.0 @@ -1981,7 +1939,7 @@ importers: version: 4.3.7 json-stable-stringify: specifier: ^1.0.1 - version: 1.1.1 + version: 1.3.0 lodash: specifier: ^4.17.21 version: 4.17.21 @@ -1994,13 +1952,13 @@ importers: version: 4.1.12 '@types/lodash': specifier: ^4.14.202 - version: 4.17.9 + version: 4.17.17 '@types/node': specifier: ^18.16.14 - version: 18.19.86 + version: 18.19.103 ts-jest: specifier: ^29.2.3 - version: 29.2.5(@babel/core@7.26.9)(@jest/transform@29.7.0)(@jest/types@29.6.3)(babel-jest@29.7.0(@babel/core@7.26.9))(jest@29.7.0(@types/node@18.19.86))(typescript@5.8.2) + version: 29.3.4(@babel/core@7.26.9)(@jest/transform@29.7.0)(@jest/types@29.6.3)(babel-jest@29.7.0(@babel/core@7.26.9))(jest@29.7.0(@types/node@18.19.103))(typescript@5.8.3) sync-client: dependencies: @@ -2024,16 +1982,16 @@ importers: version: link:../util cookie: specifier: ^1.0.0 - version: 1.0.0 + version: 1.0.2 debug: specifier: ^4.4.0 - version: 4.4.0(supports-color@8.1.1) + version: 4.4.1(supports-color@8.1.1) primus: specifier: ^8.0.9 version: 8.0.9 ws: specifier: ^8.18.0 - version: 8.18.1 + version: 8.18.2 devDependencies: '@types/cookie': specifier: ^0.6.0 @@ -2043,7 +2001,7 @@ importers: version: 4.1.12 '@types/node': specifier: ^18.16.14 - version: 18.19.86 + version: 18.19.103 '@types/primus': specifier: ^7.3.9 version: 7.3.9 @@ -2059,9 +2017,9 @@ importers: '@cocalc/comm': specifier: workspace:* version: link:../comm - '@cocalc/nats': + '@cocalc/conat': specifier: workspace:* - version: link:../nats + version: link:../conat '@cocalc/sync-client': specifier: workspace:* version: link:../sync-client @@ -2086,7 +2044,7 @@ importers: devDependencies: '@types/node': specifier: ^18.16.14 - version: 18.19.86 + version: 18.19.103 terminal: dependencies: @@ -2116,7 +2074,7 @@ importers: version: 3.0.0 debug: specifier: ^4.4.0 - version: 4.4.0(supports-color@8.1.1) + version: 4.4.1(supports-color@8.1.1) lodash: specifier: ^4.17.21 version: 4.17.21 @@ -2126,10 +2084,10 @@ importers: devDependencies: '@types/lodash': specifier: ^4.14.202 - version: 4.17.9 + version: 4.17.17 '@types/node': specifier: ^18.16.14 - version: 18.19.86 + version: 18.19.103 '@types/primus': specifier: ^7.3.9 version: 7.3.9 @@ -2156,7 +2114,7 @@ importers: version: 1.11.13 debug: specifier: ^4.4.0 - version: 4.4.0(supports-color@8.1.1) + version: 4.4.1(supports-color@8.1.1) decimal.js-light: specifier: ^2.5.1 version: 2.5.1 @@ -2174,7 +2132,7 @@ importers: version: 3.7.7 json-stable-stringify: specifier: ^1.0.1 - version: 1.1.1 + version: 1.3.0 jsonic: specifier: ^1.0.1 version: 1.0.1 @@ -2192,7 +2150,7 @@ importers: version: 18.3.1 react-intl: specifier: ^7.1.11 - version: 7.1.11(react@18.3.1)(typescript@5.8.2) + version: 7.1.11(react@18.3.1)(typescript@5.8.3) redux: specifier: ^4.2.1 version: 4.2.1 @@ -2220,13 +2178,13 @@ importers: version: 4.1.12 '@types/json-stable-stringify': specifier: ^1.0.32 - version: 1.0.36 + version: 1.2.0 '@types/lodash': specifier: ^4.14.202 - version: 4.17.9 + version: 4.17.17 '@types/node': specifier: ^18.16.14 - version: 18.19.86 + version: 18.19.103 '@types/seedrandom': specifier: ^3.0.8 version: 3.0.8 @@ -2334,8 +2292,8 @@ packages: '@antfu/utils@8.1.1': resolution: {integrity: sha512-Mex9nXf9vR6AhcXmMrlz/HVgYYZpVGJ6YlPgwl7UnaFpnshXs6EK/oa5Gpf3CzENMjkvEx2tQtntGnb7UtSTOQ==} - '@anthropic-ai/sdk@0.37.0': - resolution: {integrity: sha512-tHjX2YbkUBwEgg0JZU3EFSSAQPoK4qQR/NFYa8Vtzd5UAyXzZksCw2In69Rml4R/TyHPBfRYaLK35XiOe33pjw==} + '@anthropic-ai/sdk@0.39.0': + resolution: {integrity: sha512-eMyDIPRZbt1CCLErRCi3exlAvNkBtRe+kW5vvJyef93PmNr/clstYgHhtvmkxN82nlKgzyGPCyGxrm0JQ1ZIdg==} '@antoniomuso/lz4-napi-android-arm-eabi@2.8.0': resolution: {integrity: sha512-j1AF1SXOpgiEUxF74cOQLPTwAB9hvR4TqoHJR/ClzP1iWrWZCm6P6tNLy1P1KSn1x3ATGyiTHJqn1V5GvimJvQ==} @@ -2427,6 +2385,10 @@ packages: resolution: {integrity: sha512-RJlIHRueQgwWitWgF8OdFYGZX328Ax5BCemNGlqHfplnRT9ESi8JkFlvaVYbS+UubVY6dpv87Fs2u5M29iNFVQ==} engines: {node: '>=6.9.0'} + '@babel/code-frame@7.27.1': + resolution: {integrity: sha512-cjQ7ZlQ0Mv3b47hABuTevyTuYN4i+loJKGeV9flcCgIK37cCXRh+L1bd3iBHlynerhQ7BhCkn2BPbQUL+rGqFg==} + engines: {node: '>=6.9.0'} + '@babel/compat-data@7.26.8': resolution: {integrity: sha512-oH5UPLMWR3L2wEFLnFJ1TZXqHufiTKAiLfqw5zkhS4dKXLJ10yVztfil/twG8EDTA4F/tvVNw9nOl4ZMslB8rQ==} engines: {node: '>=6.9.0'} @@ -2443,54 +2405,64 @@ packages: resolution: {integrity: sha512-VybsKvpiN1gU1sdMZIp7FcqphVVKEwcuj02x73uvcHE0PTihx1nlBcowYWhDwjpoAXRv43+gDzyggGnn1XZhVw==} engines: {node: '>=6.9.0'} - '@babel/helper-annotate-as-pure@7.25.9': - resolution: {integrity: sha512-gv7320KBUFJz1RnylIg5WWYPRXKZ884AGkYpgpWW02TH66Dl+HaC1t1CKd0z3R4b6hdYEcmrNZHUmfCP+1u3/g==} + '@babel/generator@7.27.1': + resolution: {integrity: sha512-UnJfnIpc/+JO0/+KRVQNGU+y5taA5vCbwN8+azkX6beii/ZF+enZJSOKo11ZSzGJjlNfJHfQtmQT8H+9TXPG2w==} + engines: {node: '>=6.9.0'} + + '@babel/helper-annotate-as-pure@7.27.1': + resolution: {integrity: sha512-WnuuDILl9oOBbKnb4L+DyODx7iC47XfzmNCpTttFsSp6hTG7XZxu60+4IO+2/hPfcGOoKbFiwoI/+zwARbNQow==} engines: {node: '>=6.9.0'} '@babel/helper-compilation-targets@7.27.0': resolution: {integrity: sha512-LVk7fbXml0H2xH34dFzKQ7TDZ2G4/rVTOrq9V+icbbadjbVxxeFeDsNHv2SrZeWoA+6ZiTyWYWtScEIW07EAcA==} engines: {node: '>=6.9.0'} - '@babel/helper-create-class-features-plugin@7.27.0': - resolution: {integrity: sha512-vSGCvMecvFCd/BdpGlhpXYNhhC4ccxyvQWpbGL4CWbvfEoLFWUZuSuf7s9Aw70flgQF+6vptvgK2IfOnKlRmBg==} + '@babel/helper-create-class-features-plugin@7.27.1': + resolution: {integrity: sha512-QwGAmuvM17btKU5VqXfb+Giw4JcN0hjuufz3DYnpeVDvZLAObloM77bhMXiqry3Iio+Ai4phVRDwl6WU10+r5A==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0 - '@babel/helper-member-expression-to-functions@7.25.9': - resolution: {integrity: sha512-wbfdZ9w5vk0C0oyHqAJbc62+vet5prjj01jjJ8sKn3j9h3MQQlflEdXYvuqRWjHnM12coDEqiC1IRCi0U/EKwQ==} + '@babel/helper-member-expression-to-functions@7.27.1': + resolution: {integrity: sha512-E5chM8eWjTp/aNoVpcbfM7mLxu9XGLWYise2eBKGQomAk/Mb4XoxyqXTZbuTohbsl8EKqdlMhnDI2CCLfcs9wA==} engines: {node: '>=6.9.0'} '@babel/helper-module-imports@7.25.9': resolution: {integrity: sha512-tnUA4RsrmflIM6W6RFTLFSXITtl0wKjgpnLgXyowocVPrbYrLUXSBXDgTs8BlbmIzIdlBySRQjINYs2BAkiLtw==} engines: {node: '>=6.9.0'} + '@babel/helper-module-imports@7.27.1': + resolution: {integrity: sha512-0gSFWUPNXNopqtIPQvlD5WgXYI5GY2kP2cCvoT8kczjbfcfuIljTbcWrulD1CIPIX2gt1wghbDy08yE1p+/r3w==} + engines: {node: '>=6.9.0'} + '@babel/helper-module-transforms@7.26.0': resolution: {integrity: sha512-xO+xu6B5K2czEnQye6BHA7DolFFmS3LB7stHZFaOLb1pAwO1HWLS8fXA+eh0A2yIvltPVmx3eNNDBJA2SLHXFw==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0 - '@babel/helper-optimise-call-expression@7.25.9': - resolution: {integrity: sha512-FIpuNaz5ow8VyrYcnXQTDRGvV6tTjkNtCK/RYNDXGSLlUD6cBuQTSw43CShGxjvfBTfcUA/r6UhUCbtYqkhcuQ==} + '@babel/helper-module-transforms@7.27.1': + resolution: {integrity: sha512-9yHn519/8KvTU5BjTVEEeIM3w9/2yXNKoD82JifINImhpKkARMJKPP59kLo+BafpdN5zgNeIcS4jsGDmd3l58g==} engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0 - '@babel/helper-plugin-utils@7.24.8': - resolution: {integrity: sha512-FFWx5142D8h2Mgr/iPVGH5G7w6jDn4jUSpZTyDnQO0Yn7Ks2Kuz6Pci8H6MPCoUJegd/UZQ3tAvfLCxQSnWWwg==} + '@babel/helper-optimise-call-expression@7.27.1': + resolution: {integrity: sha512-URMGH08NzYFhubNSGJrpUEphGKQwMQYBySzat5cAByY1/YgIRkULnIy3tAMeszlL/so2HbeilYloUmSpd7GdVw==} engines: {node: '>=6.9.0'} - '@babel/helper-plugin-utils@7.26.5': - resolution: {integrity: sha512-RS+jZcRdZdRFzMyr+wcsaqOmld1/EqTghfaBGQQd/WnRdzdlvSZ//kF7U8VQTxf1ynZ4cjUcYgjVGx13ewNPMg==} + '@babel/helper-plugin-utils@7.27.1': + resolution: {integrity: sha512-1gn1Up5YXka3YYAHGKpbideQ5Yjf1tDa9qYcgysz+cNCXukyLl6DjPXhD3VRwSb8c0J9tA4b2+rHEZtc6R0tlw==} engines: {node: '>=6.9.0'} - '@babel/helper-replace-supers@7.26.5': - resolution: {integrity: sha512-bJ6iIVdYX1YooY2X7w1q6VITt+LnUILtNk7zT78ykuwStx8BauCzxvFqFaHjOpW1bVnSUM1PN1f0p5P21wHxvg==} + '@babel/helper-replace-supers@7.27.1': + resolution: {integrity: sha512-7EHz6qDZc8RYS5ElPoShMheWvEgERonFCs7IAonWLLUTXW59DP14bCZt89/GKyreYn8g3S83m21FelHKbeDCKA==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0 - '@babel/helper-skip-transparent-expression-wrappers@7.25.9': - resolution: {integrity: sha512-K4Du3BFa3gvyhzgPcntrkDgZzQaq6uozzcpGbOO1OEJaI+EJdqWIMTLgFgQf6lrfiDFo5FU+BxKepI9RmZqahA==} + '@babel/helper-skip-transparent-expression-wrappers@7.27.1': + resolution: {integrity: sha512-Tub4ZKEXqbPjXgWLl2+3JpQAYBJ8+ikpQ2Ocj/q/r0LwE3UhENh7EUabyHjz2kCEsrRY83ew2DQdHluuiDQFzg==} engines: {node: '>=6.9.0'} '@babel/helper-string-parser@7.25.7': @@ -2501,6 +2473,10 @@ packages: resolution: {integrity: sha512-4A/SCr/2KLd5jrtOMFzaKjVtAei3+2r/NChoBNoZ3EyP/+GlhoaEGoWOZUmFmoITP7zOJyHIMm+DYRd8o3PvHA==} engines: {node: '>=6.9.0'} + '@babel/helper-string-parser@7.27.1': + resolution: {integrity: sha512-qMlSxKbpRlAridDExk92nSobyDdpPijUq2DW6oDnUqd0iOGxmQjyqhMIihI9+zv4LPyZdRje2cavWPbCbWm3eA==} + engines: {node: '>=6.9.0'} + '@babel/helper-validator-identifier@7.24.7': resolution: {integrity: sha512-rR+PBcQ1SMQDDyF6X0wxtG8QyLCgUB0eRAGguqRLfkCA87l7yAP7ehq8SNj96OOGTO8OBV70KhuFYcIkHXOg0w==} engines: {node: '>=6.9.0'} @@ -2513,8 +2489,12 @@ packages: resolution: {integrity: sha512-Ed61U6XJc3CVRfkERJWDz4dJwKe7iLmmJsbOGu9wSloNSFttHV0I8g6UAgb7qnK5ly5bGLPd4oXZlxCdANBOWQ==} engines: {node: '>=6.9.0'} - '@babel/helper-validator-option@7.25.9': - resolution: {integrity: sha512-e/zv1co8pp55dNdEcCynfj9X7nyUKUXoUEwfXqaZt0omVOmDe9oOTdKStH4GmAw6zxMFs50ZayuMfHDKlO7Tfw==} + '@babel/helper-validator-identifier@7.27.1': + resolution: {integrity: sha512-D2hP9eA+Sqx1kBZgzxZh0y1trbuU+JoDkiEwqhQ36nodYqJwyEIhPSdMNd7lOm/4io72luTPWH20Yda0xOuUow==} + engines: {node: '>=6.9.0'} + + '@babel/helper-validator-option@7.27.1': + resolution: {integrity: sha512-YvjJow9FxbhFFKDSuFnVCe2WxXk1zWc22fFePVNEaWJEu8IrZVlda6N0uHwzZrUM1il7NC9Mlp4MaJYbYd9JSg==} engines: {node: '>=6.9.0'} '@babel/helpers@7.27.0': @@ -2534,6 +2514,11 @@ packages: engines: {node: '>=6.0.0'} hasBin: true + '@babel/parser@7.27.2': + resolution: {integrity: sha512-QYLs8299NA7WM/bZAdp+CviYYkVoYXlDW2rzliy3chxd1PQjej7JORuMJDJXJUb9g0TT+B99EwaVLKmX+sPXWw==} + engines: {node: '>=6.0.0'} + hasBin: true + '@babel/plugin-syntax-async-generators@7.8.4': resolution: {integrity: sha512-tycmZxkGfZaxhMRbXlPXuVFpdWlXpir2W4AMhSJgRKzk/eDlIXOhb2LHWoLpDF7TEHylV5zNhykX6KAgHJmTNw==} peerDependencies: @@ -2559,14 +2544,8 @@ packages: peerDependencies: '@babel/core': ^7.0.0-0 - '@babel/plugin-syntax-jsx@7.24.7': - resolution: {integrity: sha512-6ddciUPe/mpMnOKv/U+RSd2vvVy+Yw/JfBB0ZHYjEZt9NLHmCUylNYlsbqCCS1Bffjlb0fCwC9Vqz+sBz6PsiQ==} - engines: {node: '>=6.9.0'} - peerDependencies: - '@babel/core': ^7.0.0-0 - - '@babel/plugin-syntax-jsx@7.25.9': - resolution: {integrity: sha512-ld6oezHQMZsZfp6pWtbjaNDF2tiiCYYDqQszHt5VV437lewP9aSi2Of99CK0D0XB21k7FLgnLcmQKyKzynfeAA==} + '@babel/plugin-syntax-jsx@7.27.1': + resolution: {integrity: sha512-y8YTNIeKoyhGd9O0Jiyzyyqk8gdjnumGTQPsz0xOZOQ2RmkVJeZ1vmmfIvFEKqucBG6axJGBZDE/7iI5suUI/w==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0-0 @@ -2613,26 +2592,26 @@ packages: peerDependencies: '@babel/core': ^7.0.0-0 - '@babel/plugin-syntax-typescript@7.25.9': - resolution: {integrity: sha512-hjMgRy5hb8uJJjUcdWunWVcoi9bGpJp8p5Ol1229PoN6aytsLwNMgmdftO23wnCLMfVmTwZDWMPNq/D1SY60JQ==} + '@babel/plugin-syntax-typescript@7.27.1': + resolution: {integrity: sha512-xfYCBMxveHrRMnAWl1ZlPXOZjzkN82THFvLhQhFXFt81Z5HnN+EtUkZhv/zcKpmT3fzmWZB0ywiBrbC3vogbwQ==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0-0 - '@babel/plugin-transform-modules-commonjs@7.26.3': - resolution: {integrity: sha512-MgR55l4q9KddUDITEzEFYn5ZsGDXMSsU9E+kh7fjRXTIC3RHqfCo8RPRbyReYJh44HQ/yomFkqbOFohXvDCiIQ==} + '@babel/plugin-transform-modules-commonjs@7.27.1': + resolution: {integrity: sha512-OJguuwlTYlN0gBZFRPqwOGNWssZjfIUdS7HMYtN8c1KmwpwHFBwTeFZrg9XZa+DFTitWOW5iTAG7tyCUPsCCyw==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0-0 - '@babel/plugin-transform-typescript@7.27.0': - resolution: {integrity: sha512-fRGGjO2UEGPjvEcyAZXRXAS8AfdaQoq7HnxAbJoAoW10B9xOKesmmndJv+Sym2a+9FHWZ9KbyyLCe9s0Sn5jtg==} + '@babel/plugin-transform-typescript@7.27.1': + resolution: {integrity: sha512-Q5sT5+O4QUebHdbwKedFBEwRLb02zJ7r4A5Gg2hUoLuU3FjdMcyqcywqUrLCaDsFCxzokf7u9kuy7qz51YUuAg==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0-0 - '@babel/preset-typescript@7.27.0': - resolution: {integrity: sha512-vxaPFfJtHhgeOVXRKuHpHPAOgymmy8V8I65T1q53R7GCZlefKeCaTyDs3zOPHTTbmquvNlQYC5klEvWsBAtrBQ==} + '@babel/preset-typescript@7.27.1': + resolution: {integrity: sha512-l7WfQfX0WK4M0v2RudjuQK4u99BS6yLHYEmdtVPP7lKV013zr9DygFuWNlnbvQ9LR+LS0Egz/XAvGx5U9MX0fQ==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0-0 @@ -2645,14 +2624,26 @@ packages: resolution: {integrity: sha512-VtPOkrdPHZsKc/clNqyi9WUA8TINkZ4cGk63UUE3u4pmB2k+ZMQRDuIOagv8UVd6j7k0T3+RRIb7beKTebNbcw==} engines: {node: '>=6.9.0'} + '@babel/runtime@7.27.1': + resolution: {integrity: sha512-1x3D2xEk2fRo3PAhwQwu5UubzgiVWSXTBfWpVd2Mx2AzRqJuDJCsgaDVZ7HB5iGzDW1Hl1sWN2mFyKjmR9uAog==} + engines: {node: '>=6.9.0'} + '@babel/template@7.27.0': resolution: {integrity: sha512-2ncevenBqXI6qRMukPlXwHKHchC7RyMuu4xv5JBXRfOGVcTy1mXCD12qrp7Jsoxll1EV3+9sE4GugBVRjT2jFA==} engines: {node: '>=6.9.0'} + '@babel/template@7.27.2': + resolution: {integrity: sha512-LPDZ85aEJyYSd18/DkjNh4/y1ntkE5KwUHWTiqgRxruuZL2F1yuHligVHLvcHY2vMHXttKFpJn6LwfI7cw7ODw==} + engines: {node: '>=6.9.0'} + '@babel/traverse@7.27.0': resolution: {integrity: sha512-19lYZFzYVQkkHkl4Cy4WrAVcqBkgvV2YM2TU3xG6DIwO7O3ecbDPfW3yM3bjAGcqcQHi+CCtjMR3dIEHxsd6bA==} engines: {node: '>=6.9.0'} + '@babel/traverse@7.27.1': + resolution: {integrity: sha512-ZCYtZciz1IWJB4U61UPu4KEaqyfj+r5T1Q5mqPo+IBpcG9kHv30Z0aD8LXPgC1trYa6rK0orRyAhqUgk4MjmEg==} + engines: {node: '>=6.9.0'} + '@babel/types@7.25.8': resolution: {integrity: sha512-JWtuCu8VQsMladxVz/P4HzHUGCAwpuqacmowgXFs5XjxIgKuNjnLokQzuVjlTvIzODaDmpjT3oxcC48vyk9EWg==} engines: {node: '>=6.9.0'} @@ -2661,6 +2652,10 @@ packages: resolution: {integrity: sha512-H45s8fVLYjbhFH62dIJ3WtmJ6RSPt/3DRO0ZcT2SUiYiQyz3BLVb9ADEnLl91m74aQPS3AzzeajZHYOalWe3bg==} engines: {node: '>=6.9.0'} + '@babel/types@7.27.1': + resolution: {integrity: sha512-+EzkxvLNfiUeKMgy/3luqfsCWFRXLb7U6wNQTk60tovuckwB15B191tJWvpp4HjiQWdJkCxO3Wbvc6jlk3Xb2Q==} + engines: {node: '>=6.9.0'} + '@bcoe/v8-coverage@0.2.3': resolution: {integrity: sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==} @@ -2706,28 +2701,28 @@ packages: '@cocalc/widgets@1.2.0': resolution: {integrity: sha512-q1Ka84hQYwocvoS81gjlgtT6cvgrEtgP9vKbAp6AzKd9moW9r6oHkduL8i9CT8GD/4b7fTJ6oAAqxh160VUuPA==} - '@cspell/cspell-bundled-dicts@8.18.1': - resolution: {integrity: sha512-gxciVVfQqCVXYH0p2Q5D7x7/SgaW3Wv5UjRwO+TCme0P2lVLl/IcfjkujZX+6UQkT7X4QRglXo1QN141UcCRCQ==} + '@cspell/cspell-bundled-dicts@8.19.4': + resolution: {integrity: sha512-2ZRcZP/ncJ5q953o8i+R0fb8+14PDt5UefUNMrFZZHvfTI0jukAASOQeLY+WT6ASZv6CgbPrApAdbppy9FaXYQ==} engines: {node: '>=18'} - '@cspell/cspell-json-reporter@8.18.1': - resolution: {integrity: sha512-/U3/8bcOL5O35fI9F7nN7Mhic0K01ZRxRV/+5jj7atltBbqgFSxViHCZBX0lDZJM96gUHn+3r6q6/8VEJahpDA==} + '@cspell/cspell-json-reporter@8.19.4': + resolution: {integrity: sha512-pOlUtLUmuDdTIOhDTvWxxta0Wm8RCD/p1V0qUqeP6/Ups1ajBI4FWEpRFd7yMBTUHeGeSNicJX5XeX7wNbAbLQ==} engines: {node: '>=18'} - '@cspell/cspell-pipe@8.18.1': - resolution: {integrity: sha512-QHndTQPkR1c02pvvQ7UKFtLjCXgY0OcX8zjTLrCkynmcQxJFjAZAh9cJ7NMOAxab+ciSnkaVf4KWaRSEG17z8Q==} + '@cspell/cspell-pipe@8.19.4': + resolution: {integrity: sha512-GNAyk+7ZLEcL2fCMT5KKZprcdsq3L1eYy3e38/tIeXfbZS7Sd1R5FXUe6CHXphVWTItV39TvtLiDwN/2jBts9A==} engines: {node: '>=18'} - '@cspell/cspell-resolver@8.18.1': - resolution: {integrity: sha512-T2sUBv0p9Hnfyg1xT1u3ESKuIWaaIDo0I8idh5DSlTpHgLjdIeAwasmFjEJ28qZv8OKSGawcSQKgJbStfbZASQ==} + '@cspell/cspell-resolver@8.19.4': + resolution: {integrity: sha512-S8vJMYlsx0S1D60glX8H2Jbj4mD8519VjyY8lu3fnhjxfsl2bDFZvF3ZHKsLEhBE+Wh87uLqJDUJQiYmevHjDg==} engines: {node: '>=18'} - '@cspell/cspell-service-bus@8.18.1': - resolution: {integrity: sha512-PwWl7EyhGIu4wHEhvBJb6xVlqMtFwQk0qLDArBvugL6nA+MX9NfG/w7PTgS7tCkFjVF1ku2sDzDLTDWwEk+MLw==} + '@cspell/cspell-service-bus@8.19.4': + resolution: {integrity: sha512-uhY+v8z5JiUogizXW2Ft/gQf3eWrh5P9036jN2Dm0UiwEopG/PLshHcDjRDUiPdlihvA0RovrF0wDh4ptcrjuQ==} engines: {node: '>=18'} - '@cspell/cspell-types@8.18.1': - resolution: {integrity: sha512-d/nMG+qnMbI/1JPm+lD0KcKpgtEHMRsHxkdtGyNCDgvHL/JOGaSHc5ERS3IUgBW0Dfya/3z9wPdaMcHEzt7YCQ==} + '@cspell/cspell-types@8.19.4': + resolution: {integrity: sha512-ekMWuNlFiVGfsKhfj4nmc8JCA+1ZltwJgxiKgDuwYtR09ie340RfXFF6YRd2VTW5zN7l4F1PfaAaPklVz6utSg==} engines: {node: '>=18'} '@cspell/dict-ada@4.1.0': @@ -2742,8 +2737,8 @@ packages: '@cspell/dict-bash@4.2.0': resolution: {integrity: sha512-HOyOS+4AbCArZHs/wMxX/apRkjxg6NDWdt0jF9i9XkvJQUltMwEhyA2TWYjQ0kssBsnof+9amax2lhiZnh3kCg==} - '@cspell/dict-companies@3.1.14': - resolution: {integrity: sha512-iqo1Ce4L7h0l0GFSicm2wCLtfuymwkvgFGhmu9UHyuIcTbdFkDErH+m6lH3Ed+QuskJlpQ9dM7puMIGqUlVERw==} + '@cspell/dict-companies@3.2.1': + resolution: {integrity: sha512-ryaeJ1KhTTKL4mtinMtKn8wxk6/tqD4vX5tFP+Hg89SiIXmbMk5vZZwVf+eyGUWJOyw5A1CVj9EIWecgoi+jYQ==} '@cspell/dict-cpp@6.0.8': resolution: {integrity: sha512-BzurRZilWqaJt32Gif6/yCCPi+FtrchjmnehVEIFzbWyeBd/VOUw77IwrEzehZsu5cRU91yPWuWp5fUsKfDAXA==} @@ -2766,8 +2761,8 @@ packages: '@cspell/dict-django@4.1.4': resolution: {integrity: sha512-fX38eUoPvytZ/2GA+g4bbdUtCMGNFSLbdJJPKX2vbewIQGfgSFJKY56vvcHJKAvw7FopjvgyS/98Ta9WN1gckg==} - '@cspell/dict-docker@1.1.13': - resolution: {integrity: sha512-85X+ZC/CPT3ie26DcfeMFkZSNuhS8DlAqPXzAjilHtGE/Nj+QnS3jyBz0spDJOJrjh8wx1+ro2oCK98sbVcztw==} + '@cspell/dict-docker@1.1.14': + resolution: {integrity: sha512-p6Qz5mokvcosTpDlgSUREdSbZ10mBL3ndgCdEKMqjCSZJFdfxRdNdjrGER3lQ6LMq5jGr1r7nGXA0gvUJK80nw==} '@cspell/dict-dotnet@5.0.9': resolution: {integrity: sha512-JGD6RJW5sHtO5lfiJl11a5DpPN6eKSz5M1YBa1I76j4dDOIqgZB6rQexlDlK1DH9B06X4GdDQwdBfnpAB0r2uQ==} @@ -2775,17 +2770,17 @@ packages: '@cspell/dict-elixir@4.0.7': resolution: {integrity: sha512-MAUqlMw73mgtSdxvbAvyRlvc3bYnrDqXQrx5K9SwW8F7fRYf9V4vWYFULh+UWwwkqkhX9w03ZqFYRTdkFku6uA==} - '@cspell/dict-en-common-misspellings@2.0.10': - resolution: {integrity: sha512-80mXJLtr0tVEtzowrI7ycVae/ULAYImZUlr0kUTpa8i57AUk7Zy3pYBs44EYIKW7ZC9AHu4Qjjfq4vriAtyTDQ==} + '@cspell/dict-en-common-misspellings@2.0.11': + resolution: {integrity: sha512-xFQjeg0wFHh9sFhshpJ+5BzWR1m9Vu8pD0CGPkwZLK9oii8AD8RXNchabLKy/O5VTLwyqPOi9qpyp1cxm3US4Q==} '@cspell/dict-en-gb@1.1.33': resolution: {integrity: sha512-tKSSUf9BJEV+GJQAYGw5e+ouhEe2ZXE620S7BLKe3ZmpnjlNG9JqlnaBhkIMxKnNFkLY2BP/EARzw31AZnOv4g==} - '@cspell/dict-en_us@4.4.0': - resolution: {integrity: sha512-TEfVT2NwvI9k1/ECjuC7GbULxenjJAbTLWMri1eMRk3mRGtqg5j0XzvvNRFuJWq8X48MdGVjsD+ZVI/VR94+eQ==} + '@cspell/dict-en_us@4.4.9': + resolution: {integrity: sha512-5gjqpUwhE+qP9A9wxD1+MGGJ3DNqTgSpiOsS10cGJfV4p/Z194XkDUZrUrJsnJA/3fsCZHAzcNWh8m0bw1v++A==} - '@cspell/dict-filetypes@3.0.11': - resolution: {integrity: sha512-bBtCHZLo7MiSRUqx5KEiPdGOmXIlDGY+L7SJEtRWZENpAKE+96rT7hj+TUUYWBbCzheqHr0OXZJFEKDgsG/uZg==} + '@cspell/dict-filetypes@3.0.12': + resolution: {integrity: sha512-+ds5wgNdlUxuJvhg8A1TjuSpalDFGCh7SkANCWvIplg6QZPXL4j83lqxP7PgjHpx7PsBUS7vw0aiHPjZy9BItw==} '@cspell/dict-flutter@1.1.0': resolution: {integrity: sha512-3zDeS7zc2p8tr9YH9tfbOEYfopKY/srNsAa+kE3rfBTtQERAZeOhe5yxrnTPoufctXLyuUtcGMUTpxr3dO0iaA==} @@ -2802,11 +2797,11 @@ packages: '@cspell/dict-gaming-terms@1.1.1': resolution: {integrity: sha512-tb8GFxjTLDQstkJcJ90lDqF4rKKlMUKs5/ewePN9P+PYRSehqDpLI5S5meOfPit8LGszeOrjUdBQ4zXo7NpMyQ==} - '@cspell/dict-git@3.0.4': - resolution: {integrity: sha512-C44M+m56rYn6QCsLbiKiedyPTMZxlDdEYAsPwwlL5bhMDDzXZ3Ic8OCQIhMbiunhCOJJT+er4URmOmM+sllnjg==} + '@cspell/dict-git@3.0.5': + resolution: {integrity: sha512-I7l86J2nOcpBY0OcwXLTGMbcXbEE7nxZme9DmYKrNgmt35fcLu+WKaiXW7P29V+lIXjJo/wKrEDY+wUEwVuABQ==} - '@cspell/dict-golang@6.0.20': - resolution: {integrity: sha512-b7nd9XXs+apMMzNSWorjirQsbmlwcTC0ViQJU8u+XNose3z0y7oNeEpbTPTVoN1+1sO9aOHuFwfwoOMFCDS14Q==} + '@cspell/dict-golang@6.0.21': + resolution: {integrity: sha512-D3wG1MWhFx54ySFJ00CS1MVjR4UiBVsOWGIjJ5Av+HamnguqEshxbF9mvy+BX0KqzdLVzwFkoLBs8QeOID56HA==} '@cspell/dict-google@1.0.8': resolution: {integrity: sha512-BnMHgcEeaLyloPmBs8phCqprI+4r2Jb8rni011A8hE+7FNk7FmLE3kiwxLFrcZnnb7eqM0agW4zUaNoB0P+z8A==} @@ -2858,8 +2853,8 @@ packages: '@cspell/dict-node@5.0.7': resolution: {integrity: sha512-ZaPpBsHGQCqUyFPKLyCNUH2qzolDRm1/901IO8e7btk7bEDF56DN82VD43gPvD4HWz3yLs/WkcLa01KYAJpnOw==} - '@cspell/dict-npm@5.1.34': - resolution: {integrity: sha512-UrUYqRQX864Cx9QJkg7eEIxmjYGqcje+x1j7bzl+a3jCKwT6jm+p0off6VEOf3EReHP0dWUSYO3Q0+pLL/N+FQ==} + '@cspell/dict-npm@5.2.4': + resolution: {integrity: sha512-/hK5ii9OzSOQkmTjkzJlEYWz+PBnz2hRq5Xu7d4aDURaynO9xMAcK31JJlKNQulBkVbQHxFZLUrzjdzdAr/Opw==} '@cspell/dict-php@4.0.14': resolution: {integrity: sha512-7zur8pyncYZglxNmqsRycOZ6inpDoVd4yFfz1pQRe5xaRWMiK3Km4n0/X/1YMWhh3e3Sl/fQg5Axb2hlN68t1g==} @@ -2870,8 +2865,8 @@ packages: '@cspell/dict-public-licenses@2.0.13': resolution: {integrity: sha512-1Wdp/XH1ieim7CadXYE7YLnUlW0pULEjVl9WEeziZw3EKCAw8ZI8Ih44m4bEa5VNBLnuP5TfqC4iDautAleQzQ==} - '@cspell/dict-python@4.2.17': - resolution: {integrity: sha512-xqMKfVc8d7yDaOChFdL2uWAN3Mw9qObB/Zr6t5w1OHbi23gWs7V1lI9d0mXAoqSK6N3mosbum4OIq/FleQDnlw==} + '@cspell/dict-python@4.2.18': + resolution: {integrity: sha512-hYczHVqZBsck7DzO5LumBLJM119a3F17aj8a7lApnPIS7cmEwnPc2eACNscAHDk7qAo2127oI7axUoFMe9/g1g==} '@cspell/dict-r@2.1.0': resolution: {integrity: sha512-k2512wgGG0lTpTYH9w5Wwco+lAMf3Vz7mhqV8+OnalIE7muA0RSuD9tWBjiqLcX8zPvEJr4LdgxVju8Gk3OKyA==} @@ -2888,8 +2883,8 @@ packages: '@cspell/dict-shell@1.1.0': resolution: {integrity: sha512-D/xHXX7T37BJxNRf5JJHsvziFDvh23IF/KvkZXNSh8VqcRdod3BAz9VGHZf6VDqcZXr1VRqIYR3mQ8DSvs3AVQ==} - '@cspell/dict-software-terms@5.0.5': - resolution: {integrity: sha512-ZjAOa8FI8/JrxaRqKT3eS7AQXFjU174xxQoKYMkmdwSyNIj7WUCAg10UeLqeMjFVv36zIO0Hm0dD2+Bvn18SLA==} + '@cspell/dict-software-terms@5.0.10': + resolution: {integrity: sha512-2nTcVKTYJKU5GzeviXGPtRRC9d23MtfpD4PM4pLSzl29/5nx5MxOUHkzPuJdyaw9mXIz8Rm9IlGeVAvQoTI8aw==} '@cspell/dict-sql@2.2.0': resolution: {integrity: sha512-MUop+d1AHSzXpBvQgQkCiok8Ejzb+nrzyG16E8TvKL2MQeDwnIvMe3bv90eukP6E1HWb+V/MA/4pnq0pcJWKqQ==} @@ -2909,20 +2904,20 @@ packages: '@cspell/dict-vue@3.0.4': resolution: {integrity: sha512-0dPtI0lwHcAgSiQFx8CzvqjdoXROcH+1LyqgROCpBgppommWpVhbQ0eubnKotFEXgpUCONVkeZJ6Ql8NbTEu+w==} - '@cspell/dynamic-import@8.18.1': - resolution: {integrity: sha512-VJHfS/Iv0Rx7wn1pjPmwgsaw6r72N5Cx2gL0slWk8Cogc8YiK7/6jsGnsvxJZVkHntJoiT8PrkIvhNKb3awD3g==} + '@cspell/dynamic-import@8.19.4': + resolution: {integrity: sha512-0LLghC64+SiwQS20Sa0VfFUBPVia1rNyo0bYeIDoB34AA3qwguDBVJJkthkpmaP1R2JeR/VmxmJowuARc4ZUxA==} engines: {node: '>=18.0'} - '@cspell/filetypes@8.18.1': - resolution: {integrity: sha512-vTOb2itP0pjrccvt8wcKiTGyw0pFMTPI85H12T6n8ZhqXTktPgQH2gEf/SU/5tkPNnBKr4GJ+FdU5hJ27HzgXQ==} + '@cspell/filetypes@8.19.4': + resolution: {integrity: sha512-D9hOCMyfKtKjjqQJB8F80PWsjCZhVGCGUMiDoQpcta0e+Zl8vHgzwaC0Ai4QUGBhwYEawHGiWUd7Y05u/WXiNQ==} engines: {node: '>=18'} - '@cspell/strong-weak-map@8.18.1': - resolution: {integrity: sha512-gsgv+5ZQD4aHNHDdfNGoafVYkqRynyYgaodt9Dp/3o0YKYcxGf2jrX8SJ35MfZ61qln0n7P4Djrg+bFV2zNH5w==} + '@cspell/strong-weak-map@8.19.4': + resolution: {integrity: sha512-MUfFaYD8YqVe32SQaYLI24/bNzaoyhdBIFY5pVrvMo1ZCvMl8AlfI2OcBXvcGb5aS5z7sCNCJm11UuoYbLI1zw==} engines: {node: '>=18'} - '@cspell/url@8.18.1': - resolution: {integrity: sha512-FRJbLYDC9ucpTOzbF6MohP2u5X3NU5L0RoVuoYCynqm/QOI38XP6WOEaI4H58CAn857bOIKZk0LZRPTGzi6Qlg==} + '@cspell/url@8.19.4': + resolution: {integrity: sha512-Pa474iBxS+lxsAL4XkETPGIq3EgMLCEb9agj3hAd2VGMTCApaiUvamR4b+uGXIPybN70piFxvzrfoxsG2uIP6A==} engines: {node: '>=18.0'} '@ctrl/tinycolor@3.6.1': @@ -2985,8 +2980,8 @@ packages: resolution: {integrity: sha512-d9zaMRSTIKDLhctzH12MtXvJKSSUhaHcjV+2Z+GK+EEY7XKpP5yR4x+N3TAcHTcu963nIr+TMcCb4DBCYX1z6Q==} engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} - '@formatjs/cli@6.6.3': - resolution: {integrity: sha512-vW9EQdHmxQg/+s9K39ZwKcIyyhmEMHOtsv1KyQFtjv+pbE3XmiB5ohoo4wAx3HDsrufrTsplGnQdQ+KB2wY/bA==} + '@formatjs/cli@6.7.1': + resolution: {integrity: sha512-ULiXbLkbuTyd8f0qaByu1Nuc+jbAOLH1qRAtHZ7waIABQGPBB93OQ2FFtQPgoYoupKOKyNr+PZXR6pOT45E4EQ==} engines: {node: '>= 16'} hasBin: true peerDependencies: @@ -3039,12 +3034,12 @@ packages: typescript: optional: true - '@google-ai/generativelanguage@3.1.0': - resolution: {integrity: sha512-hyWdTMwqit6aF294tPp4v/oBLqrSPHsieJlDG7rVO8eH/T6xBaimGZGgmosuBsnefDty71FIpPbPQR0WW7b1eg==} + '@google-ai/generativelanguage@3.2.0': + resolution: {integrity: sha512-/dmsV7GHx8VwhR6LI/PA4HvBckOzZNnkskYVddzvy8syc+sX2k3lq7+TRQbq2/tdmevNIGpftEg5CzAwt6zmtA==} engines: {node: '>=18'} - '@google-cloud/bigquery@7.9.3': - resolution: {integrity: sha512-e0jvEwnEyvQeJOn5Twd429yr7T5/+3wR0rO0Vfe+3T25P0dRShhGyknlh/Ucoafa7WR4imFhW8+542yR4VkJ+w==} + '@google-cloud/bigquery@7.9.4': + resolution: {integrity: sha512-C7jeI+9lnCDYK3cRDujcBsPgiwshWKn/f0BiaJmClplfyosCLfWE83iGQ0eKH113UZzjR9c9q7aZQg0nU388sw==} engines: {node: '>=14.0.0'} '@google-cloud/common@5.0.2': @@ -3124,6 +3119,9 @@ packages: peerDependencies: react: '*' + '@iovalkey/commands@0.1.0': + resolution: {integrity: sha512-/B9W4qKSSITDii5nkBCHyPkIkAi+ealUtr1oqBJsLxjSRLka4pxun2VvMNSmcwgAMxgXtQfl0qRv7TE+udPJzg==} + '@isaacs/cliui@8.0.2': resolution: {integrity: sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==} engines: {node: '>=12'} @@ -3291,18 +3289,18 @@ packages: '@jupyterlab/statedb@4.4.0': resolution: {integrity: sha512-fjYfcJ08iq9DYRWg2qEuoq4gtIm+F9oU8x8tflKJ0UZZ+HpTQDRqChrWUCQ/nijymD8zs05BG3J63TUXlpTYnw==} - '@langchain/anthropic@0.3.18': - resolution: {integrity: sha512-+2Pk9AFV4aBUOAqPT0VOaH6YbcVZ/xGoI6/dCW+W4svqcWwW7NExkAHdUAO9q+h2sUBbHs4j9bzLXlRQLvJ03A==} + '@langchain/anthropic@0.3.20': + resolution: {integrity: sha512-er/mdxdSs8BlQeH5GQtvEIBxf2slge3gsF7CW88S23xfASn6bnjAisQGQwRmvD+X0do7G526W7lNP93u6dMJ0A==} engines: {node: '>=18'} peerDependencies: '@langchain/core': ^0.3.46 - '@langchain/core@0.3.46': - resolution: {integrity: sha512-uZ9iI8OIWjOoY/OBkNrjobYHPI/Ky/X4NllKLtkAaKt0aI02HAm2EmZnGrUd0oHjwlYLzjat0FE329ACqiXadw==} + '@langchain/core@0.3.56': + resolution: {integrity: sha512-eF9MyInM9RLNisAygiCrzHnqzOnuzGWy4f1SAqAis+XIMhcA98WuZDNWxyX9pP3aKQGc47FAJ/9XWJwv5KiquA==} engines: {node: '>=18'} - '@langchain/google-genai@0.2.4': - resolution: {integrity: sha512-2w/QZMbSRpo6plKilqXXAmhdq5mB1gaDwOeN1tVpKBOwKtxsH9jOzxukKhs3QvXmfy/xod/WWLajBXxU+63/Dw==} + '@langchain/google-genai@0.2.9': + resolution: {integrity: sha512-AcHAD0VX8Qci5ApLYd6ojMTJUk4ZTeMQrsOYsOVv09sy3FtrLHkRxZXkb0qgtDS9EJ+w40904B+s6s2H9+HZmQ==} engines: {node: '>=18'} peerDependencies: '@langchain/core': ^0.3.46 @@ -3319,8 +3317,8 @@ packages: peerDependencies: '@langchain/core': ^0.3.46 - '@langchain/openai@0.5.6': - resolution: {integrity: sha512-zN0iyJthPNmcefIBVybZwcTBgcqu/ElJFov42ZntxEncK4heOMAE9lkq9LQ5CaPU/SgrduibrM1oL57+tLUtaA==} + '@langchain/openai@0.5.10': + resolution: {integrity: sha512-hBQIWjcVxGS7tgVvgBBmrZ5jSaJ8nu9g6V64/Tx6KGjkW7VdGmUvqCO+koiQCOZVL7PBJkHWAvDsbghPYXiZEA==} engines: {node: '>=18'} peerDependencies: '@langchain/core': ^0.3.46 @@ -3487,8 +3485,8 @@ packages: '@mermaid-js/parser@0.4.0': resolution: {integrity: sha512-wla8XOWvQAwuqy+gxiZqY+c7FokraOTHRWMsbB4AgRx9Sy7zKslNyejy7E+a77qHfey5GXw/ik3IXv/NHMJgaA==} - '@microlink/react-json-view@1.26.1': - resolution: {integrity: sha512-2H5QCYdZlJi+oN4YBiUYPPFTNh/KLCN9i9yz8NwmSkRqXSRXYtEVIRffc9L34jdopKGK/tK21SeuzXVJHQLkfQ==} + '@microlink/react-json-view@1.26.2': + resolution: {integrity: sha512-NamaHDT21njvbg2RZQq+rnu+owlPyj5lnUdVH5ZtChfTX+75QD2EGnccB1gs0De42jdPj77UQHYLr7d4J46IYA==} engines: {node: '>=17'} peerDependencies: react: '>= 15' @@ -3499,23 +3497,31 @@ packages: peerDependencies: zod: '>= 3' - '@module-federation/error-codes@0.11.2': - resolution: {integrity: sha512-ik1Qnn0I+WyEdprTck9WGlH41vGsVdUg8cfO+ZM02qOb2cZm5Vu3SlxGAobj6g7uAj0g8yINnd7h7Dci40BxQA==} + '@module-federation/error-codes@0.13.1': + resolution: {integrity: sha512-azgGDBnFRfqlivHOl96ZjlFUFlukESz2Rnnz/pINiSqoBBNjUE0fcAZP4X6jgrVITuEg90YkruZa7pW9I3m7Uw==} + + '@module-federation/runtime-core@0.13.1': + resolution: {integrity: sha512-TfyKfkSAentKeuvSsAItk8s5tqQSMfIRTPN2e1aoaq/kFhE+7blps719csyWSX5Lg5Es7WXKMsXHy40UgtBtuw==} - '@module-federation/runtime-core@0.11.2': - resolution: {integrity: sha512-dia5kKybi6MFU0s5PgglJwN27k7n9Sf69Cy5xZ4BWaP0qlaXTsxHKO0PECHNt2Pt8jDdyU29sQ4DwAQfxpnXJQ==} + '@module-federation/runtime-tools@0.13.1': + resolution: {integrity: sha512-GEF1pxqLc80osIMZmE8j9UKZSaTm2hX2lql8tgIH/O9yK4wnF06k6LL5Ah+wJt+oJv6Dj55ri/MoxMP4SXoPNA==} - '@module-federation/runtime-tools@0.11.2': - resolution: {integrity: sha512-4MJTGAxVq6vxQRkTtTlH7Mm9AVqgn0X9kdu+7RsL7T/qU+jeYsbrntN2CWG3GVVA8r5JddXyTI1iJ0VXQZLV1w==} + '@module-federation/runtime@0.13.1': + resolution: {integrity: sha512-ZHnYvBquDm49LiHfv6fgagMo/cVJneijNJzfPh6S0CJrPS2Tay1bnTXzy8VA5sdIrESagYPaskKMGIj7YfnPug==} - '@module-federation/runtime@0.11.2': - resolution: {integrity: sha512-Ya9u/L6z2LvhgpqxuKCB7LcigIIRf1BbaxAZIH7mzbq/A7rZtTP7v+73E433jvgiAlbAfPSZkeoYGele6hfRwA==} + '@module-federation/sdk@0.13.1': + resolution: {integrity: sha512-bmf2FGQ0ymZuxYnw9bIUfhV3y6zDhaqgydEjbl4msObKMLGXZqhse2pTIIxBFpIxR1oONKX/y2FAolDCTlWKiw==} - '@module-federation/sdk@0.11.2': - resolution: {integrity: sha512-SBFe5xOamluT900J4AGBx+2/kCH/JbfqXoUwPSAC6PRzb8Y7LB0posnOGzmqYsLZXT37vp3d6AmJDsVoajDqxw==} + '@module-federation/webpack-bundler-runtime@0.13.1': + resolution: {integrity: sha512-QSuSIGa09S8mthbB1L6xERqrz+AzPlHR6D7RwAzssAc+IHf40U6NiTLPzUqp9mmKDhC5Tm0EISU0ZHNeJpnpBQ==} - '@module-federation/webpack-bundler-runtime@0.11.2': - resolution: {integrity: sha512-WdwIE6QF+MKs/PdVu0cKPETF743JB9PZ62/qf7Uo3gU4fjsUMc37RnbJZ/qB60EaHHfjwp1v6NnhZw1r4eVsnw==} + '@msgpack/msgpack@2.8.0': + resolution: {integrity: sha512-h9u4u/jiIRKbq25PM+zymTyW6bhTzELvOoUd+AvYriWOAKpLGnIamaET3pnHYoI5iYphAHBI4ayx0MehR+VVPQ==} + engines: {node: '>= 10'} + + '@msgpack/msgpack@3.1.1': + resolution: {integrity: sha512-DnBpqkMOUGayNVKyTLlkM6ILmU/m/+VUxGkuQlPQVAcvreLz5jn1OlQnWd8uHKL/ZSiljpM12rjRhr51VtvJUQ==} + engines: {node: '>= 18'} '@napi-rs/canvas-android-arm64@0.1.69': resolution: {integrity: sha512-4icWTByY8zPvM9SelfQKf3I6kwXw0aI5drBOVrwfER5kjwXJd78FPSDSZkxDHjvIo9Q86ljl18Yr963ehA4sHQ==} @@ -3584,33 +3590,6 @@ packages: '@napi-rs/triples@1.2.0': resolution: {integrity: sha512-HAPjR3bnCsdXBsATpDIP5WCrw0JcACwhhrwIAQhiR46n+jm+a2F8kBsfseAuWtSyQ+H3Yebt2k43B5dy+04yMA==} - '@nats-io/jetstream@3.0.0': - resolution: {integrity: sha512-pNJDQJfRmRoDsBYSysvk+kPr9cSj1XnLnAxSC7KHVTsu2YqMEWSqgs5YzIULbq1S12oFhM/vvlOiEj/9jY39lg==} - - '@nats-io/jwt@0.0.10-5': - resolution: {integrity: sha512-DoBIEJIgWZJLtMRef+oFvk1lEgyTIVcAZbBXYGm91B7az1bCu7A3Npsn/VD7RH5waYAz+EPS8Ot7zyU6iKAuqg==} - - '@nats-io/kv@3.0.0': - resolution: {integrity: sha512-ybERTWMemaBpMdbFv4718Ffnb427ykzlci+6oAmtaeyw3oM25aACEo+xgra9UKxQVubj1UGx2WcNUy9ojiU9zA==} - - '@nats-io/nats-core@3.0.0': - resolution: {integrity: sha512-Ma2VrFkSew35cIRxMvnbQ2VOE2crP7BCIIsGoCqVwvn6Jwmk2ypcHjj9pWn+bPXqmFlamLUrYGH6ncMDb4eXsQ==} - - '@nats-io/nkeys@1.2.0-8': - resolution: {integrity: sha512-o6nfNYySzVZL7mIJA+ejD3JdMFbLn9VWtptM2cnHh4jeF/FvhPhCkn8BH4jQE7asCV/SSialu6jGhsAFV1FKkQ==} - engines: {node: '>=16.0.0'} - - '@nats-io/nkeys@2.0.3': - resolution: {integrity: sha512-JVt56GuE6Z89KUkI4TXUbSI9fmIfAmk6PMPknijmuL72GcD+UgIomTcRWiNvvJKxA01sBbmIPStqJs5cMRBC3A==} - engines: {node: '>=18.0.0'} - - '@nats-io/nuid@2.0.3': - resolution: {integrity: sha512-TpA3HEBna/qMVudy+3HZr5M3mo/L1JPofpVT4t0HkFGkz2Cn9wrlrQC8tvR8Md5Oa9//GtGG26eN0qEWF5Vqew==} - engines: {node: '>= 18.x'} - - '@nats-io/services@3.0.0': - resolution: {integrity: sha512-tsZu5Chd616p2sP9QWZT0gBkO2zcRjbENhcJcV2hBGA6t5jgP5+iynwMAs+9HKFUNVdUgDxdpUYn8NUpaslzPw==} - '@nestjs/axios@4.0.0': resolution: {integrity: sha512-1cB+Jyltu/uUPNQrpUimRHEQHrnQrpLzVj6dU3dgn6iDDDdahr10TgHFGTmw5VuJ9GzKZsCLDL78VSwJAs/9JQ==} peerDependencies: @@ -3618,11 +3597,11 @@ packages: axios: ^1.3.1 rxjs: ^7.0.0 - '@nestjs/common@11.0.20': - resolution: {integrity: sha512-/GH8NDCczjn6+6RNEtSNAts/nq/wQE8L1qZ9TRjqjNqEsZNE1vpFuRIhmcO2isQZ0xY5rySnpaRdrOAul3gQ3A==} + '@nestjs/common@11.1.1': + resolution: {integrity: sha512-crzp+1qeZ5EGL0nFTPy9NrVMAaUWewV5AwtQyv6SQ9yQPXwRl9W9hm1pt0nAtUu5QbYMbSuo7lYcF81EjM+nCA==} peerDependencies: - class-transformer: '*' - class-validator: '*' + class-transformer: '>=0.4.1' + class-validator: '>=0.13.2' reflect-metadata: ^0.1.12 || ^0.2.0 rxjs: ^7.1.0 peerDependenciesMeta: @@ -3631,8 +3610,8 @@ packages: class-validator: optional: true - '@nestjs/core@11.0.20': - resolution: {integrity: sha512-yUkEzBGiRNSEThVl6vMCXgoA9sDGWoRbJsTLdYdCC7lg7PE1iXBnna1FiBfQjT995pm0fjyM1e3WsXmyWeJXbw==} + '@nestjs/core@11.1.1': + resolution: {integrity: sha512-UFoUAgLKFT+RwHTANJdr0dF7p0qS9QjkaUPjg8aafnjM/qxxxrUVDB49nVvyMlk+Hr1+vvcNaOHbWWQBxoZcHA==} engines: {node: '>= 20'} peerDependencies: '@nestjs/common': ^11.0.0 @@ -3768,8 +3747,8 @@ packages: resolution: {integrity: sha512-T8TbSnGsxo6TDBJx/Sgv/BlVJL3tshxZP7Aq5R1mSnM5OcHY2dQaxLMu2+E8u3gN0MLOzdjurqN4ZRVuzQycOQ==} engines: {node: '>=8.0'} - '@openapitools/openapi-generator-cli@2.19.1': - resolution: {integrity: sha512-APP3EPI/m7bg220qS+7UAFiyLJFbNCjlsEEjrP2sLmW4Za44U8e3Lb2zDy3sbvJvIUnpYWe+hu9RbrxrPP9djQ==} + '@openapitools/openapi-generator-cli@2.20.2': + resolution: {integrity: sha512-dNFwQcQu6+rmEWSJj4KUx468+p6Co7nfpVgi5QEfVhzKj7wBytz9GEhCN2qmVgtg3ZX8H6nxbXI8cjh7hAxAqg==} engines: {node: '>=16'} hasBin: true @@ -4041,78 +4020,71 @@ packages: peerDependencies: react: ^16.14.0 || >=17 - '@rspack/binding-darwin-arm64@1.3.4': - resolution: {integrity: sha512-cVfzvtVf05VumGrxFz9Tk0QHk4jWBcQBNQuaql2enco8NKnzuX+v0+VP2mbNfvgICBgrHWKRYinAX5IxTEJdCw==} + '@rspack/binding-darwin-arm64@1.3.11': + resolution: {integrity: sha512-sGoFDXYNinubhEiPSjtA/ua3qhMj6VVBPTSDvprZj+MT18YV7tQQtwBpm+8sbqJ1P5y+a3mzsP3IphRWyIQyXw==} cpu: [arm64] os: [darwin] - '@rspack/binding-darwin-x64@1.3.4': - resolution: {integrity: sha512-vXzf8xI+njdOSXGyI39lqkH/bSwyrx4jXW9+Pj2zbmRJVHZVyJsrx4kSpOoZX5zx/a7BbvuHRwrmmJS2HEOobw==} + '@rspack/binding-darwin-x64@1.3.11': + resolution: {integrity: sha512-4zgOkCLxhp4Ki98GuDaZgz4exXcE4+sgvXY/xA/A5FGPVRbfQLQ5psSOk0F/gvMua1r15E66loQRJpuzUK6bTA==} cpu: [x64] os: [darwin] - '@rspack/binding-linux-arm64-gnu@1.3.4': - resolution: {integrity: sha512-c45kQrqzR05Jc62oAetiAXrnPWhyt3Pz1h2LF62OW8SYXxdBskAKpWntTts/T96HMLqNPH3MAfDKxyfOb/n0eQ==} + '@rspack/binding-linux-arm64-gnu@1.3.11': + resolution: {integrity: sha512-NIOaIfYUmJs1XL4lbGVtcMm1KlA/6ZR6oAbs2ekofKXtJYAFQgnLTf7ZFmIwVjS0mP78BmeSNcIM6pd2w5id4w==} cpu: [arm64] os: [linux] - '@rspack/binding-linux-arm64-musl@1.3.4': - resolution: {integrity: sha512-/dUvkcBVnV95tA7BpeA6IZhrbpwxFzvgU6qF/iKxyHdMjwHdjn1Um7nR00TPOn/SIHzljafHpL6CuVTLNB5xvA==} + '@rspack/binding-linux-arm64-musl@1.3.11': + resolution: {integrity: sha512-CRRAQ379uzA2QfD9HHNtxuuqzGksUapMVcTLY5NIXWfvHLUJShdlSJQv3UQcqgAJNrMY7Ex1PnoQs1jZgUiqZA==} cpu: [arm64] os: [linux] - '@rspack/binding-linux-x64-gnu@1.3.4': - resolution: {integrity: sha512-jZgGKoH7RyqJbyEcvhEE9wqK6mwoWxLF3c3LD2+e+dKVcO5iCfMuulCGdzUnYyvH97CtvN5j0/20PErRXubyjg==} + '@rspack/binding-linux-x64-gnu@1.3.11': + resolution: {integrity: sha512-k3OyvLneX2ZeL8z/OzPojpImqy6PgqKJD+NtOvcr/TgbgADHZ3xQttf6B2X+qnZMAgOZ+RTeTkOFrvsg9AEKmA==} cpu: [x64] os: [linux] - '@rspack/binding-linux-x64-musl@1.3.4': - resolution: {integrity: sha512-Xko8mZ598vQDubig4rLTuCDjXplSDJbJEg6B3NykGaE6CMH2bI/6KJfVKEKo25ayNzoouT/1MxyOxB4mQuspbA==} + '@rspack/binding-linux-x64-musl@1.3.11': + resolution: {integrity: sha512-2agcELyyQ95jWGCW0YWD0TvAcN40yUjmxn9NXQBLHPX5Eb07NaHXairMsvV9vqQsPsq0nxxfd9Wsow18Y5r/Hw==} cpu: [x64] os: [linux] - '@rspack/binding-win32-arm64-msvc@1.3.4': - resolution: {integrity: sha512-Q+pU/MRylYB3XoNTM1LYPxWV1KUxeZY6R54twtoDFXhZn/PDflP7qH1BHQ0KN50HuG5ZK89CaFSPMF7+vs6HNA==} + '@rspack/binding-win32-arm64-msvc@1.3.11': + resolution: {integrity: sha512-sjGoChazu0krigT/LVwGUsgCv3D3s/4cR/3P4VzuDNVlb4pbh1CDa642Fr0TceqAXCeKW5GiL/EQOfZ4semtcQ==} cpu: [arm64] os: [win32] - '@rspack/binding-win32-ia32-msvc@1.3.4': - resolution: {integrity: sha512-aqP/l+YAG4L9I1klW3uSq+olafw8xzAP+4cd/Nyy2SSDnhWsDgawxJyO6FIeM+hXwC73ChH9pcXHGgEC7iCcHw==} + '@rspack/binding-win32-ia32-msvc@1.3.11': + resolution: {integrity: sha512-tjywW84oQLSqRmvQZ+fXP7e3eNmjScYrlWEPAQFjf08N19iAJ9UOGuuFw8Fk5ZmrlNZ2Qo9ASSOI7Nnwx2aZYg==} cpu: [ia32] os: [win32] - '@rspack/binding-win32-x64-msvc@1.3.4': - resolution: {integrity: sha512-xDU1njA1gIzIL6Nt5ARW4vWeVgwf00i7tPONg+6fJyMgwuFfwq2qEG7UFSBOedYjsSTCW+UoBh7riN7lRiFIvw==} + '@rspack/binding-win32-x64-msvc@1.3.11': + resolution: {integrity: sha512-pPy3yU6SAMfEPY7ki1KAetiDFfRbkYMiX3F89P9kX01UAePkLRNsjacHF4w7N3EsBsWn1FlGaYZdlzmOI5pg2Q==} cpu: [x64] os: [win32] - '@rspack/binding@1.3.4': - resolution: {integrity: sha512-wDRqqNfrVXuHAEm25mPlhroKN+v4uwhihVnZF4duz0I0L5rbsUNCy7uEda0GrBXkj3jkKLfg60mSd9MCZD0JZw==} + '@rspack/binding@1.3.11': + resolution: {integrity: sha512-BbMfZHqfH+CzFtZDg+v9nbKifJIJDUPD6KuoWlHq581koKvD3UMx6oVrj9w13JvO2xWNPeHclmqWAFgoD7faEQ==} - '@rspack/cli@1.3.4': - resolution: {integrity: sha512-MqstfifN6Q3+sNqUKZ29kHtAW/gqXyfH6TWZhJmqUsE9UWVuUOB607ze69mDBWht1cE/ml1waHna25dVCSx6AA==} + '@rspack/cli@1.3.11': + resolution: {integrity: sha512-+W+E5RyOAk8Am+e6Nfh75jA0rD1RlronmmkU9vGmAi6hl75hmzn9XAjdRNazBS4UO9+M1zs+Kz5hLxV8t52j9A==} hasBin: true peerDependencies: '@rspack/core': ^1.0.0-alpha || ^1.x - '@rspack/tracing': ^1.x - peerDependenciesMeta: - '@rspack/tracing': - optional: true - '@rspack/core@1.3.4': - resolution: {integrity: sha512-NIIk/0XUkyU9G8eby6kKO3YFpeDn8RsUIzNuElcfi1rWuuK+NLasDqUYOFqqlNBKnZpmtZ+SXAV9jE5k/i3uwg==} + '@rspack/core@1.3.11': + resolution: {integrity: sha512-aSYPtT1gum5MCfcFANdTroJ4JwzozuL3wX0twMGNAB7amq6+nZrbsUKWjcHgneCeZdahxzrKdyYef3FHaJ7lEA==} engines: {node: '>=16.0.0'} peerDependencies: - '@rspack/tracing': ^1.x '@swc/helpers': '>=0.5.1' peerDependenciesMeta: - '@rspack/tracing': - optional: true '@swc/helpers': optional: true - '@rspack/dev-server@1.1.1': - resolution: {integrity: sha512-9r7vOml2SrFA8cvbcJdSan9wHEo1TPXezF22+s5jvdyAAywg8w7HqDol6TPVv64NUonP1DOdyLxZ+6UW6WZiwg==} + '@rspack/dev-server@1.1.2': + resolution: {integrity: sha512-YNzXxWn6DV3X9yeJZ9bqX77wuhm2ko3sGavilBGi1MWuNihhWfhh9dlbipudPyoiwLl0lbioxA/hevosr+ajLg==} engines: {node: '>= 18.12.0'} peerDependencies: '@rspack/core': '*' @@ -4121,8 +4093,8 @@ packages: resolution: {integrity: sha512-VynGOEsVw2s8TAlLf/uESfrgfrq2+rcXB1muPJYBWbsm1Oa6r5qVQhjA5ggM6z/coYPrsVMgovl3Ff7Q7OCp1w==} engines: {node: '>=16.0.0'} - '@rspack/plugin-react-refresh@1.2.0': - resolution: {integrity: sha512-DTsbtggCfsiXE5QQtYMS8rKfEF8GIjwPDbgIT6Kg8BlAjpJY4jT5IisyhfIi7YOT3d5RIvu60iFB6Kr9sSMsnA==} + '@rspack/plugin-react-refresh@1.4.3': + resolution: {integrity: sha512-wZx4vWgy5oMEvgyNGd/oUKcdnKaccYWHCRkOqTdAPJC3WcytxhTX+Kady8ERurSBiLyQpoMiU3Iyd+F1Y2Arbw==} peerDependencies: react-refresh: '>=0.10.0 <1.0.0' webpack-hot-middleware: 2.x @@ -4170,11 +4142,20 @@ packages: '@sinonjs/text-encoding@0.7.3': resolution: {integrity: sha512-DE427ROAphMQzU4ENbliGYrBSYPXF+TtLg9S8vzeA+OF4ZKzoDdzfL8sxuMUGS/lgRhM6j1URSk9ghf7Xo1tyA==} + '@socket.io/component-emitter@3.1.2': + resolution: {integrity: sha512-9BCxFwvbGg/RsZK9tjXd8s4UcwR0MWeFQ1XEKIQVVvAGJyINdrqKMcTRyLoK8Rse1GjzLV9cwjWV1olXRWEXVA==} + + '@socket.io/redis-streams-adapter@0.2.2': + resolution: {integrity: sha512-BMPa6oGC0wFgpMXoGksbJ75zMBwk+79pxjHc2YusdoK+X0BxN4fTsqEBuFV7yeXi9ekbi87rwlsT61+WZGVW9g==} + engines: {node: '>=14.0.0'} + peerDependencies: + socket.io-adapter: ^2.5.4 + '@speed-highlight/core@1.2.7': resolution: {integrity: sha512-0dxmVj4gxg3Jg879kvFS/msl4s9F3T9UXC1InxgOf7t5NvcPD97u/WTA5vL/IxWHMn7qSxBozqrnnE2wvl1m8g==} - '@stripe/react-stripe-js@3.6.0': - resolution: {integrity: sha512-zEnaUmTOsu7zhl3RWbZ0l1dRiad+QIbcAYzQfF+yYelURJowhAwesRHKWH+qGAIBEpkO6/VCLFHhVLH9DtPlnw==} + '@stripe/react-stripe-js@3.7.0': + resolution: {integrity: sha512-PYls/2S9l0FF+2n0wHaEJsEU8x7CmBagiH7zYOsxbBlLIHEsqUIQ4MlIAbV9Zg6xwT8jlYdlRIyBTHmO3yM7kQ==} peerDependencies: '@stripe/stripe-js': '>=1.44.1 <8.0.0' react: '>=16.8.0 <20.0.0' @@ -4277,6 +4258,9 @@ packages: '@types/cookie@0.6.0': resolution: {integrity: sha512-4Kh9a6B2bQciAhf7FSuMRRkUWecJgJu9nPnx3yzpsfXX/c50REIqpHY4C82bXP90qrLtXtkDxTZosYO3UpOwlA==} + '@types/cors@2.8.18': + resolution: {integrity: sha512-nX3d0sxJW41CqQvfOzVG1NCTXfFDrDWIghCZncpHeWlVFd81zxB/DLhg7avFg6eHLCRX7ckBmoIIcqa++upvJA==} + '@types/d3-array@3.2.1': resolution: {integrity: sha512-Y2Jn2idRrLzUfAKV2LyRImR+y4oa2AntrgID95SHJxuMUrkNXmanDSed71sRNZysveJVt1hLLemQZIady0FpEg==} @@ -4394,11 +4378,14 @@ packages: '@types/express-serve-static-core@4.19.0': resolution: {integrity: sha512-bGyep3JqPCRry1wq+O5n7oiBgGWmeIJXPjXXCo8EK0u8duZGSYar7cGqd3ML2JUsLGeB7fmc06KYo9fLGWqPvQ==} + '@types/express-serve-static-core@5.0.6': + resolution: {integrity: sha512-3xhRnjJPkULekpSzgtoNYYcTWgEZkp4myc+Saevii5JPnHNvHMRlBSHDbs7Bh1iPPoVTERHEZXyhyLbMEsExsA==} + '@types/express-session@1.18.1': resolution: {integrity: sha512-S6TkD/lljxDlQ2u/4A70luD8/ZxZcrU5pQwI1rVXCiaVIywoFgbA+PIUNDjPhQpPdK0dGleLtYc/y7XWBfclBg==} - '@types/express@4.17.21': - resolution: {integrity: sha512-ejlPM315qwLpaQlQDTjPdsUFSc6ZsP4AN6AlWnogPjQ7CVi7PYF3YVz+CY3jE2pwYf7E/7HlDAN0rV2GxTG0HQ==} + '@types/express@4.17.22': + resolution: {integrity: sha512-eZUmSnhRX9YRSkplpz0N+k6NljUUn5l3EWZIKZvYzhvMphEuNiyyy1viH/ejgt66JWgALwC/gtSUAeQKtSwW/w==} '@types/formidable@3.4.5': resolution: {integrity: sha512-s7YPsNVfnsng5L8sKnG/Gbb2tiwwJTY1conOkJzTMRvJAlLFW1nEua+ADsJQu8N1c0oTHx9+d5nqg10WuT9gHQ==} @@ -4439,11 +4426,11 @@ packages: '@types/istanbul-reports@3.0.1': resolution: {integrity: sha512-c3mAZEuK0lvBp8tmuL74XRKn1+y2dcwOUpH7x4WrF6gk1GIgiluDRgMYQtw2OFcBvAJWlt6ASU3tSqxp0Uu0Aw==} - '@types/jest@29.5.13': - resolution: {integrity: sha512-wd+MVEZCHt23V0/L642O5APvspWply/rGY5BcW4SUETo2UzPU3Z26qr8jC2qxpimI2jjx9h7+2cj2FwIr01bXg==} + '@types/jest@29.5.14': + resolution: {integrity: sha512-ZN+4sdnLUbo8EVvVc2ao0GFW6oVrQRPn4K2lglySj7APvSrgzxHiNNK99us4WDMi57xxA2yggblIAMNhXOotLQ==} - '@types/jquery@3.5.30': - resolution: {integrity: sha512-nbWKkkyb919DOUxjmRVk8vwtDb0/k8FKncmUKFi+NY+QXqWltooxTrswvz4LspQwxvLdvzBN1TImr6cw3aQx2A==} + '@types/jquery@3.5.32': + resolution: {integrity: sha512-b9Xbf4CkMqS02YH8zACqN1xzdxc3cO735Qe5AbSUFmyOiaWAbcpqh9Wna+Uk0vgACvoQHpWDg2rGdHkYPLmCiQ==} '@types/js-yaml@4.0.9': resolution: {integrity: sha512-k4MGaQl5TGo/iipqb2UDG2UwjXziSWkh0uysQelTlJpX1qGlpUZYm8PnO4DxG1qBomtJUdYJ6qR6xdIah10JLg==} @@ -4451,8 +4438,9 @@ packages: '@types/json-schema@7.0.15': resolution: {integrity: sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==} - '@types/json-stable-stringify@1.0.36': - resolution: {integrity: sha512-b7bq23s4fgBB76n34m2b3RBf6M369B0Z9uRR8aHTMd8kZISRkmDEpPD8hhpYvDFzr3bJCPES96cm3Q6qRNDbQw==} + '@types/json-stable-stringify@1.2.0': + resolution: {integrity: sha512-PEHY3ohqolHqAzDyB1+31tFaAMnoLN7x/JgdcGmNZ2uvtEJ6rlFCUYNQc0Xe754xxCYLNGZbLUGydSE6tS4S9A==} + deprecated: This is a stub types definition. json-stable-stringify provides its own type definitions, so you do not need this installed. '@types/katex@0.16.7': resolution: {integrity: sha512-HMwFiRujE5PjrgwHQ25+bsLJgowjGjm5Z8FVSf0N6PwgJrwxH0QxzHYDcKsTfV3wva0vzrpqMTJS2jXPr5BMEQ==} @@ -4466,8 +4454,8 @@ packages: '@types/linkify-it@5.0.0': resolution: {integrity: sha512-sVDA58zAw4eWAffKOaQH5/5j3XeayukzDk+ewSsnv3p4yJEZHCCzMDiZM8e0OUrRvmpGZ85jf4yDHkHsgBNr9Q==} - '@types/lodash@4.17.9': - resolution: {integrity: sha512-w9iWudx1XWOHW5lQRS9iKpK/XuRhnN+0T7HvdCCd802FYkT1AMTnxndJHGrNJwRoRHkslGr4S29tjm1cT7x/7w==} + '@types/lodash@4.17.17': + resolution: {integrity: sha512-RRVJ+J3J+WmyOTqnz3PiBLA501eKwXl2noseKOrNo/6+XEHjTAxO4xHvxQB6QuNm+s4WRbn6rSiap8+EA+ykFQ==} '@types/long@4.0.2': resolution: {integrity: sha512-MqTGEo5bj5t157U6fA/BiDynNkn0YknVdh48CMPkTSpFTVmvao5UQmm7uEF6xBEo7qIMAlY/JSleYaE6VOdpaA==} @@ -4506,6 +4494,9 @@ packages: '@types/ms@0.7.31': resolution: {integrity: sha512-iiUgKzV9AuaEkZqkOLDIvlQiL6ltuZd9tGcW3gwpnX8JbuiuhFlEGmmFXEXkN50Cvq7Os88IY2v0dkDqXYWVgA==} + '@types/ms@0.7.34': + resolution: {integrity: sha512-nG96G3Wp6acyAgJqGasjODb+acrI7KltPiRxzHPXnP3NgI28bpQDRv53olbqGXbfcgF5aiiHmO3xpwEpS5Ld9g==} + '@types/node-cleanup@2.1.5': resolution: {integrity: sha512-+82RAk5uYiqiMoEv2fPeh03AL4pB5d3TL+Pf+hz31Mme6ECFI1kRlgmxYjdSlHzDbJ9yLorTnKi4Op5FA54kQQ==} @@ -4518,8 +4509,11 @@ packages: '@types/node-zendesk@2.0.15': resolution: {integrity: sha512-8Kk7ceoSUiBst5+jX/121QBD8f69F5j9CqvLA1Ka+24vo+B6sPINnqPwfBJAs4/9jBpCLh7h2SH9hUbABiuZXg==} - '@types/node@18.19.86': - resolution: {integrity: sha512-fifKayi175wLyKyc5qUfyENhQ1dCNI1UNjp653d8kuYcPQN5JhX3dGuP/XmvPTg/xRBn1VTLpbmi+H/Mr7tLfQ==} + '@types/node@18.19.103': + resolution: {integrity: sha512-hHTHp+sEz6SxFsp+SA+Tqrua3AbmlAw+Y//aEwdHrdZkYVRWdvWD3y5uPZ0flYOkgskaFWqZ/YGFm3FaFQ0pRw==} + + '@types/node@22.15.21': + resolution: {integrity: sha512-EV/37Td6c+MgKAbkcLG6vqZ2zEYHD7bvSrzqqs2RIhbA6w3x+Dqz8MZM3sP6kGTeLrdoOgKZe+Xja7tUB2DNkQ==} '@types/node@9.6.61': resolution: {integrity: sha512-/aKAdg5c8n468cYLy2eQrcR5k6chlbNwZNGUj3TboyPa2hcO2QAJcfymlqPzMiRj8B6nYKXjzQz36minFE0RwQ==} @@ -4551,8 +4545,8 @@ packages: '@types/pbf@3.0.5': resolution: {integrity: sha512-j3pOPiEcWZ34R6a6mN07mUkM4o4Lwf6hPNt8eilOeZhTFbxFXmKhvXl9Y28jotFPaI1bpPDJsbCprUoNke6OrA==} - '@types/pg@8.11.11': - resolution: {integrity: sha512-kGT1qKM8wJQ5qlawUrEkXgvMSXoV213KfMGXcwfDwUIfUHXqXYXOfS1nE1LINRJVVVx5wCm70XnFlMHaIcQAfw==} + '@types/pg@8.15.2': + resolution: {integrity: sha512-+BKxo5mM6+/A1soSHBI7ufUglqYXntChLDyTbvcAn1Lawi9J7J9Ok3jt6w7I0+T/UDJ4CyhHk66+GZbwmkYxSg==} '@types/pica@5.1.3': resolution: {integrity: sha512-13SEyETRE5psd9bE0AmN+0M1tannde2fwHfLVaVIljkbL9V0OfFvKwCicyeDvVYLkmjQWEydbAlsDsmjrdyTOg==} @@ -4566,14 +4560,17 @@ packages: '@types/prop-types@15.7.13': resolution: {integrity: sha512-hCZTSvwbzWGvhqxp/RqVqwU999pBf2vp7hzIjiYOsl8wqOmUxkQ6ddw1cV3l8811+kdUFus/q4d1Y3E3SyEifA==} + '@types/qs@6.14.0': + resolution: {integrity: sha512-eOunJqu0K1923aExK6y8p6fsihYEn/BYuQ4g0CxAAgFc4b/ZLN4CrsRZ55srTdqoiLzU2B2evC+apEIxprEzkQ==} + '@types/qs@6.9.17': resolution: {integrity: sha512-rX4/bPcfmvxHDv0XjfJELTTr+iB+tn032nPILqHm5wbthUUUuVtNGGqzhya9XUxjTP8Fpr0qYgSZZKxGY++svQ==} '@types/range-parser@1.2.7': resolution: {integrity: sha512-hKormJbkJqzQGhziax5PItDUTMAM9uE2XXQmM37dyd4hVM+5aVl7oVxMVUiVQn2oCQFN/LKCZdvSM0pFRqbSmQ==} - '@types/react-dom@18.3.6': - resolution: {integrity: sha512-nf22//wEbKXusP6E9pfOCDwFdHAX4u172eaJI4YkDRQEZiorm6KfYnSC2SWLDMVWUOWPERmJnN0ujeAfTBLvrw==} + '@types/react-dom@18.3.7': + resolution: {integrity: sha512-MEe3UeoENYVFXzoXEWsvcpg6ZvlrFNlOQ7EOsvhI3CfAXwzPfO8Qwuxd40nepsYKqyyVQnTdEfv68q91yLcKrQ==} peerDependencies: '@types/react': ^18.0.0 @@ -4583,6 +4580,9 @@ packages: '@types/react@18.3.10': resolution: {integrity: sha512-02sAAlBnP39JgXwkAq3PeU9DVaaGpZyF3MGcC0MKgQVkZor5IiiDAipVaxQHtDJAmO4GIy/rVBy/LzVj76Cyqg==} + '@types/react@18.3.22': + resolution: {integrity: sha512-vUhG0YmQZ7kL/tmKLrD3g5zXbXXreZXB3pmROW8bg3CnLnpjkRVwUlLne7Ufa2r9yJ8+/6B73RzhAek5TBKh2Q==} + '@types/request@2.48.12': resolution: {integrity: sha512-G3sY+NpsA9jnwm0ixhAFQSJ3Q9JkpLZpJbI3GMv0mIAT0y3mRabYeINzal5WOChIiaTEGQYlHOKgkaM9EisWHw==} @@ -4595,8 +4595,8 @@ packages: '@types/retry@0.12.2': resolution: {integrity: sha512-XISRgDJ2Tc5q4TRqvgJtzsRkFYNJzZrhTdtMoGVBttwzzQJkPnS3WWTFc7kuDRoPtPakl+T+OfdEUjYJj7Jbow==} - '@types/sanitize-html@2.15.0': - resolution: {integrity: sha512-71Z6PbYsVKfp4i6Jvr37s5ql6if1Q/iJQT80NbaSi7uGaG8CqBMXP0pk/EsURAOuGdk5IJCd/vnzKrR7S3Txsw==} + '@types/sanitize-html@2.16.0': + resolution: {integrity: sha512-l6rX1MUXje5ztPT0cAFtUayXF06DqPhRyfVXareEN5gGCFaP/iwsxIyKODr9XDhfxPpN6vXUFNfo5kZMXCxBtw==} '@types/seedrandom@3.0.8': resolution: {integrity: sha512-TY1eezMU2zH2ozQoAFAQFOPpvP15g+ZgSfTZt31AUUH/Rxtnz3H+A/Sv1Snw2/amp//omibc+AEkTaA8KUeOLQ==} @@ -4955,15 +4955,15 @@ packages: resolution: {integrity: sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==} engines: {node: '>=12'} - antd-img-crop@4.24.0: - resolution: {integrity: sha512-RqY/XqvmUnHlj7oLV2kN/ytdZdHUFAZyM3TN+QlTlLrze1Q74isAePgG+QTkLAbrkR9L/IgVRpjsuH/nevpU7Q==} + antd-img-crop@4.25.0: + resolution: {integrity: sha512-2bFno50sfEnIl0ttGjOCC32z82yvz/wb4+UMfd9akKQrcNnbuadlfzL9f5n37Vuqb5l9Tdw4U4V+3sMY9EWW7g==} peerDependencies: antd: '>=4.0.0' react: '>=16.8.0' react-dom: '>=16.8.0' - antd@5.24.7: - resolution: {integrity: sha512-xROWsw0yYFGiNFpVSUZ9/Gs43q0qIM9BkfjugeqgePlZBpLZzLjtOpf4UGM+5aijelHqMi8864KZCX5BbcZYfA==} + antd@5.25.2: + resolution: {integrity: sha512-7R2nUvlHhey7Trx64+hCtGXOiy+DTUs1Lv5bwbV1LzEIZIhWb0at1AM6V3K108a5lyoR9n7DX3ptlLF7uYV/DQ==} peerDependencies: react: '>=16.9.0' react-dom: '>=16.9.0' @@ -5124,8 +5124,8 @@ packages: resolution: {integrity: sha512-19i4G7Hjxj9idgMlAM0BTRII8HfvsOdlr4D9cf3Dm1MZhvcKjBpzY8AMNEyIKyi+L9TIK15xZatmdcPG003yww==} engines: {node: '>=7.6.x'} - axios@1.8.4: - resolution: {integrity: sha512-eBSYY4Y68NNlHbHBMdeDmKNtDgXWhQsJcGqzO3iLUM0GraQFSS9cVgPX5I9b3lbdFKyYoAEGAZF1DwhTaljNAw==} + axios@1.9.0: + resolution: {integrity: sha512-re4CqKTJaURpzbLHtIi6XpDv20/CnpXOtjRY5/CU32L8gU8ek9UIivcfvSWvmKEngmVbrUtPpdDwWDWL7DNHvg==} b4a@1.6.7: resolution: {integrity: sha512-OnAYlL5b7LEkALw87fUVafQw5rVR9RjwGd4KUwNQ6DrrNmaVaUCgLipfVlzrPQ4tWOR9P0IXGNOx50jYCCdSJg==} @@ -5233,6 +5233,10 @@ packages: base64-js@1.5.1: resolution: {integrity: sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==} + base64id@2.0.0: + resolution: {integrity: sha512-lGe34o6EHj9y3Kts9R4ZYs/Gr+6N7MCaMlIFA3F1R2O5/m7K06AxfSeO5530PEERE6/WyEg3lsuyw4GHlPZHog==} + engines: {node: ^4.5.0 || >= 5.9} + base64url@3.0.1: resolution: {integrity: sha512-ir1UPr3dkwexU7FdV8qBBbNDRUhMmIekYMFZfi+C/sLNnRESKPl23nB9b2pltqfOQNnGzsDdId90AEtG5tCx4A==} engines: {node: '>=6.0.0'} @@ -5251,8 +5255,8 @@ packages: bcryptjs@2.4.3: resolution: {integrity: sha512-V/Hy/X9Vt7f3BbPJEi8BdVFMByHi+jNXrYkW3huaybV/kQ0KJg0Y6PkEMbn+zeT+i+SiKZ/HMqJGIIt4LZDqNQ==} - better-sqlite3@11.9.1: - resolution: {integrity: sha512-Ba0KR+Fzxh2jDRhdg6TSH0SJGzb8C0aBY4hR8w8madIdIzzC6Y1+kx5qR6eS1Z+Gy20h6ZU28aeyg0z1VIrShQ==} + better-sqlite3@11.10.0: + resolution: {integrity: sha512-EwhOpyXiOEL/lKzHz9AW1msWFNzGc/z+LzeB3/jnFJpxu+th2yqvzsSWas1v9jgs9+xiXJcD5A8CJxAG2TaghQ==} big.js@3.2.0: resolution: {integrity: sha512-+hN/Zh2D08Mx65pZ/4g5bsmNiZUuChDiQfTUQ7qJr4/kuopCr88xZsAXv6mBoZEsUI4OuGHlX59qE94K2mMW8Q==} @@ -5337,6 +5341,11 @@ packages: engines: {node: ^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7} hasBin: true + browserslist@4.24.5: + resolution: {integrity: sha512-FDToo4Wo82hIdgc1CQ+NQD0hEhmpPjrZ3hiUgwgOG6IuTdlpr8jdjyG24P6cNP1yJpTLzS5OcGgSw0xmDU1/Tw==} + engines: {node: ^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7} + hasBin: true + bs-logger@0.2.6: resolution: {integrity: sha512-pd8DCoxmbgc7hyPKOvxtqNcjYoOsABPQdcCUjGp3d42VR2CX1ORhk2A87oqqu5R1kk+76nsxZupkmyd+MVtCog==} engines: {node: '>= 6'} @@ -5369,10 +5378,6 @@ packages: resolution: {integrity: sha512-8SFQbg/0hQ9xy3UNTB0YEnsNBbWfhf7RtnzpL7TkBiTBRfrQ9Fxcnz7VJsleJpyp6rVLvXiuORqjlHi5q+PYuA==} engines: {node: '>=10.16.0'} - bytes@3.0.0: - resolution: {integrity: sha512-pMhOfFDPiv9t5jjIXkHosWmkSyQbvsgEVNkz0ERHbuLh2T/7j4Mqqpz523Fe8MVY89KC6Sh/QfS2sM+SjgFDcw==} - engines: {node: '>= 0.8'} - bytes@3.1.2: resolution: {integrity: sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==} engines: {node: '>= 0.8'} @@ -5380,10 +5385,6 @@ packages: cache-manager@3.6.3: resolution: {integrity: sha512-dS4DnV6c6cQcVH5OxzIU1XZaACXwvVIiUPkFytnRmLOACuBGv3GQgRQ1RJGRRw4/9DF14ZK2RFlZu1TUgDniMg==} - call-bind-apply-helpers@1.0.1: - resolution: {integrity: sha512-BhYE+WDaywFg2TBWYNXAE+8B1ATnThNBqXHP5nQu0jWJdVvY2hvkpyB3qOmtmDePiS5/BDQ8wASEWGMWRG148g==} - engines: {node: '>= 0.4'} - call-bind-apply-helpers@1.0.2: resolution: {integrity: sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==} engines: {node: '>= 0.4'} @@ -5422,6 +5423,9 @@ packages: caniuse-lite@1.0.30001713: resolution: {integrity: sha512-wCIWIg+A4Xr7NfhTuHdX+/FKh3+Op3LBbSp2N5Pfx6T/LhdQy3GTyoTg48BReaW/MyMNZAkTadsBtai3ldWK0Q==} + caniuse-lite@1.0.30001718: + resolution: {integrity: sha512-AflseV1ahcSunK53NfEs9gFWgOEmzr0f+kaMFA4xiLZlr9Hzt7HxcSpIFcnNCUkz6R6dWKa54rUz3HUmI3nVcw==} + canvas-fit@1.5.0: resolution: {integrity: sha512-onIcjRpz69/Hx5bB5HGbYKUF2uC6QT6Gp+pfpGm3A7mPfcluSLV5v4Zu+oflDUwLdUw0rLIBhUbi0v8hM4FJQQ==} @@ -5584,6 +5588,10 @@ packages: resolution: {integrity: sha512-EcR6r5a8bj6pu3ycsa/E/cKVGuTgZJZdsyUYHOksG/UHIiKfjxzRxYJpyVBwYaQeOvghal9fcc4PidlgzugAQg==} engines: {node: '>=6'} + cluster-key-slot@1.1.2: + resolution: {integrity: sha512-RMr0FhtfXemyinomL4hrWcYJxmX6deFdCxpJzhDttxgO1+bcCnkk+9drydLVDmAMG7NE6aN/fl4F7ucU/90gAA==} + engines: {node: '>=0.10.0'} + co@4.6.0: resolution: {integrity: sha512-QVb0dM5HvG+uaxitm8wONl7jltx8dqhfU33DcqtOZcLSVIKSDDLDi7+0LbAKiyI8hD9u42m2YxXSkMGWThaecQ==} engines: {iojs: '>= 1.0.0', node: '>= 0.12.0'} @@ -5731,8 +5739,8 @@ packages: resolution: {integrity: sha512-AF3r7P5dWxL8MxyITRMlORQNaOA2IkAFaTr4k7BUumjPtRpGDTZpl0Pb1XCO6JeDCBdp126Cgs9sMxqSjgYyRg==} engines: {node: '>= 0.6'} - compression@1.7.4: - resolution: {integrity: sha512-jaSIDzP9pZVS4ZfQ+TzvtiWhdpFhE2RDHz8QJkpX9SIpLq88VueF5jJw6t+6CUQcAoA6t+x89MLrWAqpfDE8iQ==} + compression@1.8.0: + resolution: {integrity: sha512-k6WLKfunuqCYD3t6AsuPGvQWaKwuLLh2/xHNcX4qE+vIfDNXpSqnrhwA7O53R7WVQUnt8dVAIW+YHr7xTgOgGA==} engines: {node: '>= 0.8.0'} compute-gcd@1.2.1: @@ -5816,8 +5824,8 @@ packages: resolution: {integrity: sha512-yki5XnKuf750l50uGTllt6kKILY4nQ1eNIQatoXEByZ5dWgnKqbnqmTrBE5B4N7lrMJKQ2ytWMiTO2o0v6Ew/w==} engines: {node: '>= 0.6'} - cookie@1.0.0: - resolution: {integrity: sha512-bsSztFoaR8bw9MlFCrTHzc1wOKCUKOBsbgFdoDilZDkETAOOjKSqV7L+EQLbTaylwvZasd9vM4MGKotJaUfSpA==} + cookie@1.0.2: + resolution: {integrity: sha512-9Kr/j4O16ISv8zBBhJoi4bXOYNTkFLOqSL3UDB0njXxCXNezjeyVrJyGOWtgfs/q2km1gwBcfH8q1yEGoMYunA==} engines: {node: '>=18'} cookies@0.8.0: @@ -5882,42 +5890,42 @@ packages: resolution: {integrity: sha512-VxBKmeNcqQdiUQUW2Tzq0t377b54N2bMtXO/qiLa+6eRRmmC4qT3D4OnTGoT/U6O9aklQ/jTwbOtRMTTY8G0Ig==} deprecated: This package is no longer supported. It's now a built-in Node module. If you've depended on crypto, you should switch to the one that's built-in. - cspell-config-lib@8.18.1: - resolution: {integrity: sha512-zdJ0uhLROSUrHoibysPw+AkxKPUmiG95hDtiL7s8smewkuaS1hpjqwsDBx981nHYs3xW3qDUfVATrAkSzb0VMw==} + cspell-config-lib@8.19.4: + resolution: {integrity: sha512-LtFNZEWVrnpjiTNgEDsVN05UqhhJ1iA0HnTv4jsascPehlaUYVoyucgNbFeRs6UMaClJnqR0qT9lnPX+KO1OLg==} engines: {node: '>=18'} - cspell-dictionary@8.18.1: - resolution: {integrity: sha512-vKHEPSfkMKMR4S4tk6K2vHC+f3kdJK8Kdh/C0jDh6RRDjDsyAPxshtbremxOgAX6X8GaRUCROoMZ7FhB92+Y9w==} + cspell-dictionary@8.19.4: + resolution: {integrity: sha512-lr8uIm7Wub8ToRXO9f6f7in429P1Egm3I+Ps3ZGfWpwLTCUBnHvJdNF/kQqF7PL0Lw6acXcjVWFYT7l2Wdst2g==} engines: {node: '>=18'} - cspell-gitignore@8.18.1: - resolution: {integrity: sha512-gp/AdUtW6FqpKY4YyYJ3kz0OsXApwsV1FOUA9Z0VnOYKVZtt2snh4uNlI4Ltq+wh7pDU8mqaPWmX6Xy+HSRDkQ==} + cspell-gitignore@8.19.4: + resolution: {integrity: sha512-KrViypPilNUHWZkMV0SM8P9EQVIyH8HvUqFscI7+cyzWnlglvzqDdV4N5f+Ax5mK+IqR6rTEX8JZbCwIWWV7og==} engines: {node: '>=18'} hasBin: true - cspell-glob@8.18.1: - resolution: {integrity: sha512-tlZXvzsN7dByHo69dz/HbJuQDUtrfhdioZ/LHaW7W9diG9NpaghgEfyX4fmsIXjU/2f66LDpYVY6osjtlOgyrg==} + cspell-glob@8.19.4: + resolution: {integrity: sha512-042uDU+RjAz882w+DXKuYxI2rrgVPfRQDYvIQvUrY1hexH4sHbne78+OMlFjjzOCEAgyjnm1ktWUCCmh08pQUw==} engines: {node: '>=18'} - cspell-grammar@8.18.1: - resolution: {integrity: sha512-V6XTN1B++7EzJA0H4g4XbNJtqm6Y3/iXdLeZ6sMRDaNFKXXwTbWRtn8gukDQIytyw09AnCUKeqGSzCVqw26Omg==} + cspell-grammar@8.19.4: + resolution: {integrity: sha512-lzWgZYTu/L7DNOHjxuKf8H7DCXvraHMKxtFObf8bAzgT+aBmey5fW2LviXUkZ2Lb2R0qQY+TJ5VIGoEjNf55ow==} engines: {node: '>=18'} hasBin: true - cspell-io@8.18.1: - resolution: {integrity: sha512-mm9SUEF2yShuTXDSjCbsAqYTEb6jrtgcCnlqIzpsZOJOOe+zj/VyzTy2NJvOrdvR59dikdaqB75VGBMfHi804g==} + cspell-io@8.19.4: + resolution: {integrity: sha512-W48egJqZ2saEhPWf5ftyighvm4mztxEOi45ILsKgFikXcWFs0H0/hLwqVFeDurgELSzprr12b6dXsr67dV8amg==} engines: {node: '>=18'} - cspell-lib@8.18.1: - resolution: {integrity: sha512-t1j+XB7515yHmrczK6I1N6j0a72vmL/6OxsMJnCucHC6DO0WkOqmHulNRH7LpFacnns0dx15lmrAqPg7gQFcIg==} + cspell-lib@8.19.4: + resolution: {integrity: sha512-NwfdCCYtIBNQuZcoMlMmL3HSv2olXNErMi/aOTI9BBAjvCHjhgX5hbHySMZ0NFNynnN+Mlbu5kooJ5asZeB3KA==} engines: {node: '>=18'} - cspell-trie-lib@8.18.1: - resolution: {integrity: sha512-UaB36wsyp2eWeMtrbS6Q2t2WFvpedmGXJ879yHn9qKD7ViyUpI4cAbh6v7gWMUu+gjqCulXtke64k1ddmBihPQ==} + cspell-trie-lib@8.19.4: + resolution: {integrity: sha512-yIPlmGSP3tT3j8Nmu+7CNpkPh/gBO2ovdnqNmZV+LNtQmVxqFd2fH7XvR1TKjQyctSH1ip0P5uIdJmzY1uhaYg==} engines: {node: '>=18'} - cspell@8.18.1: - resolution: {integrity: sha512-RE3LIgN9NAVcYBNX2NQVhLergok8EPymOuCUhu1vBR8cjRmioksn3CJeCoQgD8rPjalM+S9thYkMtOZc5Jjv2A==} + cspell@8.19.4: + resolution: {integrity: sha512-toaLrLj3usWY0Bvdi661zMmpKW2DVLAG3tcwkAv4JBTisdIRn15kN/qZDrhSieUEhVgJgZJDH4UKRiq29mIFxA==} engines: {node: '>=18'} hasBin: true @@ -6240,8 +6248,17 @@ packages: supports-color: optional: true - debug@4.4.0: - resolution: {integrity: sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA==} + debug@4.3.7: + resolution: {integrity: sha512-Er2nc/H7RrMXZBFCEim6TCmMk02Z8vLC2Rbi1KEBggpo0fS6l0S1nnapwmIi3yW/+GOJap1Krg4w0Hg80oCqgQ==} + engines: {node: '>=6.0'} + peerDependencies: + supports-color: '*' + peerDependenciesMeta: + supports-color: + optional: true + + debug@4.4.1: + resolution: {integrity: sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==} engines: {node: '>=6.0'} peerDependencies: supports-color: '*' @@ -6338,6 +6355,10 @@ packages: resolution: {integrity: sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==} engines: {node: '>=0.4.0'} + denque@2.1.0: + resolution: {integrity: sha512-HVQE3AAb/pxF8fQAoiqpvg9i3evqug3hoiwakOyZAwJm+6vZehbkYXZ0l4JxS+I3QxM97v5aaRNhj8v5oBhekw==} + engines: {node: '>=0.10'} + depd@1.1.2: resolution: {integrity: sha512-7emPTl6Dpo6JRXOXjLRxck+FlLRX5847cLKEn00PLAgc3g2hTZZgr+e4c2v6QpSmLeFP3n5yUo7ft6avBK/5jQ==} engines: {node: '>= 0.6'} @@ -6530,6 +6551,9 @@ packages: electron-to-chromium@1.5.136: resolution: {integrity: sha512-kL4+wUTD7RSA5FHx5YwWtjDnEEkIIikFgWHR4P6fqjw1PPLlqYkxeOb++wAauAssat0YClCy8Y3C5SxgSkjibQ==} + electron-to-chromium@1.5.155: + resolution: {integrity: sha512-ps5KcGGmwL8VaeJlvlDlu4fORQpv3+GIcF5I3f9tUKUlJ/wsysh6HU8P5L1XWRYeXfA0oJd4PyM8ds8zTFf6Ng==} + element-size@1.1.1: resolution: {integrity: sha512-eaN+GMOq/Q+BIWy0ybsgpcYImjGIdNLyjLFJU4XsLHXYQao5jCNb36GyN6C2qwmDDYSfIBmKpPpr4VnBdLCsPQ==} @@ -6582,6 +6606,17 @@ packages: end-of-stream@1.4.4: resolution: {integrity: sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==} + engine.io-client@6.6.3: + resolution: {integrity: sha512-T0iLjnyNWahNyv/lcjS2y4oE358tVS/SYQNxYXGAJ9/GLgH4VCvOQ/mhTjqU88mLZCQgiG8RIegFHYCdVC+j5w==} + + engine.io-parser@5.2.3: + resolution: {integrity: sha512-HqD3yTBfnBxIrbnM1DoD6Pcq8NECnh8d4As1Qgh0z5Gg3jRRIqijury0CL3ghu/edArpUYiYqQiDUQBIs4np3Q==} + engines: {node: '>=10.0.0'} + + engine.io@6.6.4: + resolution: {integrity: sha512-ZCkIjSYNDyGn0R6ewHDtXgns/Zre/NT6Agvq1/WobF7JXgFff4SeDroKiCO3fNJreU9YG429Sc81o4w5ok/W5g==} + engines: {node: '>=10.2.0'} + enhanced-resolve@5.18.1: resolution: {integrity: sha512-ZSW3ma5GkcQBIpwZTSRAI8N71Uuwgs93IezB7mf7R60tC8ZbJideoDNKjHn2O9KIlx6rkGTTEk1xUCK2E1Y2Yg==} engines: {node: '>=10.13.0'} @@ -6637,8 +6672,8 @@ packages: resolution: {integrity: sha512-uDn+FE1yrDzyC0pCo961B2IHbdM8y/ACZsKD4dG6WqrjV53BADjwa7D+1aom2rsNVfLyDgU/eigvlJGJ08OQ4w==} engines: {node: '>= 0.4'} - es-module-lexer@1.6.0: - resolution: {integrity: sha512-qqnD1yMU6tk/jnaMosogGySTZP8YtUgAffA9nMN+E/rjxcfRQ6IEk7IiozUjgxKoFHBGjTLnrHB/YC45r/59EQ==} + es-module-lexer@1.7.0: + resolution: {integrity: sha512-jEQoCwk8hyb2AZziIOLhDqpm5+2ww5uIE6lkO/6jcOCusfk6LhMHpXXfBLXTZ7Ydyt0j4VoUQv6uGNYbdW+kBA==} es-object-atoms@1.1.1: resolution: {integrity: sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==} @@ -6710,8 +6745,8 @@ packages: resolution: {integrity: sha512-IsUTtGxF1hrH6lMWiSl1WbGaiP01eT6kzywdY1U+zLc0MP+nwEnUiS9UI8IaOTUhTeQJLlCEWIbXINBH4YJbBQ==} engines: {node: '>=10'} - eslint-plugin-prettier@5.2.6: - resolution: {integrity: sha512-mUcf7QG2Tjk7H055Jk0lGBjbgDnfrvqjhXh9t2xLMSCjZVcw9Rb1V6sVNXO0th3jgeO7zllWPTNRil3JW94TnQ==} + eslint-plugin-prettier@5.4.0: + resolution: {integrity: sha512-BvQOvUhkVQM1i63iMETK9Hjud9QhqBnbtT1Zc642p9ynzBuCe5pybkOnvqZIBypXmMlsGcnU4HZ8sCTPfpAexA==} engines: {node: ^14.18.0 || >=16.0.0} peerDependencies: '@types/eslint': '>=8.0.0' @@ -6839,11 +6874,11 @@ packages: resolution: {integrity: sha512-2Zks0hf1VLFYI1kbh0I5jP3KHHyCHpkfyHBzsSXRFgl/Bg9mWYfMW8oD+PdMPlEwy5HNsR9JutYy6pMeOh61nw==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - express-rate-limit@7.4.0: - resolution: {integrity: sha512-v1204w3cXu5gCDmAvgvzI6qjzZzoMWKnyVDk3ACgfswTQLYiGen+r8w0VnXnGMmzEN/g8fwIQ4JrFFd4ZP6ssg==} + express-rate-limit@7.5.0: + resolution: {integrity: sha512-eB5zbQh5h+VenMPM3fh+nw1YExi5nMr6HUCR62ELSP11huvxm/Uir1H1QEyTkk5QX6A58pX6NmaTMceKZ0Eodg==} engines: {node: '>= 16'} peerDependencies: - express: 4 || 5 || ^5.0.0-beta.1 + express: ^4.11 || 5 || ^5.0.0-beta.1 express-session@1.18.1: resolution: {integrity: sha512-a5mtTqEaZvBCL9A9aqkrtfz+3SMDhOVUnjafjo+s7A9Txkq+SVX2DLvSp1Zrv4uCXa3lMSK3viWnh9Gg07PBUA==} @@ -6930,8 +6965,8 @@ packages: fb-watchman@2.0.2: resolution: {integrity: sha512-p5161BqbuCaSnB8jIbzQHOlpgsPmK5rJVDfDKO91Axs5NC1uu3HRQm6wt9cd9/+GtQQIO53JdGXXoyDpTAsgYA==} - fdir@6.4.3: - resolution: {integrity: sha512-PMXmW2y1hDDfTSRc9gaXIuCCRpuoz3Kaz8cUelp3smouvfT632ozg2vrT6lJsHKKOF59YLbOGfAWGUcKEfRMQw==} + fdir@6.4.4: + resolution: {integrity: sha512-1NZP+GK4GfuAv3PqKvxQRDMjdSRZjnkq7KfhlNrCNNlZ0ygQFpebfrnfnq/W7fpUnAv9aGWmY1zKx7FYL3gwhg==} peerDependencies: picomatch: ^3 || ^4 peerDependenciesMeta: @@ -6960,8 +6995,8 @@ packages: resolution: {integrity: sha512-/pqPFG+FdxWQj+/WSuzXSDaNzxgTLr/OrR1QuqfEZzDakpdYE70PwUxL7BPUa8hpjbvY1+qvCl8k+8Tq34xJgg==} engines: {node: '>=18'} - file-type@20.4.1: - resolution: {integrity: sha512-hw9gNZXUfZ02Jo0uafWLaFVPter5/k2rfcrjFJJHX/77xtSDOfJuEFb6oKlFV86FLP1SuyHMW1PSk0U9M5tKkQ==} + file-type@20.5.0: + resolution: {integrity: sha512-BfHZtG/l9iMm4Ecianu7P8HRD2tBHLtjXinm4X62XBOYzi7CYA7jyqfJzOvXHqzVrVPYqBo2/GvbARMaaJkKVg==} engines: {node: '>=18'} file-uri-to-path@1.0.0: @@ -7179,10 +7214,6 @@ packages: resolution: {integrity: sha512-lMyPjQyl0cNNdDf2oR+IQ/fM3itDvpoHy45Ymo2r0L1EjazeSl13SfbKZs7KtZ/3MDCeueiaJiuOEfKqRTsSgA==} engines: {node: 10 || 12 || >=14} - get-stdin@9.0.0: - resolution: {integrity: sha512-dVKBjfWisLAicarI2Sf+JuBE/DghV4UzNAVe9yhEJuzeREd3JhOTE9cUaJTeSa77fsbQUK3pcOpJfM59+VKZaA==} - engines: {node: '>=12'} - get-stream@3.0.0: resolution: {integrity: sha512-GlhdIUuVakc8SJ6kK0zAFbiGzRFzNnY4jUuEbV9UROo4Y+0Ny4fjvcZFVTeDA4odpFyOQzaw6hXukJSq/f28sQ==} engines: {node: '>=4'} @@ -7604,9 +7635,9 @@ packages: http-parser-js@0.5.10: resolution: {integrity: sha512-Pysuw9XpUq5dVc/2SMHpuTY01RFl8fttgcyunjL7eEMhGM3cI4eOmiCycJDVCo/7O7ClfQD3SaI6ftDzqOXYMA==} - http-proxy-3@1.20.0: - resolution: {integrity: sha512-lGheif1UzSJ4j1MvvlWUA8u95tFnNzMMhtqOFIASZ9wD4UrLxp+F0QCR4BbE/ddrSpw+i+iJiO0oTH/9mEsriw==} - engines: {node: '>=20.0.0'} + http-proxy-3@1.20.5: + resolution: {integrity: sha512-7IfF7DIyZZmOVf7r8pl/H2BypTFvfgcCe36IwLlQV/725CoT2RXSqR6tXncsERDQCNlL5DWykfOIP1S8H+J4pA==} + engines: {node: '>=18'} http-proxy-agent@5.0.0: resolution: {integrity: sha512-n2hY8YdoRE1i7r6M0w9DIw5GgZN0G25P8zLCRQ8rjXtTU3vsNFBI/vWK/UIeE6g5MUUz6avwAPXmL6Fy9D/90w==} @@ -7777,6 +7808,10 @@ packages: invariant@2.2.4: resolution: {integrity: sha512-phJfQVBuaJM5raOpJjSfkiD6BpbCE4Ns//LaXl6wGYtUBY83nWS6Rf9tXm2e8VaK60JEjYldbPif/A2B1C2gNA==} + iovalkey@0.3.1: + resolution: {integrity: sha512-pSmFj/ZDFLP8AzqIAMpNJArhHYNeyqIwfcUULwZv8g6y3eaUGrlnlT7QXLrJAp0yiGCAqe1hPA33x/h5opNP9w==} + engines: {node: '>=18.12.0'} + ip-address@9.0.5: resolution: {integrity: sha512-zHtQzGojZXTwZTHQqra+ETKd4Sn3vgi7uBmlPoXVWZqYvuKmtI0l/VZTjqGmJY9x88GGOaZ9+G9ES8hC4T4X8g==} engines: {node: '>= 12'} @@ -8404,8 +8439,8 @@ packages: json-stable-stringify-without-jsonify@1.0.1: resolution: {integrity: sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==} - json-stable-stringify@1.1.1: - resolution: {integrity: sha512-SU/971Kt5qVQfJpyDveVhQ/vya+5hvrjClFOcr8c0Fq5aODJjMwutrOfCU+eCnVD5gpx1Q3fEqkyom77zH1iIg==} + json-stable-stringify@1.3.0: + resolution: {integrity: sha512-qtYiSSFlwot9XHtF9bD9c7rwKjr+RecWT//ZnPvSmEjpV5mmPOCN4j8UjY5hbjNkOwZ/jQv3J6R1/pL7RwgMsg==} engines: {node: '>= 0.4'} json-stringify-pretty-compact@4.0.0: @@ -8523,8 +8558,8 @@ packages: langs@2.0.0: resolution: {integrity: sha512-v4pxOBEQVN1WBTfB1crhTtxzNLZU9HPWgadlwzWKISJtt6Ku/CnpBrwVy+jFv8StjxsPfwPFzO0CMwdZLJ0/BA==} - langsmith@0.3.20: - resolution: {integrity: sha512-zwVQos6tjcksCTfdM67QKq7yyED4GmQiZw/sJ6UCMYZxlvTMMg3PeQ9tOePXAWNWoJygOnH+EwGXr7gYOOETDg==} + langsmith@0.3.29: + resolution: {integrity: sha512-JPF2B339qpYy9FyuY4Yz1aWYtgPlFc/a+VTj3L/JcFLHCiMP7+Ig8I9jO+o1QwVa+JU3iugL1RS0wwc+Glw0zA==} peerDependencies: openai: '*' peerDependenciesMeta: @@ -8649,6 +8684,9 @@ packages: lodash.debounce@4.0.8: resolution: {integrity: sha512-FT1yDzDYEoYWhnSGnpE/4Kj1fLZkDFyqRb7fNt6FdYOSxlUWAtp42Eh6Wb0rGIv/m9Bgo7x4GhQbm5Ys4SG5ow==} + lodash.defaults@4.2.0: + resolution: {integrity: sha512-qjxPLHd3r5DnsdGacqOMU6pb/avJzdh9tFX2ymgoZE27BmjXrNy/y4LoaiTeAb+O3gL8AfpJGtqfX/ae2leYYQ==} + lodash.get@4.4.2: resolution: {integrity: sha512-z+Uw/vLuy6gQe8cfaFWD7p0wVv8fJl3mbzXh33RS+0oW2wvUqiRXiQ69gLWSLpgB5/6sU+r6BlQR0MBILadqTQ==} deprecated: This package is deprecated. Use the optional chaining (?.) operator instead. @@ -8656,6 +8694,9 @@ packages: lodash.includes@4.3.0: resolution: {integrity: sha512-W3Bx6mdkRTGtlJISOvVD/lbqjTlPPUDTMnlXZFnVwi9NKJ6tiAk6LVdlhZMm17VZisqhKcgzpO5Wz91PCt5b0w==} + lodash.isarguments@3.1.0: + resolution: {integrity: sha512-chi4NHZlZqZD18a0imDHnZPrDeBbTtVN7GXMwuGdRH9qotxAjYs3aVLKc7zNOG9eddR5Ksd8rvFEBc9SsggPpg==} + lodash.isboolean@3.0.3: resolution: {integrity: sha512-Bz5mupy2SVbPHURB98VAcw+aHh4vRV5IPNhILUCsOzRmsTmSQ17jIuqopAentWoehktxGd9e/hbIXq980/1QJg==} @@ -9005,6 +9046,11 @@ packages: nan@2.20.0: resolution: {integrity: sha512-bk3gXBZDGILuuo/6sKtr0DQmSThYHLtNCdSdXk9YkxD/jK6X2vmCyyXBBxyqZ4XcnzTyYEAThfX3DCEnLf6igw==} + nanoid@3.3.11: + resolution: {integrity: sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==} + engines: {node: ^10 || ^12 || ^13.7 || ^14 || >=15.0.1} + hasBin: true + nanoid@3.3.8: resolution: {integrity: sha512-WNLf5Sd8oZxOm+TzppcYk8gVOgP+l58xNy58D0nbUnOxOWRWvlcCV4kUF7ltmI6PsrLl/BgKEyS4mqsGChFN0w==} engines: {node: ^10 || ^12 || ^13.7 || ^14 || >=15.0.1} @@ -9016,13 +9062,6 @@ packages: native-promise-only@0.8.1: resolution: {integrity: sha512-zkVhZUA3y8mbz652WrL5x0fB0ehrBkulWT3TomAQ9iDtyXZvzKeEA6GPxAItBYeNYl5yngKRX612qHOhvMkDeg==} - nats.ws@1.30.2: - resolution: {integrity: sha512-hIdw6XC8Mr8I5/0MvaHRhq+Elg2XBFi2TNV7ZI4MtUKHfGGnEgAdqUacqnseIztM2jatspgy12vhDtsqb8wdJA==} - - nats@2.29.3: - resolution: {integrity: sha512-tOQCRCwC74DgBTk4pWZ9V45sk4d7peoE2njVprMRCBXrhJ5q5cYM7i6W+Uvw2qUrcfOSnuisrX7bEx3b3Wx4QA==} - engines: {node: '>= 14.0.0'} - natural-compare@1.4.0: resolution: {integrity: sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==} @@ -9044,6 +9083,10 @@ packages: resolution: {integrity: sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==} engines: {node: '>= 0.6'} + negotiator@0.6.4: + resolution: {integrity: sha512-myRT3DiWPHqho5PrJaIRyaMv2kgYf0mUVgBNOYMuCH5Ki1yEiQaf/ZJuQ62nvpc44wL5WDbTX7yGJi1Neevw8w==} + engines: {node: '>= 0.6'} + neo-async@2.6.2: resolution: {integrity: sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw==} @@ -9089,10 +9132,6 @@ packages: nise@1.5.3: resolution: {integrity: sha512-Ymbac/94xeIrMf59REBPOv0thr+CJVFMhrlAkW/gjCIE58BGQdCj0x7KRCb3yz+Ga2Rz3E9XXSvUyyxqqhjQAQ==} - nkeys.js@1.1.0: - resolution: {integrity: sha512-tB/a0shZL5UZWSwsoeyqfTszONTt4k2YS0tuQioMOD180+MbombYVgzDUYHlx+gejYK6rgf08n/2Df99WY0Sxg==} - engines: {node: '>=10.0.0'} - no-case@3.0.4: resolution: {integrity: sha512-fgAN3jGAh+RoxUGZHTSOLJIqUc2wmoBwGR4tbpNAKmmovFoWq0OdRkb0VkldReO2a2iBT/OEulG9XSUc10r3zg==} @@ -9153,8 +9192,8 @@ packages: node-jose@2.2.0: resolution: {integrity: sha512-XPCvJRr94SjLrSIm4pbYHKLEaOsDvJCpyFw/6V/KK/IXmyZ6SFBzAUDO9HQf4DB/nTEFcRGH87mNciOP23kFjw==} - node-mocks-http@1.16.2: - resolution: {integrity: sha512-2Sh6YItRp1oqewZNlck3LaFp5vbyW2u51HX2p1VLxQ9U/bG90XV8JY9O7Nk+HDd6OOn/oV3nA5Tx5k4Rki0qlg==} + node-mocks-http@1.17.2: + resolution: {integrity: sha512-HVxSnjNzE9NzoWMx9T9z4MLqwMpLwVvA0oVZ+L+gXskYXEJ6tFn3Kx4LargoB6ie7ZlCLplv7QbWO6N+MysWGA==} engines: {node: '>=14'} peerDependencies: '@types/express': ^4.17.21 || ^5.0.0 @@ -9177,8 +9216,8 @@ packages: resolution: {integrity: sha512-D36I9AhLxLG/LvD4GDd701fpevDG+QgQhpOJQnQa3CysSIiztfnttI5bAicqJ4PXNlT2UlCCdnCn8zj/ONkQfw==} engines: {node: '>= 18'} - nodemailer@6.10.0: - resolution: {integrity: sha512-SQ3wZCExjeSatLE/HBaXS5vqUOQk6GtBdIIKxiFdmm01mOQZX/POJkO3SUX1wDiYcwUOJwT23scFSC9fY2H8IA==} + nodemailer@6.10.1: + resolution: {integrity: sha512-Z+iLaBGVaSjbIzQ4pX6XV41HrooLsQ10ZWPUehGmuantvzWoDVBnmsdUcOIDM1t+yPor5pDhVlDESgOMEGxhHA==} engines: {node: '>=6.0.0'} normalize-package-data@2.5.0: @@ -9300,12 +9339,12 @@ packages: resolution: {integrity: sha512-1FlR+gjXK7X+AsAHso35MnyN5KqGwJRi/31ft6x0M194ht7S+rWAvd7PHss9xSKMzE0asv1pyIHaJYq+BbacAQ==} engines: {node: '>=12'} - open@10.1.0: - resolution: {integrity: sha512-mnkeQ1qP5Ue2wd+aivTD3NHd/lZ96Lu0jgf0pwktLPtx6cTZiH7tyeGRRHs0zX0rbrahXPnXlUnbeXyaBBuIaw==} + open@10.1.2: + resolution: {integrity: sha512-cxN6aIDPz6rm8hbebcP7vrQNhvRcveZoJU72Y7vskh4oIm+BZwBECnx5nTmrlres1Qapvx27Qo1Auukpf8PKXw==} engines: {node: '>=18'} - openai@4.95.1: - resolution: {integrity: sha512-IqJy+ymeW+k/Wq+2YVN3693OQMMcODRtHEYOlz263MdUwnN/Dwdl9c2EXSxLLtGEHkSHAfvzpDMHI5MaWJKXjQ==} + openai@4.102.0: + resolution: {integrity: sha512-CWk15CMhPSHNZnjz+6rwVYV551xaC8CwOd7/zxImrC1btEo37dX/Ii5tBKWfqqxqyzpJ6p3Y4bICzzKhW03WhQ==} hasBin: true peerDependencies: ws: ^8.18.0 @@ -9615,11 +9654,11 @@ packages: performance-now@2.1.0: resolution: {integrity: sha512-7EAHlyLHI56VEIdK57uwHdHKIaAGbnXPiw0yWbarQZOKaKpvUIgW0jWRVLiatnM+XXlSwsanIBH/hzGMJulMow==} - pg-cloudflare@1.1.1: - resolution: {integrity: sha512-xWPagP/4B6BgFO+EKz3JONXv3YDgvkbVrGw2mTo3D6tVDQRh1e7cqVGvyR3BE+eQgAvx1XhW/iEASj4/jCWl3Q==} + pg-cloudflare@1.2.5: + resolution: {integrity: sha512-OOX22Vt0vOSRrdoUPKJ8Wi2OpE/o/h9T8X1s4qSkCedbNah9ei2W2765be8iMVxQUsvgT7zIAT2eIa9fs5+vtg==} - pg-connection-string@2.7.0: - resolution: {integrity: sha512-PI2W9mv53rXJQEOb8xNR8lH7Hr+EKa6oJa38zsK0S/ky2er16ios1wLKhZyxzD7jUReiWokc9WK5nxSnC7W1TA==} + pg-connection-string@2.9.0: + resolution: {integrity: sha512-P2DEBKuvh5RClafLngkAuGe9OUlFV7ebu8w1kmaaOgPcpJd1RIFh7otETfI6hAR8YupOLFTY7nuvvIn7PLciUQ==} pg-int8@1.0.1: resolution: {integrity: sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==} @@ -9629,11 +9668,14 @@ packages: resolution: {integrity: sha512-BM/Thnrw5jm2kKLE5uJkXqqExRUY/toLHda65XgFTBTFYZyopbKjBe29Ii3RbkvlsMoFwD+tHeGaCjjv0gHlyw==} engines: {node: '>=4'} - pg-pool@3.8.0: - resolution: {integrity: sha512-VBw3jiVm6ZOdLBTIcXLNdSotb6Iy3uOCwDGFAksZCXmi10nyRvnP2v3jl4d+IsLYRyXf6o9hIm/ZtUzlByNUdw==} + pg-pool@3.10.0: + resolution: {integrity: sha512-DzZ26On4sQ0KmqnO34muPcmKbhrjmyiO4lCCR0VwEd7MjmiKf5NTg/6+apUEu0NF7ESa37CGzFxH513CoUmWnA==} peerDependencies: pg: '>=8.0' + pg-protocol@1.10.0: + resolution: {integrity: sha512-IpdytjudNuLv8nhlHs/UrVBhU0e78J0oIS/0AVdTbWxSOkFUVdsHC/NrorO6nXsQNDTT1kzDSOMJubBQviX18Q==} + pg-protocol@1.8.0: resolution: {integrity: sha512-jvuYlEkL03NRvOoyoRktBK7+qU5kOvlAwvmrH8sr3wbLrOdVWsRxQfz8mMy9sZFsqJ1hEWNfdWKI4SAmoL+j7g==} @@ -9645,8 +9687,8 @@ packages: resolution: {integrity: sha512-cRL3JpS3lKMGsKaWndugWQoLOCoP+Cic8oseVcbr0qhPzYD5DWXK+RZ9LY9wxRf7RQia4SCwQlXk0q6FCPrVng==} engines: {node: '>=10'} - pg@8.14.1: - resolution: {integrity: sha512-0TdbqfjwIun9Fm/r89oB7RFQ0bLgduAhiIqIXOsyKoiC/L54DbuAAzIEN/9Op0f1Po9X7iCPXGoa/Ah+2aI8Xw==} + pg@8.16.0: + resolution: {integrity: sha512-7SKfdvP8CTNXjMUzfcVTaI+TDzBEeaUnVwiVGZQD1Hh33Kpev7liQba9uLd4CfN8r9mCVsD0JIpq03+Unpz+kg==} engines: {node: '>= 8.0.0'} peerDependencies: pg-native: '>=3.0.1' @@ -9751,9 +9793,9 @@ packages: port-get@1.0.4: resolution: {integrity: sha512-B8RcNfc8Ld+7C31DPaKIQz2aO9dqIs+4sUjhxJ2TSjEaidwyxu05WBbm08FJe+qkVvLiQqPbEAfNw1rB7JbjtA==} - portfinder@1.0.32: - resolution: {integrity: sha512-on2ZJVVDXRADWE6jnQaX0ioEylzgBpQk8r55NE4wjXW1ZxO+BgDlY6DXwj20i0V8eB4SenDQ00WEaxfiIQPcxg==} - engines: {node: '>= 0.12.0'} + portfinder@1.0.37: + resolution: {integrity: sha512-yuGIEjDAYnnOex9ddMnKZEMFE0CcGo6zbfzDklkmT1m5z734ss6JMzN9rNB3+RR7iS+F10D4/BVIaXOyh8PQKw==} + engines: {node: '>= 10.12'} possible-typed-array-names@1.1.0: resolution: {integrity: sha512-/+5VFTchJDoVj3bhoqi6UeymcD00DAwb1nJwamzPvHEszJ4FpF6SNNbUbOS8yI56qHzdV8eK0qEfOSiodkTdxg==} @@ -9868,8 +9910,8 @@ packages: engines: {node: '>=14'} hasBin: true - prettier@3.3.3: - resolution: {integrity: sha512-i2tDNA0O5IrMO757lfrdQZCc2jPNDVntV0m/+4whiDfWaTKfMNgR7Qz0NAeGz/nRqF4m5/6CLzbP4/liHt12Ew==} + prettier@3.5.3: + resolution: {integrity: sha512-QQtaxnoDJeAkDvDKWCLiwIXkTgRhwYDEQCghU9Z6q03iyek/rxRh/2lC3HB7P8sWT2xC/y5JDctPLBIGzHKbhw==} engines: {node: '>=14'} hasBin: true @@ -10025,8 +10067,8 @@ packages: react: '>=16.9.0' react-dom: '>=16.9.0' - rc-cascader@3.33.1: - resolution: {integrity: sha512-Kyl4EJ7ZfCBuidmZVieegcbFw0RcU5bHHSbtEdmuLYd0fYHCAiYKZ6zon7fWAVyC6rWWOOib0XKdTSf7ElC9rg==} + rc-cascader@3.34.0: + resolution: {integrity: sha512-KpXypcvju9ptjW9FaN2NFcA2QH9E9LHKq169Y0eWtH4e/wHQ5Wh5qZakAgvb8EKZ736WZ3B0zLLOBsrsja5Dag==} peerDependencies: react: '>=16.9.0' react-dom: '>=16.9.0' @@ -10073,8 +10115,8 @@ packages: peerDependencies: prop-types: ^15.0 - rc-image@7.11.1: - resolution: {integrity: sha512-XuoWx4KUXg7hNy5mRTy1i8c8p3K8boWg6UajbHpDXS5AlRVucNfTi5YxTtPBTBzegxAZpvuLfh3emXFt6ybUdA==} + rc-image@7.12.0: + resolution: {integrity: sha512-cZ3HTyyckPnNnUb9/DRqduqzLfrQRyi+CdHjdqgsyDpI3Ln5UX1kXnAhPBSJj9pVRzwRFgqkN7p9b6HBDjmu/Q==} peerDependencies: react: '>=16.9.0' react-dom: '>=16.9.0' @@ -10109,8 +10151,8 @@ packages: react: '>=16.9.0' react-dom: '>=16.9.0' - rc-notification@5.6.3: - resolution: {integrity: sha512-42szwnn8VYQoT6GnjO00i1iwqV9D1TTMvxObWsuLwgl0TsOokzhkYiufdtQBsJMFjJravS1hfDKVMHLKLcPE4g==} + rc-notification@5.6.4: + resolution: {integrity: sha512-KcS4O6B4qzM3KH7lkwOB7ooLPZ4b6J+VMmQgT51VZCeEcmghdeR4IrMcFq0LG+RPdnbe/ArT086tGM8Snimgiw==} engines: {node: '>=8.x'} peerDependencies: react: '>=16.9.0' @@ -10173,8 +10215,8 @@ packages: react: '>=16.0.0' react-dom: '>=16.0.0' - rc-select@14.16.6: - resolution: {integrity: sha512-YPMtRPqfZWOm2XGTbx5/YVr1HT0vn//8QS77At0Gjb3Lv+Lbut0IORJPKLWu1hQ3u4GsA0SrDzs7nI8JG7Zmyg==} + rc-select@14.16.8: + resolution: {integrity: sha512-NOV5BZa1wZrsdkKaiK7LHRuo5ZjZYMDxPP6/1+09+FB4KoNi8jcG1ZqLE3AVCxEsYMBe65OBx71wFoHRTP3LRg==} engines: {node: '>=8.x'} peerDependencies: react: '*' @@ -10200,15 +10242,15 @@ packages: react: '>=16.9.0' react-dom: '>=16.9.0' - rc-table@7.50.4: - resolution: {integrity: sha512-Y+YuncnQqoS5e7yHvfvlv8BmCvwDYDX/2VixTBEhkMDk9itS9aBINp4nhzXFKiBP/frG4w0pS9d9Rgisl0T1Bw==} + rc-table@7.50.5: + resolution: {integrity: sha512-FDZu8aolhSYd3v9KOc3lZOVAU77wmRRu44R0Wfb8Oj1dXRUsloFaXMSl6f7yuWZUxArJTli7k8TEOX2mvhDl4A==} engines: {node: '>=8.x'} peerDependencies: react: '>=16.9.0' react-dom: '>=16.9.0' - rc-tabs@15.5.2: - resolution: {integrity: sha512-Hbqf2IV6k/jPgfMjPtIDmPV0D0C9c/fN4B/fYcoh9qqaUzUZQoK0PYzsV3UaV+3UsmyoYt48p74m/HkLhGTw+w==} + rc-tabs@15.6.1: + resolution: {integrity: sha512-/HzDV1VqOsUWyuC0c6AkxVYFjvx9+rFPKZ32ejxX0Uc7QCzcEjTA9/xMgv4HemPKwzBNX8KhGVbbumDjnj92aA==} engines: {node: '>=8.x'} peerDependencies: react: '>=16.9.0' @@ -10239,8 +10281,8 @@ packages: react: '*' react-dom: '*' - rc-upload@4.8.1: - resolution: {integrity: sha512-toEAhwl4hjLAI1u8/CgKWt30BR06ulPa4iGQSMvSXoHzO88gPCslxqV/mnn4gJU7PDoltGIC9Eh+wkeudqgHyw==} + rc-upload@4.9.0: + resolution: {integrity: sha512-pAzlPnyiFn1GCtEybEG2m9nXNzQyWXqWV2xFYCmDxjN9HzyjS5Pz2F+pbNdYw8mMJsixLEKLG0wVy9vOGxJMJA==} peerDependencies: react: '>=16.9.0' react-dom: '>=16.9.0' @@ -10261,6 +10303,13 @@ packages: react: '>=16.9.0' react-dom: '>=16.9.0' + rc-virtual-list@3.18.6: + resolution: {integrity: sha512-TQ5SsutL3McvWmmxqQtMIbfeoE3dGjJrRSfKekgby7WQMpPIFvv4ghytp5Z0s3D8Nik9i9YNOCqHBfk86AwgAA==} + engines: {node: '>=8.x'} + peerDependencies: + react: '>=16.9.0' + react-dom: '>=16.9.0' + rc@1.2.8: resolution: {integrity: sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw==} hasBin: true @@ -10427,8 +10476,8 @@ packages: peerDependencies: react: ^16.0.0 || ^17.0.0 || ^18.0.0 - react-virtuoso@4.12.6: - resolution: {integrity: sha512-bfvS6aCL1ehXmq39KRiz/vxznGUbtA27I5I24TYCe1DhMf84O3aVNCIwrSjYQjkJGJGzY46ihdN8WkYlemuhMQ==} + react-virtuoso@4.12.7: + resolution: {integrity: sha512-njJp764he6Fi1p89PUW0k2kbyWu9w/y+MwdxmwK2kvdwwzVDbz2c2wMj5xdSruBFVgFTsI7Z85hxZR7aSHBrbQ==} peerDependencies: react: '>=16 || >=17 || >= 18 || >= 19' react-dom: '>=16 || >=17 || >= 18 || >=19' @@ -10480,6 +10529,14 @@ packages: resolution: {integrity: sha512-6tDA8g98We0zd0GvVeMT9arEOnTw9qM03L9cJXaCjrip1OO764RDBLBfrB4cwzNGDj5OA5ioymC9GkizgWJDUg==} engines: {node: '>=8'} + redis-errors@1.2.0: + resolution: {integrity: sha512-1qny3OExCf0UvUV/5wpYKf2YwPcOqXzkwKKSmKHiE6ZMQs5heeE/c8eXK+PNllPvmjgAbfnsbpkGZWy8cBpn9w==} + engines: {node: '>=4'} + + redis-parser@3.0.0: + resolution: {integrity: sha512-DJnGAeenTdpMEH6uAJRK/uiyEIH9WVsUmoLwzudwGJUwZPp80PDBWPHXSAGNPwNvIXAbe7MSUB1zQFugFml66A==} + engines: {node: '>=4'} + redux@4.2.1: resolution: {integrity: sha512-LAUYz4lc+Do8/g7aeRa8JkyDErK6ekstQaqWQrNRW//MY1TvCEpMtpTWvlQ+FPbWCx+Xixu/6SHt5N0HR+SB4w==} @@ -10689,8 +10746,8 @@ packages: resolution: {integrity: sha512-1HwIYD/8UlOtFS3QO3w7ey+SdSDFE4HRNLZoZRYVQefrOY3l17epswImeB1ijgJFQJodIaHcwkp3r/myBjFVbg==} deprecated: This package has been deprecated in favour of @sinonjs/samsam - sanitize-html@2.15.0: - resolution: {integrity: sha512-wIjst57vJGpLyBP8ioUbg6ThwJie5SuSIjHxJg53v5Fg+kUK+AXlb7bK3RNXpp315MvwM+0OBGCV6h5pPHsVhA==} + sanitize-html@2.17.0: + resolution: {integrity: sha512-dLAADUSS8rBwhaevT12yCezvioCA+bmUTPH/u57xKPT8d++voeYE6HeluA/bPbQ15TwDBG2ii+QZIEmYx8VdxA==} sass-loader@16.0.5: resolution: {integrity: sha512-oL+CMBXrj6BZ/zOq4os+UECPL+bWqt6OAC6DWS8Ln8GZRcMDjlJ4JC3FBDuHJdYaFWIdKNIBYmtZtK2MaMkNIw==} @@ -10713,8 +10770,8 @@ packages: webpack: optional: true - sass@1.86.3: - resolution: {integrity: sha512-iGtg8kus4GrsGLRDLRBRHY9dNVA78ZaS7xr01cWnS7PEMQyFtTqBiyCrfpTYTZXRWM94akzckYjh8oADfFNTzw==} + sass@1.89.0: + resolution: {integrity: sha512-ld+kQU8YTdGNjOLfRWBzewJpU5cwEv/h5yyqlSeJcj6Yh8U4TDA9UA5FPicqDz/xgRPWRSYIQNiFks21TbA9KQ==} engines: {node: '>=14.0.0'} hasBin: true @@ -10732,6 +10789,10 @@ packages: resolution: {integrity: sha512-Gf9qqc58SpCA/xdziiHz35F4GNIWYWZrEshUc/G/r5BnLph6xpKuLeoJoQuj5WfBIx/eQLf+hmVPYHaxJu7V2g==} engines: {node: '>= 10.13.0'} + schema-utils@4.3.2: + resolution: {integrity: sha512-Gn/JaSk/Mt9gYubxTtSn/QCV4em9mpAPiR1rqy/Ocu19u/G9J5WWdNoUT4SiV6mFC3y6cxyFcFwdzPM3FgxGAQ==} + engines: {node: '>= 10.13.0'} + script-loader@0.7.2: resolution: {integrity: sha512-UMNLEvgOAQuzK8ji8qIscM3GIrRCWN6MmMXGD4SD5l6cSycgGsCo0tX5xRnfQcoghqct0tjHjcykgI1PyBE2aA==} @@ -10766,7 +10827,12 @@ packages: engines: {node: '>=10'} hasBin: true - send@0.19.0: + semver@7.7.2: + resolution: {integrity: sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==} + engines: {node: '>=10'} + hasBin: true + + send@0.19.0: resolution: {integrity: sha512-dW41u5VfLXu8SJh5bwRmyYUbAoSB3c9uQh6L8h/KtsFREPWpbX1lrljJo186Jc4nmci/sGUZ9a0a0J2zgfq2hw==} engines: {node: '>= 0.8.0'} @@ -10926,6 +10992,21 @@ packages: resolution: {integrity: sha512-94hK0Hh8rPqQl2xXc3HsaBoOXKV20MToPkcXvwbISWLEs+64sBq5kFgn2kJDHb1Pry9yrP0dxrCI9RRci7RXKg==} engines: {node: '>= 6.0.0', npm: '>= 3.0.0'} + socket.io-adapter@2.5.5: + resolution: {integrity: sha512-eLDQas5dzPgOWCk9GuuJC2lBqItuhKI4uxGgo9aIV7MYbk2h9Q6uULEh8WBzThoI7l+qU9Ast9fVUmkqPP9wYg==} + + socket.io-client@4.8.1: + resolution: {integrity: sha512-hJVXfu3E28NmzGk8o1sHhN3om52tRvwYeidbj7xKy2eIIse5IoKX3USlS6Tqt3BHAtflLIkCQBkzVrEEfWUyYQ==} + engines: {node: '>=10.0.0'} + + socket.io-parser@4.2.4: + resolution: {integrity: sha512-/GbIKmo8ioc+NIWIhwdecY0ge+qVBSMdgxGygevmdHj24bsfgtCmcUUcQ5ZzcylGFHsN3k4HB4Cgkl96KVnuew==} + engines: {node: '>=10.0.0'} + + socket.io@4.8.1: + resolution: {integrity: sha512-oZ7iUCxph8WYRHHcjBEc9unw3adt5CmSNlppj/5Q4k2RIrhl8Z5yY2Xr4j9zj0+wzVZ0bxmYoGSzKJnRl6A4yg==} + engines: {node: '>=10.2.0'} + sockjs@0.3.24: resolution: {integrity: sha512-GJgLTZ7vYb/JtPSSZ10hsOYIvEYsjbNU+zPdIHcUaWVNUEPivzxku31865sSSud0Da0W4lEeOPlmw93zLQchuQ==} @@ -11006,6 +11087,9 @@ packages: stackframe@1.3.4: resolution: {integrity: sha512-oeVtt7eWQS+Na6F//S4kJ2K2VbRlS9D43mAlMyVpVWovy9o+jfgH8O9agzANzaiLjclA0oYzUXEM4PurhSUChw==} + standard-as-callback@2.1.0: + resolution: {integrity: sha512-qoRRSyROncaz1z0mvYqIE4lCd9p2R90i6GxW3uZv5ucSu8tU7B5HXUP1gG8pVZsYNVaXjk8ClXHPttLyxAL48A==} + static-eval@2.1.1: resolution: {integrity: sha512-MgWpQ/ZjGieSVB3eOJVs4OA2LT/q1vx98KPCTTQPzq/aLr0YUXTsgryTXr4SLfR0ZfUUCiedM9n/ABeDIyy4mA==} @@ -11237,6 +11321,10 @@ packages: resolution: {integrity: sha512-GNzQvQTOIP6RyTfE2Qxb8ZVlNmw0n88vp1szwWRimP02mnTsx3Wtn5qRdqY9w2XduFNUgvOwhNnQsjwCp+kqaQ==} engines: {node: '>=6'} + tapable@2.2.2: + resolution: {integrity: sha512-Re10+NauLTMCudc7T5WLFLAwDhQ0JWdrMK+9B2M8zR5hRExKmsRDCBA7/aV/pNJFltmBFO5BAMlQFi/vq3nKOg==} + engines: {node: '>=6'} + tar-fs@2.1.2: resolution: {integrity: sha512-EsaAXwxmx8UB7FRKqeozqEPop69DXcmYwTQwXvyAPF352HJsPdkVhvTaDPYqfNgruveJIJy3TA2l+2zj8LJIJA==} @@ -11291,6 +11379,11 @@ packages: engines: {node: '>=10'} hasBin: true + terser@5.39.2: + resolution: {integrity: sha512-yEPUmWve+VA78bI71BW70Dh0TuV4HHd+I5SHOAfS1+QBOmvmCiiffgjR8ryyEd3KIfvPGFqoADt8LdQ6XpXIvg==} + engines: {node: '>=10'} + hasBin: true + test-exclude@6.0.0: resolution: {integrity: sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w==} engines: {node: '>=8'} @@ -11353,8 +11446,8 @@ packages: tinyexec@0.3.2: resolution: {integrity: sha512-KQQR9yN7R5+OSwaK0XQoj22pwHoTlgYqmUscPYoknOoWCWfj/5/ABTMRi69FrKU5ffPVh5QcFikpWJI/P1ocHA==} - tinyglobby@0.2.12: - resolution: {integrity: sha512-qkf4trmKSIiMTs/E63cxH+ojC2unam7rJ0WrauAzpT3ECNTxGRMlaXxVbfxMUC/w0LaYk6jQ4y/nGR9uBO3tww==} + tinyglobby@0.2.13: + resolution: {integrity: sha512-mEwzpUgrLySlveBwEVDMKk5B57bhLPYovRfPAXD5gA/98Opn0rCDj3GtLwFvCvH5RK9uPCExUROW5NjDwvqkxw==} engines: {node: '>=12.0.0'} tinyqueue@2.0.3: @@ -11440,8 +11533,8 @@ packages: resolution: {integrity: sha512-q5W7tVM71e2xjHZTlgfTDoPF/SmqKG5hddq9SzR49CH2hayqRKJtQ4mtRlSxKaJlR/+9rEM+mnBHf7I2/BQcpQ==} engines: {node: '>=6.10'} - ts-jest@29.2.5: - resolution: {integrity: sha512-KD8zB2aAZrcKIdGk4OwpJggeLcH1FgrICqDSROWqlnJXGCXK4Mn6FcdK2B6670Xr73lHMG1kHw8R87A0ecZ+vA==} + ts-jest@29.3.4: + resolution: {integrity: sha512-Iqbrm8IXOmV+ggWHOTEbjwyCf2xZlUMv5npExksXohL+tk8va4Fjhb+X2+Rt9NBmgO7bJ8WpnMLOwih/DnMlFA==} engines: {node: ^14.15.0 || ^16.10.0 || ^18.0.0 || >=20.0.0} hasBin: true peerDependencies: @@ -11485,9 +11578,6 @@ packages: tunnel-agent@0.6.0: resolution: {integrity: sha512-McnNiV1l8RYeY8tBgEpuodCC1mLUdbSN+CYBL7kJsJNInOP8UjDDEwdk6Mw60vdLLrr5NHKZhMAOSrR2NZuQ+w==} - tweetnacl@1.0.3: - resolution: {integrity: sha512-6rt+RN7aOi1nGMyC4Xa5DdYiukl2UWCbcJft7YhxReBGQD7OAM8Pbxw6YMo4r2diNEA8FEmu32YOn9rhaiE5yw==} - type-check@0.4.0: resolution: {integrity: sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==} engines: {node: '>= 0.8.0'} @@ -11532,6 +11622,10 @@ packages: resolution: {integrity: sha512-rfgpoi08xagF3JSdtJlCwMq9DGNDE0IMh3Mkpc1wUypg9vPi786AiqeBBKcqvIkq42azsBM85N490fyZjeUftw==} engines: {node: '>=16'} + type-fest@4.41.0: + resolution: {integrity: sha512-TeTSQ6H5YHvpqVwBRcnLDCBnDOHWYu7IvGbHT6N8AOymcr9PJGjc1GTtiWZTYg0NCgYwvnYWEkVChQAr9bjfwA==} + engines: {node: '>=16'} + type-is@1.6.18: resolution: {integrity: sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==} engines: {node: '>= 0.6'} @@ -11561,8 +11655,8 @@ packages: typedarray@0.0.6: resolution: {integrity: sha512-/aCDEGatGvZ2BIk+HmLf4ifCJFwvKFNb9/JeZPMulfgFracn9QFcAf5GO8B/mweUjSoblS5In0cWhqpfs/5PQA==} - typescript@5.8.2: - resolution: {integrity: sha512-aJn6wq13/afZp/jT9QZmwEjDqqvSGp1VT5GVg+f/t6/oVyrgXM6BY1h9BRh/O5p3PlUPAe+WuiEZOmb/49RqoQ==} + typescript@5.8.3: + resolution: {integrity: sha512-p1diW6TqL9L07nNxvRMM7hMMw4c5XOo/1ibL4aAIGmSAt9slTE1Xgw5KWuof2uTOvCg9BY7ZRi+GaF+7sfgPeQ==} engines: {node: '>=14.17'} hasBin: true @@ -11605,6 +11699,9 @@ packages: undici-types@5.26.5: resolution: {integrity: sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==} + undici-types@6.21.0: + resolution: {integrity: sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==} + unicorn-magic@0.1.0: resolution: {integrity: sha512-lRfVq8fE8gz6QMBuDM6a+LO3IAzTi05H6gCVaUpir2E1Rwpo4ZUog45KpNXKC/Mn3Yb9UDuHumeFTo9iV/D9FQ==} engines: {node: '>=18'} @@ -11896,8 +11993,8 @@ packages: warning@4.0.3: resolution: {integrity: sha512-rpJyN222KWIvHJ/F53XSZv0Zl/accqHR8et1kpaMTD/fLCRxtV8iX8czMzY7sVZupTI3zcUTg8eycS2kNF9l6w==} - watchpack@2.4.2: - resolution: {integrity: sha512-TnbFSbcOCcDgjZ4piURLCbJ3nJhznVh9kw6F6iokjiFPl8ONxe9A6nMDVXDiNbrSfLILs6vB07F7wLBrwPYzJw==} + watchpack@2.4.4: + resolution: {integrity: sha512-c5EGNOiyxxV5qmTtAB7rbiXxi1ooX1pQKMLX/MIabJjRA0SJBQOjKF+KSVfHkr9U1cADPon0mRiVe/riyaiDUA==} engines: {node: '>=10.13.0'} wbuf@1.7.3: @@ -12064,8 +12161,20 @@ packages: utf-8-validate: optional: true - ws@8.18.1: - resolution: {integrity: sha512-RKW2aJZMXeMxVpnZ6bck+RswznaxmzdULiBr6KY7XkTnW8uvt0iT9H5DkHUChXrc+uurzwa0rVI16n/Xzjdz1w==} + ws@8.17.1: + resolution: {integrity: sha512-6XQFvXTkbfUOZOKKILFG1PDK2NDQs4azKQl26T0YS5CxqWLgXajbPZ+h4gZekJyRqFU8pvnbAbbs/3TgRPy+GQ==} + engines: {node: '>=10.0.0'} + peerDependencies: + bufferutil: ^4.0.1 + utf-8-validate: '>=5.0.2' + peerDependenciesMeta: + bufferutil: + optional: true + utf-8-validate: + optional: true + + ws@8.18.2: + resolution: {integrity: sha512-DMricUmwGZUVr++AEAe2uiVM7UoO9MAVZMDu05UQOaUII0lp+zOzLLU4Xqh/JvTqklB1T4uELaaPBKyjE1r4fQ==} engines: {node: '>=10.0.0'} peerDependencies: bufferutil: ^4.0.1 @@ -12103,6 +12212,10 @@ packages: resolution: {integrity: sha512-yMqGBqtXyeN1e3TGYvgNgDVZ3j84W4cwkOXQswghol6APgZWaff9lnbvN7MHYJOiXsvGPXtjTYJEiC9J2wv9Eg==} engines: {node: '>=8.0'} + xmlhttprequest-ssl@2.1.2: + resolution: {integrity: sha512-TEU+nJVUUnA4CYJFLvK5X9AOeH4KvDvhIfm0vV1GaQRtchnG0hgK5p8hw/xjv8cunWYCsiPCSDzObPyhEwq3KQ==} + engines: {node: '>=0.4.0'} + xpath@0.0.32: resolution: {integrity: sha512-rxMJhSIoiO8vXcWvSifKqhvV96GjiD5wYb8/QHdoRyQvraTpp4IEv944nhGausZZ3u7dhQXteZuZbaqfpB7uYw==} engines: {node: '>=0.6.0'} @@ -12166,9 +12279,9 @@ packages: yallist@4.0.0: resolution: {integrity: sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==} - yaml@2.7.1: - resolution: {integrity: sha512-10ULxpnOCQXxJvBgxsn9ptjq6uviG/htZKk9veJGhlqn3w/DxQ631zFF+nlQXLwmImeS5amR2dl2U8sg6U9jsQ==} - engines: {node: '>= 14'} + yaml@2.8.0: + resolution: {integrity: sha512-4lLa/EcQCB0cJkyts+FpIRx5G/llPxfP6VQU5KByHEhLxY3IJCH0f0Hy1MHI8sClTvsIb8qwRJ6R/ZdlDJ/leQ==} + engines: {node: '>= 14.6'} hasBin: true yargs-parser@18.1.3: @@ -12227,6 +12340,13 @@ packages: zod@3.24.2: resolution: {integrity: sha512-lY7CDW43ECgW9u1TcT3IoXHflywfVqDYze4waEz812jR/bZ8FHDsl7pFQoSZTz5N+2NqRXs8GBwnAwo3ZNxqhQ==} + zod@3.25.17: + resolution: {integrity: sha512-8hQzQ/kMOIFbwOgPrm9Sf9rtFHpFUMy4HvN0yEB0spw14aYi0uT5xG5CE2DB9cd51GWNsz+DNO7se1kztHMKnw==} + + zstd-napi@0.0.10: + resolution: {integrity: sha512-pwnG+auSiIrD2BNSIpPEUtcRSK33cfYmKo3sJPTohFiPqPci9F4SIRPR7gGeI45Maj4nFoyyxzT2YDxVXIIgzQ==} + engines: {node: ^12.22.0 || ^14.17.0 || ^15.12.0 || >=16} + zwitch@2.0.4: resolution: {integrity: sha512-bXE4cR/kVZhKZX/RjPEflHaKVhUVl85noU3v6b8apfQEc1x4A+zBxjZ4lN8LqGd6WZ3dl98pY4o717VFmoPp+A==} @@ -12249,10 +12369,10 @@ snapshots: dependencies: '@ant-design/fast-color': 3.0.0 - '@ant-design/compatible@5.1.4(antd@5.24.7(date-fns@2.30.0)(moment@2.30.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(prop-types@15.8.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + '@ant-design/compatible@5.1.4(antd@5.25.2(date-fns@2.30.0)(moment@2.30.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(prop-types@15.8.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': dependencies: '@ctrl/tinycolor': 3.6.1 - antd: 5.24.7(date-fns@2.30.0)(moment@2.30.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + antd: 5.25.2(date-fns@2.30.0)(moment@2.30.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) classnames: 2.5.1 dayjs: 1.11.13 lodash.camelcase: 4.3.0 @@ -12330,15 +12450,15 @@ snapshots: '@antfu/utils@8.1.1': {} - '@anthropic-ai/sdk@0.37.0(encoding@0.1.13)': + '@anthropic-ai/sdk@0.39.0(encoding@0.1.13)': dependencies: - '@types/node': 18.19.86 + '@types/node': 18.19.103 '@types/node-fetch': 2.6.12 abort-controller: 3.0.0 agentkeepalive: 4.6.0 form-data-encoder: 1.7.2 formdata-node: 4.4.1 - node-fetch: 2.6.7(encoding@0.1.13) + node-fetch: 2.7.0(encoding@0.1.13) transitivePeerDependencies: - encoding @@ -12397,6 +12517,12 @@ snapshots: js-tokens: 4.0.0 picocolors: 1.1.1 + '@babel/code-frame@7.27.1': + dependencies: + '@babel/helper-validator-identifier': 7.27.1 + js-tokens: 4.0.0 + picocolors: 1.1.1 + '@babel/compat-data@7.26.8': {} '@babel/core@7.26.9': @@ -12412,7 +12538,7 @@ snapshots: '@babel/traverse': 7.27.0 '@babel/types': 7.27.0 convert-source-map: 2.0.0 - debug: 4.4.0(supports-color@8.1.1) + debug: 4.4.1(supports-color@8.1.1) gensync: 1.0.0-beta.2 json5: 2.2.3 semver: 6.3.1 @@ -12434,35 +12560,43 @@ snapshots: '@jridgewell/trace-mapping': 0.3.25 jsesc: 3.1.0 - '@babel/helper-annotate-as-pure@7.25.9': + '@babel/generator@7.27.1': dependencies: - '@babel/types': 7.27.0 + '@babel/parser': 7.27.2 + '@babel/types': 7.27.1 + '@jridgewell/gen-mapping': 0.3.8 + '@jridgewell/trace-mapping': 0.3.25 + jsesc: 3.1.0 + + '@babel/helper-annotate-as-pure@7.27.1': + dependencies: + '@babel/types': 7.27.1 '@babel/helper-compilation-targets@7.27.0': dependencies: '@babel/compat-data': 7.26.8 - '@babel/helper-validator-option': 7.25.9 + '@babel/helper-validator-option': 7.27.1 browserslist: 4.24.4 lru-cache: 5.1.1 semver: 6.3.1 - '@babel/helper-create-class-features-plugin@7.27.0(@babel/core@7.26.9)': + '@babel/helper-create-class-features-plugin@7.27.1(@babel/core@7.26.9)': dependencies: '@babel/core': 7.26.9 - '@babel/helper-annotate-as-pure': 7.25.9 - '@babel/helper-member-expression-to-functions': 7.25.9 - '@babel/helper-optimise-call-expression': 7.25.9 - '@babel/helper-replace-supers': 7.26.5(@babel/core@7.26.9) - '@babel/helper-skip-transparent-expression-wrappers': 7.25.9 - '@babel/traverse': 7.27.0 + '@babel/helper-annotate-as-pure': 7.27.1 + '@babel/helper-member-expression-to-functions': 7.27.1 + '@babel/helper-optimise-call-expression': 7.27.1 + '@babel/helper-replace-supers': 7.27.1(@babel/core@7.26.9) + '@babel/helper-skip-transparent-expression-wrappers': 7.27.1 + '@babel/traverse': 7.27.1 semver: 6.3.1 transitivePeerDependencies: - supports-color - '@babel/helper-member-expression-to-functions@7.25.9': + '@babel/helper-member-expression-to-functions@7.27.1': dependencies: - '@babel/traverse': 7.27.0 - '@babel/types': 7.27.0 + '@babel/traverse': 7.27.1 + '@babel/types': 7.27.1 transitivePeerDependencies: - supports-color @@ -12473,6 +12607,13 @@ snapshots: transitivePeerDependencies: - supports-color + '@babel/helper-module-imports@7.27.1': + dependencies: + '@babel/traverse': 7.27.1 + '@babel/types': 7.27.1 + transitivePeerDependencies: + - supports-color + '@babel/helper-module-transforms@7.26.0(@babel/core@7.26.9)': dependencies: '@babel/core': 7.26.9 @@ -12482,27 +12623,34 @@ snapshots: transitivePeerDependencies: - supports-color - '@babel/helper-optimise-call-expression@7.25.9': + '@babel/helper-module-transforms@7.27.1(@babel/core@7.26.9)': dependencies: - '@babel/types': 7.27.0 + '@babel/core': 7.26.9 + '@babel/helper-module-imports': 7.27.1 + '@babel/helper-validator-identifier': 7.27.1 + '@babel/traverse': 7.27.1 + transitivePeerDependencies: + - supports-color - '@babel/helper-plugin-utils@7.24.8': {} + '@babel/helper-optimise-call-expression@7.27.1': + dependencies: + '@babel/types': 7.27.1 - '@babel/helper-plugin-utils@7.26.5': {} + '@babel/helper-plugin-utils@7.27.1': {} - '@babel/helper-replace-supers@7.26.5(@babel/core@7.26.9)': + '@babel/helper-replace-supers@7.27.1(@babel/core@7.26.9)': dependencies: '@babel/core': 7.26.9 - '@babel/helper-member-expression-to-functions': 7.25.9 - '@babel/helper-optimise-call-expression': 7.25.9 - '@babel/traverse': 7.27.0 + '@babel/helper-member-expression-to-functions': 7.27.1 + '@babel/helper-optimise-call-expression': 7.27.1 + '@babel/traverse': 7.27.1 transitivePeerDependencies: - supports-color - '@babel/helper-skip-transparent-expression-wrappers@7.25.9': + '@babel/helper-skip-transparent-expression-wrappers@7.27.1': dependencies: - '@babel/traverse': 7.27.0 - '@babel/types': 7.27.0 + '@babel/traverse': 7.27.1 + '@babel/types': 7.27.1 transitivePeerDependencies: - supports-color @@ -12510,13 +12658,17 @@ snapshots: '@babel/helper-string-parser@7.25.9': {} + '@babel/helper-string-parser@7.27.1': {} + '@babel/helper-validator-identifier@7.24.7': {} '@babel/helper-validator-identifier@7.25.7': {} '@babel/helper-validator-identifier@7.25.9': {} - '@babel/helper-validator-option@7.25.9': {} + '@babel/helper-validator-identifier@7.27.1': {} + + '@babel/helper-validator-option@7.27.1': {} '@babel/helpers@7.27.0': dependencies: @@ -12532,7 +12684,7 @@ snapshots: '@babel/highlight@7.25.7': dependencies: - '@babel/helper-validator-identifier': 7.25.9 + '@babel/helper-validator-identifier': 7.27.1 chalk: 2.4.2 js-tokens: 4.0.0 picocolors: 1.1.1 @@ -12541,113 +12693,112 @@ snapshots: dependencies: '@babel/types': 7.27.0 + '@babel/parser@7.27.2': + dependencies: + '@babel/types': 7.27.1 + '@babel/plugin-syntax-async-generators@7.8.4(@babel/core@7.26.9)': dependencies: '@babel/core': 7.26.9 - '@babel/helper-plugin-utils': 7.24.8 + '@babel/helper-plugin-utils': 7.27.1 '@babel/plugin-syntax-bigint@7.8.3(@babel/core@7.26.9)': dependencies: '@babel/core': 7.26.9 - '@babel/helper-plugin-utils': 7.24.8 + '@babel/helper-plugin-utils': 7.27.1 '@babel/plugin-syntax-class-properties@7.12.13(@babel/core@7.26.9)': dependencies: '@babel/core': 7.26.9 - '@babel/helper-plugin-utils': 7.24.8 + '@babel/helper-plugin-utils': 7.27.1 '@babel/plugin-syntax-import-meta@7.10.4(@babel/core@7.26.9)': dependencies: '@babel/core': 7.26.9 - '@babel/helper-plugin-utils': 7.24.8 + '@babel/helper-plugin-utils': 7.27.1 '@babel/plugin-syntax-json-strings@7.8.3(@babel/core@7.26.9)': dependencies: '@babel/core': 7.26.9 - '@babel/helper-plugin-utils': 7.24.8 + '@babel/helper-plugin-utils': 7.27.1 - '@babel/plugin-syntax-jsx@7.24.7(@babel/core@7.26.9)': + '@babel/plugin-syntax-jsx@7.27.1(@babel/core@7.26.9)': dependencies: '@babel/core': 7.26.9 - '@babel/helper-plugin-utils': 7.24.8 - - '@babel/plugin-syntax-jsx@7.25.9(@babel/core@7.26.9)': - dependencies: - '@babel/core': 7.26.9 - '@babel/helper-plugin-utils': 7.26.5 + '@babel/helper-plugin-utils': 7.27.1 '@babel/plugin-syntax-logical-assignment-operators@7.10.4(@babel/core@7.26.9)': dependencies: '@babel/core': 7.26.9 - '@babel/helper-plugin-utils': 7.24.8 + '@babel/helper-plugin-utils': 7.27.1 '@babel/plugin-syntax-nullish-coalescing-operator@7.8.3(@babel/core@7.26.9)': dependencies: '@babel/core': 7.26.9 - '@babel/helper-plugin-utils': 7.24.8 + '@babel/helper-plugin-utils': 7.27.1 '@babel/plugin-syntax-numeric-separator@7.10.4(@babel/core@7.26.9)': dependencies: '@babel/core': 7.26.9 - '@babel/helper-plugin-utils': 7.24.8 + '@babel/helper-plugin-utils': 7.27.1 '@babel/plugin-syntax-object-rest-spread@7.8.3(@babel/core@7.26.9)': dependencies: '@babel/core': 7.26.9 - '@babel/helper-plugin-utils': 7.24.8 + '@babel/helper-plugin-utils': 7.27.1 '@babel/plugin-syntax-optional-catch-binding@7.8.3(@babel/core@7.26.9)': dependencies: '@babel/core': 7.26.9 - '@babel/helper-plugin-utils': 7.24.8 + '@babel/helper-plugin-utils': 7.27.1 '@babel/plugin-syntax-optional-chaining@7.8.3(@babel/core@7.26.9)': dependencies: '@babel/core': 7.26.9 - '@babel/helper-plugin-utils': 7.24.8 + '@babel/helper-plugin-utils': 7.27.1 '@babel/plugin-syntax-top-level-await@7.14.5(@babel/core@7.26.9)': dependencies: '@babel/core': 7.26.9 - '@babel/helper-plugin-utils': 7.24.8 + '@babel/helper-plugin-utils': 7.27.1 '@babel/plugin-syntax-typescript@7.25.4(@babel/core@7.26.9)': dependencies: '@babel/core': 7.26.9 - '@babel/helper-plugin-utils': 7.24.8 + '@babel/helper-plugin-utils': 7.27.1 - '@babel/plugin-syntax-typescript@7.25.9(@babel/core@7.26.9)': + '@babel/plugin-syntax-typescript@7.27.1(@babel/core@7.26.9)': dependencies: '@babel/core': 7.26.9 - '@babel/helper-plugin-utils': 7.26.5 + '@babel/helper-plugin-utils': 7.27.1 - '@babel/plugin-transform-modules-commonjs@7.26.3(@babel/core@7.26.9)': + '@babel/plugin-transform-modules-commonjs@7.27.1(@babel/core@7.26.9)': dependencies: '@babel/core': 7.26.9 - '@babel/helper-module-transforms': 7.26.0(@babel/core@7.26.9) - '@babel/helper-plugin-utils': 7.26.5 + '@babel/helper-module-transforms': 7.27.1(@babel/core@7.26.9) + '@babel/helper-plugin-utils': 7.27.1 transitivePeerDependencies: - supports-color - '@babel/plugin-transform-typescript@7.27.0(@babel/core@7.26.9)': + '@babel/plugin-transform-typescript@7.27.1(@babel/core@7.26.9)': dependencies: '@babel/core': 7.26.9 - '@babel/helper-annotate-as-pure': 7.25.9 - '@babel/helper-create-class-features-plugin': 7.27.0(@babel/core@7.26.9) - '@babel/helper-plugin-utils': 7.26.5 - '@babel/helper-skip-transparent-expression-wrappers': 7.25.9 - '@babel/plugin-syntax-typescript': 7.25.9(@babel/core@7.26.9) + '@babel/helper-annotate-as-pure': 7.27.1 + '@babel/helper-create-class-features-plugin': 7.27.1(@babel/core@7.26.9) + '@babel/helper-plugin-utils': 7.27.1 + '@babel/helper-skip-transparent-expression-wrappers': 7.27.1 + '@babel/plugin-syntax-typescript': 7.27.1(@babel/core@7.26.9) transitivePeerDependencies: - supports-color - '@babel/preset-typescript@7.27.0(@babel/core@7.26.9)': + '@babel/preset-typescript@7.27.1(@babel/core@7.26.9)': dependencies: '@babel/core': 7.26.9 - '@babel/helper-plugin-utils': 7.26.5 - '@babel/helper-validator-option': 7.25.9 - '@babel/plugin-syntax-jsx': 7.25.9(@babel/core@7.26.9) - '@babel/plugin-transform-modules-commonjs': 7.26.3(@babel/core@7.26.9) - '@babel/plugin-transform-typescript': 7.27.0(@babel/core@7.26.9) + '@babel/helper-plugin-utils': 7.27.1 + '@babel/helper-validator-option': 7.27.1 + '@babel/plugin-syntax-jsx': 7.27.1(@babel/core@7.26.9) + '@babel/plugin-transform-modules-commonjs': 7.27.1(@babel/core@7.26.9) + '@babel/plugin-transform-typescript': 7.27.1(@babel/core@7.26.9) transitivePeerDependencies: - supports-color @@ -12659,12 +12810,20 @@ snapshots: dependencies: regenerator-runtime: 0.14.1 + '@babel/runtime@7.27.1': {} + '@babel/template@7.27.0': dependencies: '@babel/code-frame': 7.26.2 '@babel/parser': 7.27.0 '@babel/types': 7.27.0 + '@babel/template@7.27.2': + dependencies: + '@babel/code-frame': 7.27.1 + '@babel/parser': 7.27.2 + '@babel/types': 7.27.1 + '@babel/traverse@7.27.0': dependencies: '@babel/code-frame': 7.26.2 @@ -12672,7 +12831,19 @@ snapshots: '@babel/parser': 7.27.0 '@babel/template': 7.27.0 '@babel/types': 7.27.0 - debug: 4.4.0(supports-color@8.1.1) + debug: 4.4.1(supports-color@8.1.1) + globals: 11.12.0 + transitivePeerDependencies: + - supports-color + + '@babel/traverse@7.27.1': + dependencies: + '@babel/code-frame': 7.27.1 + '@babel/generator': 7.27.1 + '@babel/parser': 7.27.2 + '@babel/template': 7.27.2 + '@babel/types': 7.27.1 + debug: 4.4.1(supports-color@8.1.1) globals: 11.12.0 transitivePeerDependencies: - supports-color @@ -12688,6 +12859,11 @@ snapshots: '@babel/helper-string-parser': 7.25.9 '@babel/helper-validator-identifier': 7.25.9 + '@babel/types@7.27.1': + dependencies: + '@babel/helper-string-parser': 7.27.1 + '@babel/helper-validator-identifier': 7.27.1 + '@bcoe/v8-coverage@0.2.3': {} '@braintree/sanitize-url@7.1.1': {} @@ -12720,7 +12896,7 @@ snapshots: awaiting: 3.0.0 cheerio: 1.0.0-rc.12 csv-parse: 5.6.0 - debug: 4.4.0(supports-color@8.1.1) + debug: 4.4.1(supports-color@8.1.1) transitivePeerDependencies: - supports-color @@ -12734,20 +12910,20 @@ snapshots: '@cocalc/primus-responder@1.0.5': dependencies: - debug: 4.4.0(supports-color@8.1.1) + debug: 4.4.1(supports-color@8.1.1) node-uuid: 1.4.8 transitivePeerDependencies: - supports-color '@cocalc/widgets@1.2.0': {} - '@cspell/cspell-bundled-dicts@8.18.1': + '@cspell/cspell-bundled-dicts@8.19.4': dependencies: '@cspell/dict-ada': 4.1.0 '@cspell/dict-al': 1.1.0 '@cspell/dict-aws': 4.0.10 '@cspell/dict-bash': 4.2.0 - '@cspell/dict-companies': 3.1.14 + '@cspell/dict-companies': 3.2.1 '@cspell/dict-cpp': 6.0.8 '@cspell/dict-cryptocurrencies': 5.0.4 '@cspell/dict-csharp': 4.0.6 @@ -12755,20 +12931,20 @@ snapshots: '@cspell/dict-dart': 2.3.0 '@cspell/dict-data-science': 2.0.8 '@cspell/dict-django': 4.1.4 - '@cspell/dict-docker': 1.1.13 + '@cspell/dict-docker': 1.1.14 '@cspell/dict-dotnet': 5.0.9 '@cspell/dict-elixir': 4.0.7 - '@cspell/dict-en-common-misspellings': 2.0.10 + '@cspell/dict-en-common-misspellings': 2.0.11 '@cspell/dict-en-gb': 1.1.33 - '@cspell/dict-en_us': 4.4.0 - '@cspell/dict-filetypes': 3.0.11 + '@cspell/dict-en_us': 4.4.9 + '@cspell/dict-filetypes': 3.0.12 '@cspell/dict-flutter': 1.1.0 '@cspell/dict-fonts': 4.0.4 '@cspell/dict-fsharp': 1.1.0 '@cspell/dict-fullstack': 3.2.6 '@cspell/dict-gaming-terms': 1.1.1 - '@cspell/dict-git': 3.0.4 - '@cspell/dict-golang': 6.0.20 + '@cspell/dict-git': 3.0.5 + '@cspell/dict-golang': 6.0.21 '@cspell/dict-google': 1.0.8 '@cspell/dict-haskell': 4.0.5 '@cspell/dict-html': 4.0.11 @@ -12784,17 +12960,17 @@ snapshots: '@cspell/dict-markdown': 2.0.10(@cspell/dict-css@4.0.17)(@cspell/dict-html-symbol-entities@4.0.3)(@cspell/dict-html@4.0.11)(@cspell/dict-typescript@3.2.1) '@cspell/dict-monkeyc': 1.0.10 '@cspell/dict-node': 5.0.7 - '@cspell/dict-npm': 5.1.34 + '@cspell/dict-npm': 5.2.4 '@cspell/dict-php': 4.0.14 '@cspell/dict-powershell': 5.0.14 '@cspell/dict-public-licenses': 2.0.13 - '@cspell/dict-python': 4.2.17 + '@cspell/dict-python': 4.2.18 '@cspell/dict-r': 2.1.0 '@cspell/dict-ruby': 5.0.8 '@cspell/dict-rust': 4.0.11 '@cspell/dict-scala': 5.0.7 '@cspell/dict-shell': 1.1.0 - '@cspell/dict-software-terms': 5.0.5 + '@cspell/dict-software-terms': 5.0.10 '@cspell/dict-sql': 2.2.0 '@cspell/dict-svelte': 1.0.6 '@cspell/dict-swift': 2.0.5 @@ -12802,19 +12978,19 @@ snapshots: '@cspell/dict-typescript': 3.2.1 '@cspell/dict-vue': 3.0.4 - '@cspell/cspell-json-reporter@8.18.1': + '@cspell/cspell-json-reporter@8.19.4': dependencies: - '@cspell/cspell-types': 8.18.1 + '@cspell/cspell-types': 8.19.4 - '@cspell/cspell-pipe@8.18.1': {} + '@cspell/cspell-pipe@8.19.4': {} - '@cspell/cspell-resolver@8.18.1': + '@cspell/cspell-resolver@8.19.4': dependencies: global-directory: 4.0.1 - '@cspell/cspell-service-bus@8.18.1': {} + '@cspell/cspell-service-bus@8.19.4': {} - '@cspell/cspell-types@8.18.1': {} + '@cspell/cspell-types@8.19.4': {} '@cspell/dict-ada@4.1.0': {} @@ -12826,7 +13002,7 @@ snapshots: dependencies: '@cspell/dict-shell': 1.1.0 - '@cspell/dict-companies@3.1.14': {} + '@cspell/dict-companies@3.2.1': {} '@cspell/dict-cpp@6.0.8': {} @@ -12842,19 +13018,19 @@ snapshots: '@cspell/dict-django@4.1.4': {} - '@cspell/dict-docker@1.1.13': {} + '@cspell/dict-docker@1.1.14': {} '@cspell/dict-dotnet@5.0.9': {} '@cspell/dict-elixir@4.0.7': {} - '@cspell/dict-en-common-misspellings@2.0.10': {} + '@cspell/dict-en-common-misspellings@2.0.11': {} '@cspell/dict-en-gb@1.1.33': {} - '@cspell/dict-en_us@4.4.0': {} + '@cspell/dict-en_us@4.4.9': {} - '@cspell/dict-filetypes@3.0.11': {} + '@cspell/dict-filetypes@3.0.12': {} '@cspell/dict-flutter@1.1.0': {} @@ -12866,9 +13042,9 @@ snapshots: '@cspell/dict-gaming-terms@1.1.1': {} - '@cspell/dict-git@3.0.4': {} + '@cspell/dict-git@3.0.5': {} - '@cspell/dict-golang@6.0.20': {} + '@cspell/dict-golang@6.0.21': {} '@cspell/dict-google@1.0.8': {} @@ -12905,7 +13081,7 @@ snapshots: '@cspell/dict-node@5.0.7': {} - '@cspell/dict-npm@5.1.34': {} + '@cspell/dict-npm@5.2.4': {} '@cspell/dict-php@4.0.14': {} @@ -12913,7 +13089,7 @@ snapshots: '@cspell/dict-public-licenses@2.0.13': {} - '@cspell/dict-python@4.2.17': + '@cspell/dict-python@4.2.18': dependencies: '@cspell/dict-data-science': 2.0.8 @@ -12927,7 +13103,7 @@ snapshots: '@cspell/dict-shell@1.1.0': {} - '@cspell/dict-software-terms@5.0.5': {} + '@cspell/dict-software-terms@5.0.10': {} '@cspell/dict-sql@2.2.0': {} @@ -12941,16 +13117,16 @@ snapshots: '@cspell/dict-vue@3.0.4': {} - '@cspell/dynamic-import@8.18.1': + '@cspell/dynamic-import@8.19.4': dependencies: - '@cspell/url': 8.18.1 + '@cspell/url': 8.19.4 import-meta-resolve: 4.1.0 - '@cspell/filetypes@8.18.1': {} + '@cspell/filetypes@8.19.4': {} - '@cspell/strong-weak-map@8.18.1': {} + '@cspell/strong-weak-map@8.19.4': {} - '@cspell/url@8.18.1': {} + '@cspell/url@8.19.4': {} '@ctrl/tinycolor@3.6.1': {} @@ -13002,7 +13178,7 @@ snapshots: '@eslint/eslintrc@2.1.4': dependencies: ajv: 6.12.6 - debug: 4.4.0(supports-color@8.1.1) + debug: 4.4.1(supports-color@8.1.1) espree: 9.6.1 globals: 13.24.0 ignore: 5.3.1 @@ -13015,7 +13191,7 @@ snapshots: '@eslint/js@8.57.1': {} - '@formatjs/cli@6.6.3': {} + '@formatjs/cli@6.7.1': {} '@formatjs/ecma402-abstract@2.3.4': dependencies: @@ -13043,7 +13219,7 @@ snapshots: dependencies: tslib: 2.8.1 - '@formatjs/intl@3.1.6(typescript@5.8.2)': + '@formatjs/intl@3.1.6(typescript@5.8.3)': dependencies: '@formatjs/ecma402-abstract': 2.3.4 '@formatjs/fast-memoize': 2.2.7 @@ -13051,15 +13227,15 @@ snapshots: intl-messageformat: 10.7.16 tslib: 2.8.1 optionalDependencies: - typescript: 5.8.2 + typescript: 5.8.3 - '@google-ai/generativelanguage@3.1.0': + '@google-ai/generativelanguage@3.2.0': dependencies: google-gax: 5.0.1-rc.0 transitivePeerDependencies: - supports-color - '@google-cloud/bigquery@7.9.3(encoding@0.1.13)': + '@google-cloud/bigquery@7.9.4(encoding@0.1.13)': dependencies: '@google-cloud/common': 5.0.2(encoding@0.1.13) '@google-cloud/paginator': 5.0.2 @@ -13163,7 +13339,7 @@ snapshots: '@humanwhocodes/config-array@0.13.0': dependencies: '@humanwhocodes/object-schema': 2.0.3 - debug: 4.4.0(supports-color@8.1.1) + debug: 4.4.1(supports-color@8.1.1) minimatch: 3.1.2 transitivePeerDependencies: - supports-color @@ -13179,7 +13355,7 @@ snapshots: '@antfu/install-pkg': 1.0.0 '@antfu/utils': 8.1.1 '@iconify/types': 2.0.0 - debug: 4.4.0(supports-color@8.1.1) + debug: 4.4.1(supports-color@8.1.1) globals: 15.15.0 kolorist: 1.8.0 local-pkg: 1.1.1 @@ -13191,6 +13367,8 @@ snapshots: dependencies: react: 18.3.1 + '@iovalkey/commands@0.1.0': {} + '@isaacs/cliui@8.0.2': dependencies: string-width: 5.1.2 @@ -13215,7 +13393,7 @@ snapshots: '@jest/console@29.7.0': dependencies: '@jest/types': 29.6.3 - '@types/node': 18.19.86 + '@types/node': 22.15.21 chalk: 4.1.2 jest-message-util: 29.7.0 jest-util: 29.7.0 @@ -13228,14 +13406,14 @@ snapshots: '@jest/test-result': 29.7.0 '@jest/transform': 29.7.0 '@jest/types': 29.6.3 - '@types/node': 18.19.86 + '@types/node': 22.15.21 ansi-escapes: 4.3.2 chalk: 4.1.2 ci-info: 3.9.0 exit: 0.1.2 graceful-fs: 4.2.11 jest-changed-files: 29.7.0 - jest-config: 29.7.0(@types/node@18.19.86) + jest-config: 29.7.0(@types/node@22.15.21) jest-haste-map: 29.7.0 jest-message-util: 29.7.0 jest-regex-util: 29.6.3 @@ -13260,7 +13438,7 @@ snapshots: dependencies: '@jest/fake-timers': 29.7.0 '@jest/types': 29.6.3 - '@types/node': 18.19.86 + '@types/node': 22.15.21 jest-mock: 29.7.0 '@jest/expect-utils@29.7.0': @@ -13278,7 +13456,7 @@ snapshots: dependencies: '@jest/types': 29.6.3 '@sinonjs/fake-timers': 10.3.0 - '@types/node': 18.19.86 + '@types/node': 22.15.21 jest-message-util: 29.7.0 jest-mock: 29.7.0 jest-util: 29.7.0 @@ -13300,7 +13478,7 @@ snapshots: '@jest/transform': 29.7.0 '@jest/types': 29.6.3 '@jridgewell/trace-mapping': 0.3.25 - '@types/node': 18.19.86 + '@types/node': 22.15.21 chalk: 4.1.2 collect-v8-coverage: 1.0.2 exit: 0.1.2 @@ -13369,7 +13547,7 @@ snapshots: dependencies: '@types/istanbul-lib-coverage': 2.0.6 '@types/istanbul-reports': 3.0.1 - '@types/node': 18.19.86 + '@types/node': 18.19.103 '@types/yargs': 15.0.19 chalk: 4.1.2 @@ -13378,7 +13556,7 @@ snapshots: '@jest/schemas': 29.6.3 '@types/istanbul-lib-coverage': 2.0.6 '@types/istanbul-reports': 3.0.1 - '@types/node': 18.19.86 + '@types/node': 22.15.21 '@types/yargs': 17.0.24 chalk: 4.1.2 @@ -13437,7 +13615,7 @@ snapshots: '@lumino/messaging': 2.0.3 '@lumino/widgets': 1.37.2(crypto@1.0.1) '@types/backbone': 1.4.23 - '@types/lodash': 4.17.9 + '@types/lodash': 4.17.17 backbone: 1.2.3 base64-js: 1.5.1 jquery: 3.7.1 @@ -13455,7 +13633,7 @@ snapshots: '@lumino/messaging': 1.10.3 '@lumino/widgets': 1.37.2(crypto@1.0.1) '@types/backbone': 1.4.14 - '@types/lodash': 4.17.9 + '@types/lodash': 4.17.17 backbone: 1.4.0 jquery: 3.7.1 lodash: 4.17.21 @@ -13526,7 +13704,7 @@ snapshots: '@lumino/polling': 2.1.4 '@lumino/properties': 2.0.3 '@lumino/signaling': 2.1.4 - ws: 8.18.1 + ws: 8.18.2 transitivePeerDependencies: - bufferutil - react @@ -13553,24 +13731,24 @@ snapshots: '@lumino/properties': 2.0.3 '@lumino/signaling': 2.1.4 - '@langchain/anthropic@0.3.18(@langchain/core@0.3.46(openai@4.95.1(encoding@0.1.13)(ws@8.18.1)(zod@3.24.2)))(encoding@0.1.13)': + '@langchain/anthropic@0.3.20(@langchain/core@0.3.56(openai@4.102.0(encoding@0.1.13)(ws@8.18.2)(zod@3.24.2)))(encoding@0.1.13)': dependencies: - '@anthropic-ai/sdk': 0.37.0(encoding@0.1.13) - '@langchain/core': 0.3.46(openai@4.95.1(encoding@0.1.13)(ws@8.18.1)(zod@3.24.2)) + '@anthropic-ai/sdk': 0.39.0(encoding@0.1.13) + '@langchain/core': 0.3.56(openai@4.102.0(encoding@0.1.13)(ws@8.18.2)(zod@3.24.2)) fast-xml-parser: 4.5.3 zod: 3.24.2 zod-to-json-schema: 3.24.5(zod@3.24.2) transitivePeerDependencies: - encoding - '@langchain/core@0.3.46(openai@4.95.1(encoding@0.1.13)(ws@8.18.1)(zod@3.24.2))': + '@langchain/core@0.3.56(openai@4.102.0(encoding@0.1.13)(ws@8.18.2)(zod@3.24.2))': dependencies: '@cfworker/json-schema': 4.1.1 ansi-styles: 5.2.0 camelcase: 6.3.0 decamelize: 1.2.0 js-tiktoken: 1.0.19 - langsmith: 0.3.20(openai@4.95.1(encoding@0.1.13)(ws@8.18.1)(zod@3.24.2)) + langsmith: 0.3.29(openai@4.102.0(encoding@0.1.13)(ws@8.18.2)(zod@3.24.2)) mustache: 4.2.0 p-queue: 6.6.2 p-retry: 4.6.2 @@ -13580,36 +13758,36 @@ snapshots: transitivePeerDependencies: - openai - '@langchain/google-genai@0.2.4(@langchain/core@0.3.46(openai@4.95.1(encoding@0.1.13)(ws@8.18.1)(zod@3.24.2)))(zod@3.24.2)': + '@langchain/google-genai@0.2.9(@langchain/core@0.3.56(openai@4.102.0(encoding@0.1.13)(ws@8.18.2)(zod@3.24.2)))(zod@3.24.2)': dependencies: '@google/generative-ai': 0.24.0 - '@langchain/core': 0.3.46(openai@4.95.1(encoding@0.1.13)(ws@8.18.1)(zod@3.24.2)) + '@langchain/core': 0.3.56(openai@4.102.0(encoding@0.1.13)(ws@8.18.2)(zod@3.24.2)) uuid: 11.1.0 zod-to-json-schema: 3.24.5(zod@3.24.2) transitivePeerDependencies: - zod - '@langchain/mistralai@0.2.0(@langchain/core@0.3.46(openai@4.95.1(encoding@0.1.13)(ws@8.18.1)(zod@3.24.2)))': + '@langchain/mistralai@0.2.0(@langchain/core@0.3.56(openai@4.102.0(encoding@0.1.13)(ws@8.18.2)(zod@3.24.2)))': dependencies: - '@langchain/core': 0.3.46(openai@4.95.1(encoding@0.1.13)(ws@8.18.1)(zod@3.24.2)) + '@langchain/core': 0.3.56(openai@4.102.0(encoding@0.1.13)(ws@8.18.2)(zod@3.24.2)) '@mistralai/mistralai': 1.5.2(zod@3.24.2) uuid: 10.0.0 zod: 3.24.2 zod-to-json-schema: 3.24.5(zod@3.24.2) - '@langchain/ollama@0.2.0(@langchain/core@0.3.46(openai@4.95.1(encoding@0.1.13)(ws@8.18.1)(zod@3.24.2)))': + '@langchain/ollama@0.2.0(@langchain/core@0.3.56(openai@4.102.0(encoding@0.1.13)(ws@8.18.2)(zod@3.24.2)))': dependencies: - '@langchain/core': 0.3.46(openai@4.95.1(encoding@0.1.13)(ws@8.18.1)(zod@3.24.2)) + '@langchain/core': 0.3.56(openai@4.102.0(encoding@0.1.13)(ws@8.18.2)(zod@3.24.2)) ollama: 0.5.15 uuid: 10.0.0 zod: 3.24.2 zod-to-json-schema: 3.24.5(zod@3.24.2) - '@langchain/openai@0.5.6(@langchain/core@0.3.46(openai@4.95.1(encoding@0.1.13)(ws@8.18.1)(zod@3.24.2)))(encoding@0.1.13)(ws@8.18.1)': + '@langchain/openai@0.5.10(@langchain/core@0.3.56(openai@4.102.0(encoding@0.1.13)(ws@8.18.2)(zod@3.24.2)))(encoding@0.1.13)(ws@8.18.2)': dependencies: - '@langchain/core': 0.3.46(openai@4.95.1(encoding@0.1.13)(ws@8.18.1)(zod@3.24.2)) + '@langchain/core': 0.3.56(openai@4.102.0(encoding@0.1.13)(ws@8.18.2)(zod@3.24.2)) js-tiktoken: 1.0.19 - openai: 4.95.1(encoding@0.1.13)(ws@8.18.1)(zod@3.24.2) + openai: 4.102.0(encoding@0.1.13)(ws@8.18.2)(zod@3.24.2) zod: 3.24.2 zod-to-json-schema: 3.24.5(zod@3.24.2) transitivePeerDependencies: @@ -13810,13 +13988,13 @@ snapshots: dependencies: langium: 3.3.1 - '@microlink/react-json-view@1.26.1(@types/react@18.3.10)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + '@microlink/react-json-view@1.26.2(@types/react@18.3.22)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': dependencies: react: 18.3.1 react-base16-styling: 0.9.1 react-dom: 18.3.1(react@18.3.1) react-lifecycles-compat: 3.0.4 - react-textarea-autosize: 8.5.9(@types/react@18.3.10)(react@18.3.1) + react-textarea-autosize: 8.5.9(@types/react@18.3.22)(react@18.3.1) transitivePeerDependencies: - '@types/react' @@ -13825,30 +14003,34 @@ snapshots: zod: 3.24.2 zod-to-json-schema: 3.24.5(zod@3.24.2) - '@module-federation/error-codes@0.11.2': {} + '@module-federation/error-codes@0.13.1': {} - '@module-federation/runtime-core@0.11.2': + '@module-federation/runtime-core@0.13.1': dependencies: - '@module-federation/error-codes': 0.11.2 - '@module-federation/sdk': 0.11.2 + '@module-federation/error-codes': 0.13.1 + '@module-federation/sdk': 0.13.1 - '@module-federation/runtime-tools@0.11.2': + '@module-federation/runtime-tools@0.13.1': dependencies: - '@module-federation/runtime': 0.11.2 - '@module-federation/webpack-bundler-runtime': 0.11.2 + '@module-federation/runtime': 0.13.1 + '@module-federation/webpack-bundler-runtime': 0.13.1 - '@module-federation/runtime@0.11.2': + '@module-federation/runtime@0.13.1': dependencies: - '@module-federation/error-codes': 0.11.2 - '@module-federation/runtime-core': 0.11.2 - '@module-federation/sdk': 0.11.2 + '@module-federation/error-codes': 0.13.1 + '@module-federation/runtime-core': 0.13.1 + '@module-federation/sdk': 0.13.1 - '@module-federation/sdk@0.11.2': {} + '@module-federation/sdk@0.13.1': {} - '@module-federation/webpack-bundler-runtime@0.11.2': + '@module-federation/webpack-bundler-runtime@0.13.1': dependencies: - '@module-federation/runtime': 0.11.2 - '@module-federation/sdk': 0.11.2 + '@module-federation/runtime': 0.13.1 + '@module-federation/sdk': 0.13.1 + + '@msgpack/msgpack@2.8.0': {} + + '@msgpack/msgpack@3.1.1': {} '@napi-rs/canvas-android-arm64@0.1.69': optional: true @@ -13896,47 +14078,15 @@ snapshots: '@napi-rs/triples@1.2.0': {} - '@nats-io/jetstream@3.0.0': + '@nestjs/axios@4.0.0(@nestjs/common@11.1.1(reflect-metadata@0.2.2)(rxjs@7.8.2))(axios@1.9.0)(rxjs@7.8.2)': dependencies: - '@nats-io/nats-core': 3.0.0 - - '@nats-io/jwt@0.0.10-5': - dependencies: - '@nats-io/nkeys': 1.2.0-8 - - '@nats-io/kv@3.0.0': - dependencies: - '@nats-io/jetstream': 3.0.0 - '@nats-io/nats-core': 3.0.0 - - '@nats-io/nats-core@3.0.0': - dependencies: - '@nats-io/nkeys': 2.0.3 - '@nats-io/nuid': 2.0.3 - - '@nats-io/nkeys@1.2.0-8': - dependencies: - tweetnacl: 1.0.3 - - '@nats-io/nkeys@2.0.3': - dependencies: - tweetnacl: 1.0.3 - - '@nats-io/nuid@2.0.3': {} - - '@nats-io/services@3.0.0': - dependencies: - '@nats-io/nats-core': 3.0.0 - - '@nestjs/axios@4.0.0(@nestjs/common@11.0.20(reflect-metadata@0.2.2)(rxjs@7.8.2))(axios@1.8.4)(rxjs@7.8.2)': - dependencies: - '@nestjs/common': 11.0.20(reflect-metadata@0.2.2)(rxjs@7.8.2) - axios: 1.8.4 + '@nestjs/common': 11.1.1(reflect-metadata@0.2.2)(rxjs@7.8.2) + axios: 1.9.0 rxjs: 7.8.2 - '@nestjs/common@11.0.20(reflect-metadata@0.2.2)(rxjs@7.8.2)': + '@nestjs/common@11.1.1(reflect-metadata@0.2.2)(rxjs@7.8.2)': dependencies: - file-type: 20.4.1 + file-type: 20.5.0 iterare: 1.2.1 load-esm: 1.0.2 reflect-metadata: 0.2.2 @@ -13946,9 +14096,9 @@ snapshots: transitivePeerDependencies: - supports-color - '@nestjs/core@11.0.20(@nestjs/common@11.0.20(reflect-metadata@0.2.2)(rxjs@7.8.2))(reflect-metadata@0.2.2)(rxjs@7.8.2)': + '@nestjs/core@11.1.1(@nestjs/common@11.1.1(reflect-metadata@0.2.2)(rxjs@7.8.2))(reflect-metadata@0.2.2)(rxjs@7.8.2)': dependencies: - '@nestjs/common': 11.0.20(reflect-metadata@0.2.2)(rxjs@7.8.2) + '@nestjs/common': 11.1.1(reflect-metadata@0.2.2)(rxjs@7.8.2) '@nuxt/opencollective': 0.4.1 fast-safe-stringify: 2.1.1 iterare: 1.2.1 @@ -14001,7 +14151,7 @@ snapshots: '@types/xml2js': 0.4.14 '@xmldom/is-dom-node': 1.0.1 '@xmldom/xmldom': 0.8.10 - debug: 4.4.0(supports-color@8.1.1) + debug: 4.4.1(supports-color@8.1.1) xml-crypto: 6.1.0 xml-encryption: 3.1.0 xml2js: 0.6.2 @@ -14013,7 +14163,7 @@ snapshots: '@node-saml/passport-saml@5.0.1': dependencies: '@node-saml/node-saml': 5.0.1 - '@types/express': 4.17.21 + '@types/express': 4.17.22 '@types/passport': 1.0.17 '@types/passport-strategy': 0.2.38 passport: 0.7.0 @@ -14082,13 +14232,13 @@ snapshots: '@oozcitak/util@8.3.8': {} - '@openapitools/openapi-generator-cli@2.19.1(encoding@0.1.13)': + '@openapitools/openapi-generator-cli@2.20.2(encoding@0.1.13)': dependencies: - '@nestjs/axios': 4.0.0(@nestjs/common@11.0.20(reflect-metadata@0.2.2)(rxjs@7.8.2))(axios@1.8.4)(rxjs@7.8.2) - '@nestjs/common': 11.0.20(reflect-metadata@0.2.2)(rxjs@7.8.2) - '@nestjs/core': 11.0.20(@nestjs/common@11.0.20(reflect-metadata@0.2.2)(rxjs@7.8.2))(reflect-metadata@0.2.2)(rxjs@7.8.2) + '@nestjs/axios': 4.0.0(@nestjs/common@11.1.1(reflect-metadata@0.2.2)(rxjs@7.8.2))(axios@1.9.0)(rxjs@7.8.2) + '@nestjs/common': 11.1.1(reflect-metadata@0.2.2)(rxjs@7.8.2) + '@nestjs/core': 11.1.1(@nestjs/common@11.1.1(reflect-metadata@0.2.2)(rxjs@7.8.2))(reflect-metadata@0.2.2)(rxjs@7.8.2) '@nuxtjs/opencollective': 0.3.2(encoding@0.1.13) - axios: 1.8.4 + axios: 1.9.0 chalk: 4.1.2 commander: 8.3.0 compare-versions: 4.1.4 @@ -14403,50 +14553,50 @@ snapshots: react: 18.3.1 react-is: 18.3.1 - '@rspack/binding-darwin-arm64@1.3.4': + '@rspack/binding-darwin-arm64@1.3.11': optional: true - '@rspack/binding-darwin-x64@1.3.4': + '@rspack/binding-darwin-x64@1.3.11': optional: true - '@rspack/binding-linux-arm64-gnu@1.3.4': + '@rspack/binding-linux-arm64-gnu@1.3.11': optional: true - '@rspack/binding-linux-arm64-musl@1.3.4': + '@rspack/binding-linux-arm64-musl@1.3.11': optional: true - '@rspack/binding-linux-x64-gnu@1.3.4': + '@rspack/binding-linux-x64-gnu@1.3.11': optional: true - '@rspack/binding-linux-x64-musl@1.3.4': + '@rspack/binding-linux-x64-musl@1.3.11': optional: true - '@rspack/binding-win32-arm64-msvc@1.3.4': + '@rspack/binding-win32-arm64-msvc@1.3.11': optional: true - '@rspack/binding-win32-ia32-msvc@1.3.4': + '@rspack/binding-win32-ia32-msvc@1.3.11': optional: true - '@rspack/binding-win32-x64-msvc@1.3.4': + '@rspack/binding-win32-x64-msvc@1.3.11': optional: true - '@rspack/binding@1.3.4': + '@rspack/binding@1.3.11': optionalDependencies: - '@rspack/binding-darwin-arm64': 1.3.4 - '@rspack/binding-darwin-x64': 1.3.4 - '@rspack/binding-linux-arm64-gnu': 1.3.4 - '@rspack/binding-linux-arm64-musl': 1.3.4 - '@rspack/binding-linux-x64-gnu': 1.3.4 - '@rspack/binding-linux-x64-musl': 1.3.4 - '@rspack/binding-win32-arm64-msvc': 1.3.4 - '@rspack/binding-win32-ia32-msvc': 1.3.4 - '@rspack/binding-win32-x64-msvc': 1.3.4 - - '@rspack/cli@1.3.4(@rspack/core@1.3.4(@swc/helpers@0.5.5))(@types/express@4.17.21)(webpack@5.99.5)': + '@rspack/binding-darwin-arm64': 1.3.11 + '@rspack/binding-darwin-x64': 1.3.11 + '@rspack/binding-linux-arm64-gnu': 1.3.11 + '@rspack/binding-linux-arm64-musl': 1.3.11 + '@rspack/binding-linux-x64-gnu': 1.3.11 + '@rspack/binding-linux-x64-musl': 1.3.11 + '@rspack/binding-win32-arm64-msvc': 1.3.11 + '@rspack/binding-win32-ia32-msvc': 1.3.11 + '@rspack/binding-win32-x64-msvc': 1.3.11 + + '@rspack/cli@1.3.11(@rspack/core@1.3.11(@swc/helpers@0.5.5))(@types/express@4.17.22)(webpack@5.99.5)': dependencies: '@discoveryjs/json-ext': 0.5.7 - '@rspack/core': 1.3.4(@swc/helpers@0.5.5) - '@rspack/dev-server': 1.1.1(@rspack/core@1.3.4(@swc/helpers@0.5.5))(@types/express@4.17.21)(webpack@5.99.5) + '@rspack/core': 1.3.11(@swc/helpers@0.5.5) + '@rspack/dev-server': 1.1.2(@rspack/core@1.3.11(@swc/helpers@0.5.5))(@types/express@4.17.22)(webpack@5.99.5) colorette: 2.0.20 exit-hook: 4.0.0 interpret: 3.1.1 @@ -14462,26 +14612,23 @@ snapshots: - webpack - webpack-cli - '@rspack/core@1.3.4(@swc/helpers@0.5.5)': + '@rspack/core@1.3.11(@swc/helpers@0.5.5)': dependencies: - '@module-federation/runtime-tools': 0.11.2 - '@rspack/binding': 1.3.4 + '@module-federation/runtime-tools': 0.13.1 + '@rspack/binding': 1.3.11 '@rspack/lite-tapable': 1.0.1 - caniuse-lite: 1.0.30001713 + caniuse-lite: 1.0.30001718 optionalDependencies: '@swc/helpers': 0.5.5 - '@rspack/dev-server@1.1.1(@rspack/core@1.3.4(@swc/helpers@0.5.5))(@types/express@4.17.21)(webpack@5.99.5)': + '@rspack/dev-server@1.1.2(@rspack/core@1.3.11(@swc/helpers@0.5.5))(@types/express@4.17.22)(webpack@5.99.5)': dependencies: - '@rspack/core': 1.3.4(@swc/helpers@0.5.5) + '@rspack/core': 1.3.11(@swc/helpers@0.5.5) chokidar: 3.6.0 - express: 4.21.2 - http-proxy-middleware: 2.0.9(@types/express@4.17.21) - mime-types: 2.1.35 + http-proxy-middleware: 2.0.9(@types/express@4.17.22) p-retry: 6.2.1 - webpack-dev-middleware: 7.4.2(webpack@5.99.5) webpack-dev-server: 5.2.0(webpack@5.99.5) - ws: 8.18.1 + ws: 8.18.2 transitivePeerDependencies: - '@types/express' - bufferutil @@ -14493,7 +14640,7 @@ snapshots: '@rspack/lite-tapable@1.0.1': {} - '@rspack/plugin-react-refresh@1.2.0(react-refresh@0.14.2)(webpack-hot-middleware@2.26.1)': + '@rspack/plugin-react-refresh@1.4.3(react-refresh@0.14.2)(webpack-hot-middleware@2.26.1)': dependencies: error-stack-parser: 2.1.4 html-entities: 2.6.0 @@ -14504,7 +14651,7 @@ snapshots: '@sendgrid/client@8.1.5': dependencies: '@sendgrid/helpers': 8.0.0 - axios: 1.8.4 + axios: 1.9.0 transitivePeerDependencies: - debug @@ -14552,9 +14699,19 @@ snapshots: '@sinonjs/text-encoding@0.7.3': {} + '@socket.io/component-emitter@3.1.2': {} + + '@socket.io/redis-streams-adapter@0.2.2(socket.io-adapter@2.5.5)': + dependencies: + '@msgpack/msgpack': 2.8.0 + debug: 4.3.7(supports-color@9.4.0) + socket.io-adapter: 2.5.5(supports-color@9.4.0) + transitivePeerDependencies: + - supports-color + '@speed-highlight/core@1.2.7': {} - '@stripe/react-stripe-js@3.6.0(@stripe/stripe-js@5.10.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + '@stripe/react-stripe-js@3.7.0(@stripe/stripe-js@5.10.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': dependencies: '@stripe/stripe-js': 5.10.0 prop-types: 15.8.1 @@ -14572,7 +14729,7 @@ snapshots: '@tokenizer/inflate@0.2.7': dependencies: - debug: 4.4.0(supports-color@8.1.1) + debug: 4.4.1(supports-color@8.1.1) fflate: 0.8.2 token-types: 6.0.0 transitivePeerDependencies: @@ -14622,48 +14779,48 @@ snapshots: '@types/babel__core@7.20.5': dependencies: '@babel/parser': 7.27.0 - '@babel/types': 7.27.0 + '@babel/types': 7.27.1 '@types/babel__generator': 7.6.8 '@types/babel__template': 7.4.4 '@types/babel__traverse': 7.20.5 '@types/babel__generator@7.6.8': dependencies: - '@babel/types': 7.27.0 + '@babel/types': 7.27.1 '@types/babel__template@7.4.4': dependencies: '@babel/parser': 7.27.0 - '@babel/types': 7.27.0 + '@babel/types': 7.27.1 '@types/babel__traverse@7.20.5': dependencies: - '@babel/types': 7.27.0 + '@babel/types': 7.27.1 '@types/backbone@1.4.14': dependencies: - '@types/jquery': 3.5.30 + '@types/jquery': 3.5.32 '@types/underscore': 1.13.0 '@types/backbone@1.4.23': dependencies: - '@types/jquery': 3.5.30 + '@types/jquery': 3.5.32 '@types/underscore': 1.13.0 '@types/base16@1.0.5': {} '@types/better-sqlite3@7.6.13': dependencies: - '@types/node': 18.19.86 + '@types/node': 22.15.21 '@types/body-parser@1.19.5': dependencies: '@types/connect': 3.4.35 - '@types/node': 18.19.86 + '@types/node': 18.19.103 '@types/bonjour@3.5.13': dependencies: - '@types/node': 18.19.86 + '@types/node': 18.19.103 '@types/caseless@0.12.5': {} @@ -14675,17 +14832,21 @@ snapshots: '@types/connect-history-api-fallback@1.5.4': dependencies: - '@types/express-serve-static-core': 4.19.0 - '@types/node': 18.19.86 + '@types/express-serve-static-core': 5.0.6 + '@types/node': 18.19.103 '@types/connect@3.4.35': dependencies: - '@types/node': 18.19.86 + '@types/node': 18.19.103 '@types/cookie@0.3.3': {} '@types/cookie@0.6.0': {} + '@types/cors@2.8.18': + dependencies: + '@types/node': 18.19.103 + '@types/d3-array@3.2.1': {} '@types/d3-axis@3.0.6': @@ -14830,16 +14991,23 @@ snapshots: '@types/express-serve-static-core@4.19.0': dependencies: - '@types/node': 18.19.86 + '@types/node': 18.19.103 '@types/qs': 6.9.17 '@types/range-parser': 1.2.7 '@types/send': 0.17.4 + '@types/express-serve-static-core@5.0.6': + dependencies: + '@types/node': 18.19.103 + '@types/qs': 6.14.0 + '@types/range-parser': 1.2.7 + '@types/send': 0.17.4 + '@types/express-session@1.18.1': dependencies: - '@types/express': 4.17.21 + '@types/express': 4.17.22 - '@types/express@4.17.21': + '@types/express@4.17.22': dependencies: '@types/body-parser': 1.19.5 '@types/express-serve-static-core': 4.19.0 @@ -14848,7 +15016,7 @@ snapshots: '@types/formidable@3.4.5': dependencies: - '@types/node': 18.19.86 + '@types/node': 18.19.103 '@types/geojson-vt@3.2.5': dependencies: @@ -14859,11 +15027,11 @@ snapshots: '@types/glob@7.2.0': dependencies: '@types/minimatch': 5.1.2 - '@types/node': 18.19.86 + '@types/node': 18.19.103 '@types/graceful-fs@4.1.9': dependencies: - '@types/node': 18.19.86 + '@types/node': 18.19.103 '@types/hast@2.3.10': dependencies: @@ -14880,7 +15048,7 @@ snapshots: '@types/http-proxy@1.17.16': dependencies: - '@types/node': 18.19.86 + '@types/node': 18.19.103 '@types/istanbul-lib-coverage@2.0.6': {} @@ -14892,12 +15060,12 @@ snapshots: dependencies: '@types/istanbul-lib-report': 3.0.0 - '@types/jest@29.5.13': + '@types/jest@29.5.14': dependencies: expect: 29.7.0 pretty-format: 29.7.0 - '@types/jquery@3.5.30': + '@types/jquery@3.5.32': dependencies: '@types/sizzle': 2.3.3 @@ -14905,21 +15073,23 @@ snapshots: '@types/json-schema@7.0.15': {} - '@types/json-stable-stringify@1.0.36': {} + '@types/json-stable-stringify@1.2.0': + dependencies: + json-stable-stringify: 1.3.0 '@types/katex@0.16.7': {} '@types/keyv@3.1.4': dependencies: - '@types/node': 18.19.86 + '@types/node': 18.19.103 '@types/ldapjs@2.2.5': dependencies: - '@types/node': 18.19.86 + '@types/node': 18.19.103 '@types/linkify-it@5.0.0': {} - '@types/lodash@4.17.9': {} + '@types/lodash@4.17.17': {} '@types/long@4.0.2': {} @@ -14954,65 +15124,71 @@ snapshots: '@types/ms@0.7.31': {} + '@types/ms@0.7.34': {} + '@types/node-cleanup@2.1.5': {} '@types/node-fetch@2.6.12': dependencies: - '@types/node': 18.19.86 + '@types/node': 18.19.103 form-data: 4.0.2 '@types/node-forge@1.3.11': dependencies: - '@types/node': 18.19.86 + '@types/node': 18.19.103 '@types/node-zendesk@2.0.15': dependencies: - '@types/node': 18.19.86 + '@types/node': 18.19.103 - '@types/node@18.19.86': + '@types/node@18.19.103': dependencies: undici-types: 5.26.5 + '@types/node@22.15.21': + dependencies: + undici-types: 6.21.0 + '@types/node@9.6.61': {} '@types/nodemailer@6.4.17': dependencies: - '@types/node': 18.19.86 + '@types/node': 18.19.103 '@types/normalize-package-data@2.4.1': {} '@types/oauth@0.9.6': dependencies: - '@types/node': 18.19.86 + '@types/node': 18.19.103 '@types/parse5@6.0.3': {} '@types/passport-google-oauth20@2.0.16': dependencies: - '@types/express': 4.17.21 + '@types/express': 4.17.22 '@types/passport': 1.0.17 '@types/passport-oauth2': 1.4.17 '@types/passport-oauth2@1.4.17': dependencies: - '@types/express': 4.17.21 + '@types/express': 4.17.22 '@types/oauth': 0.9.6 '@types/passport': 1.0.17 '@types/passport-strategy@0.2.38': dependencies: - '@types/express': 4.17.21 + '@types/express': 4.17.22 '@types/passport': 1.0.17 '@types/passport@1.0.17': dependencies: - '@types/express': 4.17.21 + '@types/express': 4.17.22 '@types/pbf@3.0.5': {} - '@types/pg@8.11.11': + '@types/pg@8.15.2': dependencies: - '@types/node': 18.19.86 + '@types/node': 18.19.103 pg-protocol: 1.8.0 pg-types: 4.0.2 @@ -15020,24 +15196,26 @@ snapshots: '@types/primus@7.3.9': dependencies: - '@types/node': 18.19.86 + '@types/node': 18.19.103 '@types/prismjs@1.26.5': {} '@types/prop-types@15.7.13': {} + '@types/qs@6.14.0': {} + '@types/qs@6.9.17': {} '@types/range-parser@1.2.7': {} - '@types/react-dom@18.3.6(@types/react@18.3.10)': + '@types/react-dom@18.3.7(@types/react@18.3.22)': dependencies: - '@types/react': 18.3.10 + '@types/react': 18.3.22 '@types/react-redux@7.1.34': dependencies: '@types/hoist-non-react-statics': 3.3.1 - '@types/react': 18.3.10 + '@types/react': 18.3.22 hoist-non-react-statics: 3.3.2 redux: 4.2.1 @@ -15046,22 +15224,27 @@ snapshots: '@types/prop-types': 15.7.13 csstype: 3.1.3 + '@types/react@18.3.22': + dependencies: + '@types/prop-types': 15.7.13 + csstype: 3.1.3 + '@types/request@2.48.12': dependencies: '@types/caseless': 0.12.5 - '@types/node': 18.19.86 + '@types/node': 18.19.103 '@types/tough-cookie': 4.0.5 form-data: 2.5.3 '@types/responselike@1.0.3': dependencies: - '@types/node': 18.19.86 + '@types/node': 18.19.103 '@types/retry@0.12.0': {} '@types/retry@0.12.2': {} - '@types/sanitize-html@2.15.0': + '@types/sanitize-html@2.16.0': dependencies: htmlparser2: 8.0.2 @@ -15072,23 +15255,23 @@ snapshots: '@types/send@0.17.4': dependencies: '@types/mime': 1.3.5 - '@types/node': 18.19.86 + '@types/node': 18.19.103 '@types/serve-index@1.9.4': dependencies: - '@types/express': 4.17.21 + '@types/express': 4.17.22 '@types/serve-static@1.15.7': dependencies: '@types/http-errors': 2.0.4 - '@types/node': 18.19.86 + '@types/node': 18.19.103 '@types/send': 0.17.4 '@types/sizzle@2.3.3': {} '@types/sockjs@0.3.36': dependencies: - '@types/node': 18.19.86 + '@types/node': 18.19.103 '@types/stack-utils@2.0.3': {} @@ -15118,19 +15301,19 @@ snapshots: '@types/watchpack@2.4.4': dependencies: '@types/graceful-fs': 4.1.9 - '@types/node': 18.19.86 + '@types/node': 18.19.103 '@types/ws@8.18.1': dependencies: - '@types/node': 18.19.86 + '@types/node': 18.19.103 '@types/xml-encryption@1.2.4': dependencies: - '@types/node': 18.19.86 + '@types/node': 18.19.103 '@types/xml2js@0.4.14': dependencies: - '@types/node': 18.19.86 + '@types/node': 18.19.103 '@types/yargs-parser@21.0.0': {} @@ -15142,36 +15325,36 @@ snapshots: dependencies: '@types/yargs-parser': 21.0.0 - '@typescript-eslint/eslint-plugin@6.21.0(@typescript-eslint/parser@6.21.0(eslint@8.57.1)(typescript@5.8.2))(eslint@8.57.1)(typescript@5.8.2)': + '@typescript-eslint/eslint-plugin@6.21.0(@typescript-eslint/parser@6.21.0(eslint@8.57.1)(typescript@5.8.3))(eslint@8.57.1)(typescript@5.8.3)': dependencies: '@eslint-community/regexpp': 4.12.1 - '@typescript-eslint/parser': 6.21.0(eslint@8.57.1)(typescript@5.8.2) + '@typescript-eslint/parser': 6.21.0(eslint@8.57.1)(typescript@5.8.3) '@typescript-eslint/scope-manager': 6.21.0 - '@typescript-eslint/type-utils': 6.21.0(eslint@8.57.1)(typescript@5.8.2) - '@typescript-eslint/utils': 6.21.0(eslint@8.57.1)(typescript@5.8.2) + '@typescript-eslint/type-utils': 6.21.0(eslint@8.57.1)(typescript@5.8.3) + '@typescript-eslint/utils': 6.21.0(eslint@8.57.1)(typescript@5.8.3) '@typescript-eslint/visitor-keys': 6.21.0 - debug: 4.4.0(supports-color@8.1.1) + debug: 4.4.1(supports-color@8.1.1) eslint: 8.57.1 graphemer: 1.4.0 ignore: 5.3.1 natural-compare: 1.4.0 semver: 7.7.1 - ts-api-utils: 1.4.3(typescript@5.8.2) + ts-api-utils: 1.4.3(typescript@5.8.3) optionalDependencies: - typescript: 5.8.2 + typescript: 5.8.3 transitivePeerDependencies: - supports-color - '@typescript-eslint/parser@6.21.0(eslint@8.57.1)(typescript@5.8.2)': + '@typescript-eslint/parser@6.21.0(eslint@8.57.1)(typescript@5.8.3)': dependencies: '@typescript-eslint/scope-manager': 6.21.0 '@typescript-eslint/types': 6.21.0 - '@typescript-eslint/typescript-estree': 6.21.0(typescript@5.8.2) + '@typescript-eslint/typescript-estree': 6.21.0(typescript@5.8.3) '@typescript-eslint/visitor-keys': 6.21.0 - debug: 4.4.0(supports-color@8.1.1) + debug: 4.4.1(supports-color@8.1.1) eslint: 8.57.1 optionalDependencies: - typescript: 5.8.2 + typescript: 5.8.3 transitivePeerDependencies: - supports-color @@ -15180,43 +15363,43 @@ snapshots: '@typescript-eslint/types': 6.21.0 '@typescript-eslint/visitor-keys': 6.21.0 - '@typescript-eslint/type-utils@6.21.0(eslint@8.57.1)(typescript@5.8.2)': + '@typescript-eslint/type-utils@6.21.0(eslint@8.57.1)(typescript@5.8.3)': dependencies: - '@typescript-eslint/typescript-estree': 6.21.0(typescript@5.8.2) - '@typescript-eslint/utils': 6.21.0(eslint@8.57.1)(typescript@5.8.2) - debug: 4.4.0(supports-color@8.1.1) + '@typescript-eslint/typescript-estree': 6.21.0(typescript@5.8.3) + '@typescript-eslint/utils': 6.21.0(eslint@8.57.1)(typescript@5.8.3) + debug: 4.4.1(supports-color@8.1.1) eslint: 8.57.1 - ts-api-utils: 1.4.3(typescript@5.8.2) + ts-api-utils: 1.4.3(typescript@5.8.3) optionalDependencies: - typescript: 5.8.2 + typescript: 5.8.3 transitivePeerDependencies: - supports-color '@typescript-eslint/types@6.21.0': {} - '@typescript-eslint/typescript-estree@6.21.0(typescript@5.8.2)': + '@typescript-eslint/typescript-estree@6.21.0(typescript@5.8.3)': dependencies: '@typescript-eslint/types': 6.21.0 '@typescript-eslint/visitor-keys': 6.21.0 - debug: 4.4.0(supports-color@8.1.1) + debug: 4.4.1(supports-color@8.1.1) globby: 11.1.0 is-glob: 4.0.3 minimatch: 9.0.3 - semver: 7.7.1 - ts-api-utils: 1.4.3(typescript@5.8.2) + semver: 7.7.2 + ts-api-utils: 1.4.3(typescript@5.8.3) optionalDependencies: - typescript: 5.8.2 + typescript: 5.8.3 transitivePeerDependencies: - supports-color - '@typescript-eslint/utils@6.21.0(eslint@8.57.1)(typescript@5.8.2)': + '@typescript-eslint/utils@6.21.0(eslint@8.57.1)(typescript@5.8.3)': dependencies: '@eslint-community/eslint-utils': 4.5.1(eslint@8.57.1) '@types/json-schema': 7.0.15 '@types/semver': 7.7.0 '@typescript-eslint/scope-manager': 6.21.0 '@typescript-eslint/types': 6.21.0 - '@typescript-eslint/typescript-estree': 6.21.0(typescript@5.8.2) + '@typescript-eslint/typescript-estree': 6.21.0(typescript@5.8.3) eslint: 8.57.1 semver: 7.7.1 transitivePeerDependencies: @@ -15228,9 +15411,9 @@ snapshots: '@typescript-eslint/types': 6.21.0 eslint-visitor-keys: 3.4.3 - '@uiw/react-textarea-code-editor@2.1.9(@babel/runtime@7.27.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + '@uiw/react-textarea-code-editor@2.1.9(@babel/runtime@7.27.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': dependencies: - '@babel/runtime': 7.27.0 + '@babel/runtime': 7.27.1 react: 18.3.1 react-dom: 18.3.1(react@18.3.1) rehype: 12.0.1 @@ -15392,7 +15575,7 @@ snapshots: agent-base@6.0.2: dependencies: - debug: 4.4.0(supports-color@8.1.1) + debug: 4.4.1(supports-color@8.1.1) transitivePeerDependencies: - supports-color @@ -15457,15 +15640,15 @@ snapshots: ansi-styles@6.2.1: {} - antd-img-crop@4.24.0(antd@5.24.7(date-fns@2.30.0)(moment@2.30.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1): + antd-img-crop@4.25.0(antd@5.25.2(date-fns@2.30.0)(moment@2.30.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1): dependencies: - antd: 5.24.7(date-fns@2.30.0)(moment@2.30.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + antd: 5.25.2(date-fns@2.30.0)(moment@2.30.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) react: 18.3.1 react-dom: 18.3.1(react@18.3.1) react-easy-crop: 5.4.1(react-dom@18.3.1(react@18.3.1))(react@18.3.1) tslib: 2.8.1 - antd@5.24.7(date-fns@2.30.0)(moment@2.30.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1): + antd@5.25.2(date-fns@2.30.0)(moment@2.30.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1): dependencies: '@ant-design/colors': 7.2.0 '@ant-design/cssinjs': 1.23.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1) @@ -15482,37 +15665,37 @@ snapshots: classnames: 2.5.1 copy-to-clipboard: 3.3.3 dayjs: 1.11.13 - rc-cascader: 3.33.1(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + rc-cascader: 3.34.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1) rc-checkbox: 3.5.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1) rc-collapse: 3.9.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1) rc-dialog: 9.6.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1) rc-drawer: 7.2.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1) rc-dropdown: 4.2.1(react-dom@18.3.1(react@18.3.1))(react@18.3.1) rc-field-form: 2.7.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - rc-image: 7.11.1(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + rc-image: 7.12.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1) rc-input: 1.8.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1) rc-input-number: 9.5.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1) rc-mentions: 2.20.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1) rc-menu: 9.16.1(react-dom@18.3.1(react@18.3.1))(react@18.3.1) rc-motion: 2.9.5(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - rc-notification: 5.6.3(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + rc-notification: 5.6.4(react-dom@18.3.1(react@18.3.1))(react@18.3.1) rc-pagination: 5.1.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1) rc-picker: 4.11.3(date-fns@2.30.0)(dayjs@1.11.13)(moment@2.30.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) rc-progress: 4.0.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1) rc-rate: 2.13.1(react-dom@18.3.1(react@18.3.1))(react@18.3.1) rc-resize-observer: 1.4.3(react-dom@18.3.1(react@18.3.1))(react@18.3.1) rc-segmented: 2.7.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - rc-select: 14.16.6(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + rc-select: 14.16.8(react-dom@18.3.1(react@18.3.1))(react@18.3.1) rc-slider: 11.1.8(react-dom@18.3.1(react@18.3.1))(react@18.3.1) rc-steps: 6.0.1(react-dom@18.3.1(react@18.3.1))(react@18.3.1) rc-switch: 4.1.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - rc-table: 7.50.4(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - rc-tabs: 15.5.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + rc-table: 7.50.5(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + rc-tabs: 15.6.1(react-dom@18.3.1(react@18.3.1))(react@18.3.1) rc-textarea: 1.10.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1) rc-tooltip: 6.4.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1) rc-tree: 5.13.1(react-dom@18.3.1(react@18.3.1))(react@18.3.1) rc-tree-select: 5.27.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - rc-upload: 4.8.1(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + rc-upload: 4.9.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1) rc-util: 5.44.4(react-dom@18.3.1(react@18.3.1))(react@18.3.1) react: 18.3.1 react-dom: 18.3.1(react@18.3.1) @@ -15549,7 +15732,7 @@ snapshots: array-includes@3.1.8: dependencies: - call-bind: 1.0.7 + call-bind: 1.0.8 define-properties: 1.2.1 es-abstract: 1.23.9 es-object-atoms: 1.1.1 @@ -15578,7 +15761,7 @@ snapshots: array.prototype.findlast@1.2.5: dependencies: - call-bind: 1.0.7 + call-bind: 1.0.8 define-properties: 1.2.1 es-abstract: 1.23.9 es-errors: 1.3.0 @@ -15601,7 +15784,7 @@ snapshots: array.prototype.tosorted@1.1.4: dependencies: - call-bind: 1.0.7 + call-bind: 1.0.8 define-properties: 1.2.1 es-abstract: 1.23.9 es-errors: 1.3.0 @@ -15614,7 +15797,7 @@ snapshots: define-properties: 1.2.1 es-abstract: 1.23.9 es-errors: 1.3.0 - get-intrinsic: 1.3.0 + get-intrinsic: 1.2.7 is-array-buffer: 3.0.5 arrify@1.0.1: {} @@ -15681,9 +15864,9 @@ snapshots: awaiting@3.0.0: {} - axios@1.8.4: + axios@1.9.0: dependencies: - follow-redirects: 1.15.9(debug@4.4.0) + follow-redirects: 1.15.9(debug@4.4.1) form-data: 4.0.2 proxy-from-env: 1.1.0 transitivePeerDependencies: @@ -15713,7 +15896,7 @@ snapshots: babel-plugin-istanbul@6.1.1: dependencies: - '@babel/helper-plugin-utils': 7.24.8 + '@babel/helper-plugin-utils': 7.27.1 '@istanbuljs/load-nyc-config': 1.1.0 '@istanbuljs/schema': 0.1.3 istanbul-lib-instrument: 5.2.1 @@ -15723,8 +15906,8 @@ snapshots: babel-plugin-jest-hoist@29.6.3: dependencies: - '@babel/template': 7.27.0 - '@babel/types': 7.27.0 + '@babel/template': 7.27.2 + '@babel/types': 7.27.1 '@types/babel__core': 7.20.5 '@types/babel__traverse': 7.20.5 @@ -15810,6 +15993,8 @@ snapshots: base64-js@1.5.1: {} + base64id@2.0.0: {} + base64url@3.0.1: {} basic-auth@2.0.1: @@ -15822,7 +16007,7 @@ snapshots: bcryptjs@2.4.3: {} - better-sqlite3@11.9.1: + better-sqlite3@11.10.0: dependencies: bindings: 1.5.0 prebuild-install: 7.1.3 @@ -15913,11 +16098,18 @@ snapshots: browserslist@4.24.4: dependencies: - caniuse-lite: 1.0.30001713 + caniuse-lite: 1.0.30001718 electron-to-chromium: 1.5.136 node-releases: 2.0.19 update-browserslist-db: 1.1.3(browserslist@4.24.4) + browserslist@4.24.5: + dependencies: + caniuse-lite: 1.0.30001718 + electron-to-chromium: 1.5.155 + node-releases: 2.0.19 + update-browserslist-db: 1.1.3(browserslist@4.24.5) + bs-logger@0.2.6: dependencies: fast-json-stable-stringify: 2.1.0 @@ -15955,8 +16147,6 @@ snapshots: dependencies: streamsearch: 1.1.0 - bytes@3.0.0: {} - bytes@3.1.2: {} cache-manager@3.6.3: @@ -15965,11 +16155,6 @@ snapshots: lodash.clonedeep: 4.5.0 lru-cache: 6.0.0 - call-bind-apply-helpers@1.0.1: - dependencies: - es-errors: 1.3.0 - function-bind: 1.1.2 - call-bind-apply-helpers@1.0.2: dependencies: es-errors: 1.3.0 @@ -15985,9 +16170,9 @@ snapshots: call-bind@1.0.8: dependencies: - call-bind-apply-helpers: 1.0.1 + call-bind-apply-helpers: 1.0.2 es-define-property: 1.0.1 - get-intrinsic: 1.2.7 + get-intrinsic: 1.3.0 set-function-length: 1.2.2 call-bound@1.0.4: @@ -16014,6 +16199,8 @@ snapshots: caniuse-lite@1.0.30001713: {} + caniuse-lite@1.0.30001718: {} + canvas-fit@1.5.0: dependencies: element-size: 1.1.1 @@ -16210,6 +16397,8 @@ snapshots: clsx@1.2.1: {} + cluster-key-slot@1.1.2: {} + co@4.6.0: {} codemirror@5.65.19: {} @@ -16348,14 +16537,14 @@ snapshots: dependencies: mime-db: 1.52.0 - compression@1.7.4: + compression@1.8.0: dependencies: - accepts: 1.3.8 - bytes: 3.0.0 + bytes: 3.1.2 compressible: 2.0.18 debug: 2.6.9 + negotiator: 0.6.4 on-headers: 1.0.2 - safe-buffer: 5.1.2 + safe-buffer: 5.2.1 vary: 1.1.2 transitivePeerDependencies: - supports-color @@ -16438,7 +16627,7 @@ snapshots: cookie@0.7.2: {} - cookie@1.0.0: {} + cookie@1.0.2: {} cookies@0.8.0: dependencies: @@ -16478,13 +16667,28 @@ snapshots: dependencies: capture-stack-trace: 1.0.2 - create-jest@29.7.0(@types/node@18.19.86): + create-jest@29.7.0(@types/node@18.19.103): + dependencies: + '@jest/types': 29.6.3 + chalk: 4.1.2 + exit: 0.1.2 + graceful-fs: 4.2.11 + jest-config: 29.7.0(@types/node@18.19.103) + jest-util: 29.7.0 + prompts: 2.4.2 + transitivePeerDependencies: + - '@types/node' + - babel-plugin-macros + - supports-color + - ts-node + + create-jest@29.7.0(@types/node@22.15.21): dependencies: '@jest/types': 29.6.3 chalk: 4.1.2 exit: 0.1.2 graceful-fs: 4.2.11 - jest-config: 29.7.0(@types/node@18.19.86) + jest-config: 29.7.0(@types/node@22.15.21) jest-util: 29.7.0 prompts: 2.4.2 transitivePeerDependencies: @@ -16518,58 +16722,58 @@ snapshots: crypto@1.0.1: {} - cspell-config-lib@8.18.1: + cspell-config-lib@8.19.4: dependencies: - '@cspell/cspell-types': 8.18.1 + '@cspell/cspell-types': 8.19.4 comment-json: 4.2.5 - yaml: 2.7.1 + yaml: 2.8.0 - cspell-dictionary@8.18.1: + cspell-dictionary@8.19.4: dependencies: - '@cspell/cspell-pipe': 8.18.1 - '@cspell/cspell-types': 8.18.1 - cspell-trie-lib: 8.18.1 + '@cspell/cspell-pipe': 8.19.4 + '@cspell/cspell-types': 8.19.4 + cspell-trie-lib: 8.19.4 fast-equals: 5.2.2 - cspell-gitignore@8.18.1: + cspell-gitignore@8.19.4: dependencies: - '@cspell/url': 8.18.1 - cspell-glob: 8.18.1 - cspell-io: 8.18.1 + '@cspell/url': 8.19.4 + cspell-glob: 8.19.4 + cspell-io: 8.19.4 - cspell-glob@8.18.1: + cspell-glob@8.19.4: dependencies: - '@cspell/url': 8.18.1 - micromatch: 4.0.8 + '@cspell/url': 8.19.4 + picomatch: 4.0.2 - cspell-grammar@8.18.1: + cspell-grammar@8.19.4: dependencies: - '@cspell/cspell-pipe': 8.18.1 - '@cspell/cspell-types': 8.18.1 + '@cspell/cspell-pipe': 8.19.4 + '@cspell/cspell-types': 8.19.4 - cspell-io@8.18.1: + cspell-io@8.19.4: dependencies: - '@cspell/cspell-service-bus': 8.18.1 - '@cspell/url': 8.18.1 + '@cspell/cspell-service-bus': 8.19.4 + '@cspell/url': 8.19.4 - cspell-lib@8.18.1: + cspell-lib@8.19.4: dependencies: - '@cspell/cspell-bundled-dicts': 8.18.1 - '@cspell/cspell-pipe': 8.18.1 - '@cspell/cspell-resolver': 8.18.1 - '@cspell/cspell-types': 8.18.1 - '@cspell/dynamic-import': 8.18.1 - '@cspell/filetypes': 8.18.1 - '@cspell/strong-weak-map': 8.18.1 - '@cspell/url': 8.18.1 + '@cspell/cspell-bundled-dicts': 8.19.4 + '@cspell/cspell-pipe': 8.19.4 + '@cspell/cspell-resolver': 8.19.4 + '@cspell/cspell-types': 8.19.4 + '@cspell/dynamic-import': 8.19.4 + '@cspell/filetypes': 8.19.4 + '@cspell/strong-weak-map': 8.19.4 + '@cspell/url': 8.19.4 clear-module: 4.1.2 comment-json: 4.2.5 - cspell-config-lib: 8.18.1 - cspell-dictionary: 8.18.1 - cspell-glob: 8.18.1 - cspell-grammar: 8.18.1 - cspell-io: 8.18.1 - cspell-trie-lib: 8.18.1 + cspell-config-lib: 8.19.4 + cspell-dictionary: 8.19.4 + cspell-glob: 8.19.4 + cspell-grammar: 8.19.4 + cspell-io: 8.19.4 + cspell-trie-lib: 8.19.4 env-paths: 3.0.0 fast-equals: 5.2.2 gensequence: 7.0.0 @@ -16579,32 +16783,31 @@ snapshots: vscode-uri: 3.1.0 xdg-basedir: 5.1.0 - cspell-trie-lib@8.18.1: + cspell-trie-lib@8.19.4: dependencies: - '@cspell/cspell-pipe': 8.18.1 - '@cspell/cspell-types': 8.18.1 + '@cspell/cspell-pipe': 8.19.4 + '@cspell/cspell-types': 8.19.4 gensequence: 7.0.0 - cspell@8.18.1: + cspell@8.19.4: dependencies: - '@cspell/cspell-json-reporter': 8.18.1 - '@cspell/cspell-pipe': 8.18.1 - '@cspell/cspell-types': 8.18.1 - '@cspell/dynamic-import': 8.18.1 - '@cspell/url': 8.18.1 + '@cspell/cspell-json-reporter': 8.19.4 + '@cspell/cspell-pipe': 8.19.4 + '@cspell/cspell-types': 8.19.4 + '@cspell/dynamic-import': 8.19.4 + '@cspell/url': 8.19.4 chalk: 5.4.1 chalk-template: 1.1.0 commander: 13.1.0 - cspell-dictionary: 8.18.1 - cspell-gitignore: 8.18.1 - cspell-glob: 8.18.1 - cspell-io: 8.18.1 - cspell-lib: 8.18.1 + cspell-dictionary: 8.19.4 + cspell-gitignore: 8.19.4 + cspell-glob: 8.19.4 + cspell-io: 8.19.4 + cspell-lib: 8.19.4 fast-json-stable-stringify: 2.1.0 file-entry-cache: 9.1.0 - get-stdin: 9.0.0 semver: 7.7.1 - tinyglobby: 0.2.12 + tinyglobby: 0.2.13 css-color-names@0.0.4: {} @@ -16630,7 +16833,7 @@ snapshots: css-global-keywords@1.0.1: {} - css-loader@7.1.2(@rspack/core@1.3.4(@swc/helpers@0.5.5))(webpack@5.99.5): + css-loader@7.1.2(@rspack/core@1.3.11(@swc/helpers@0.5.5))(webpack@5.99.5): dependencies: icss-utils: 5.1.0(postcss@8.5.3) postcss: 8.5.3 @@ -16641,7 +16844,7 @@ snapshots: postcss-value-parser: 4.2.0 semver: 7.7.1 optionalDependencies: - '@rspack/core': 1.3.4(@swc/helpers@0.5.5) + '@rspack/core': 1.3.11(@swc/helpers@0.5.5) webpack: 5.99.5 css-select@4.3.0: @@ -16935,7 +17138,7 @@ snapshots: date-fns@2.30.0: dependencies: - '@babel/runtime': 7.27.0 + '@babel/runtime': 7.27.1 dayjs@1.11.13: {} @@ -16949,13 +17152,19 @@ snapshots: dependencies: ms: 2.1.2 - debug@4.4.0(supports-color@8.1.1): + debug@4.3.7(supports-color@9.4.0): + dependencies: + ms: 2.1.3 + optionalDependencies: + supports-color: 9.4.0 + + debug@4.4.1(supports-color@8.1.1): dependencies: ms: 2.1.3 optionalDependencies: supports-color: 8.1.1 - debug@4.4.0(supports-color@9.4.0): + debug@4.4.1(supports-color@9.4.0): dependencies: ms: 2.1.3 optionalDependencies: @@ -17048,6 +17257,8 @@ snapshots: delayed-stream@1.0.0: {} + denque@2.1.0: {} + depd@1.1.2: {} depd@2.0.0: {} @@ -17248,6 +17459,8 @@ snapshots: electron-to-chromium@1.5.136: {} + electron-to-chromium@1.5.155: {} + element-size@1.1.1: {} elementary-circuits-directed-graph@1.3.1: @@ -17291,10 +17504,40 @@ snapshots: dependencies: once: 1.4.0 + engine.io-client@6.6.3(supports-color@9.4.0): + dependencies: + '@socket.io/component-emitter': 3.1.2 + debug: 4.3.7(supports-color@9.4.0) + engine.io-parser: 5.2.3 + ws: 8.17.1 + xmlhttprequest-ssl: 2.1.2 + transitivePeerDependencies: + - bufferutil + - supports-color + - utf-8-validate + + engine.io-parser@5.2.3: {} + + engine.io@6.6.4(supports-color@9.4.0): + dependencies: + '@types/cors': 2.8.18 + '@types/node': 18.19.103 + accepts: 1.3.8 + base64id: 2.0.0 + cookie: 0.7.2 + cors: 2.8.5 + debug: 4.3.7(supports-color@9.4.0) + engine.io-parser: 5.2.3 + ws: 8.17.1 + transitivePeerDependencies: + - bufferutil + - supports-color + - utf-8-validate + enhanced-resolve@5.18.1: dependencies: graceful-fs: 4.2.11 - tapable: 2.2.1 + tapable: 2.2.2 entities@2.2.0: {} @@ -17377,7 +17620,7 @@ snapshots: es-define-property@1.0.0: dependencies: - get-intrinsic: 1.2.7 + get-intrinsic: 1.2.4 es-define-property@1.0.1: {} @@ -17402,7 +17645,7 @@ snapshots: iterator.prototype: 1.1.5 safe-array-concat: 1.1.3 - es-module-lexer@1.6.0: {} + es-module-lexer@1.7.0: {} es-object-atoms@1.1.1: dependencies: @@ -17487,10 +17730,10 @@ snapshots: string-width: 4.2.3 supports-hyperlinks: 2.2.0 - eslint-plugin-prettier@5.2.6(@types/eslint@9.6.1)(eslint-config-prettier@9.1.0(eslint@8.57.1))(eslint@8.57.1)(prettier@3.3.3): + eslint-plugin-prettier@5.4.0(@types/eslint@9.6.1)(eslint-config-prettier@9.1.0(eslint@8.57.1))(eslint@8.57.1)(prettier@3.5.3): dependencies: eslint: 8.57.1 - prettier: 3.3.3 + prettier: 3.5.3 prettier-linter-helpers: 1.0.0 synckit: 0.11.3 optionalDependencies: @@ -17550,7 +17793,7 @@ snapshots: ajv: 6.12.6 chalk: 4.1.2 cross-spawn: 7.0.3 - debug: 4.4.0(supports-color@8.1.1) + debug: 4.4.1(supports-color@8.1.1) doctrine: 3.0.0 escape-string-regexp: 4.0.0 eslint-scope: 7.2.2 @@ -17671,7 +17914,7 @@ snapshots: jest-message-util: 29.7.0 jest-util: 29.7.0 - express-rate-limit@7.4.0(express@4.21.2): + express-rate-limit@7.5.0(express@4.21.2): dependencies: express: 4.21.2 @@ -17795,7 +18038,7 @@ snapshots: dependencies: bser: 2.1.1 - fdir@6.4.3(picomatch@4.0.2): + fdir@6.4.4(picomatch@4.0.2): optionalDependencies: picomatch: 4.0.2 @@ -17820,7 +18063,7 @@ snapshots: dependencies: flat-cache: 5.0.0 - file-type@20.4.1: + file-type@20.5.0: dependencies: '@tokenizer/inflate': 0.2.7 strtok3: 10.2.2 @@ -17894,9 +18137,9 @@ snapshots: dependencies: dtype: 2.0.0 - follow-redirects@1.15.9(debug@4.4.0): + follow-redirects@1.15.9(debug@4.4.1): optionalDependencies: - debug: 4.4.0(supports-color@8.1.1) + debug: 4.4.1(supports-color@8.1.1) font-atlas@2.1.0: dependencies: @@ -18056,7 +18299,7 @@ snapshots: get-intrinsic@1.2.7: dependencies: - call-bind-apply-helpers: 1.0.1 + call-bind-apply-helpers: 1.0.2 es-define-property: 1.0.1 es-errors: 1.3.0 es-object-atoms: 1.1.1 @@ -18093,8 +18336,6 @@ snapshots: dependencies: global: 4.4.0 - get-stdin@9.0.0: {} - get-stream@3.0.0: {} get-stream@6.0.1: {} @@ -18105,13 +18346,13 @@ snapshots: dependencies: call-bound: 1.0.4 es-errors: 1.3.0 - get-intrinsic: 1.3.0 + get-intrinsic: 1.2.7 get-uri@6.0.4: dependencies: basic-ftp: 5.0.5 data-uri-to-buffer: 6.0.2 - debug: 4.4.0(supports-color@8.1.1) + debug: 4.4.1(supports-color@8.1.1) transitivePeerDependencies: - supports-color @@ -18677,7 +18918,7 @@ snapshots: html-void-elements@2.0.1: {} - html-webpack-plugin@5.6.3(@rspack/core@1.3.4(@swc/helpers@0.5.5))(webpack@5.99.5): + html-webpack-plugin@5.6.3(@rspack/core@1.3.11(@swc/helpers@0.5.5))(webpack@5.99.5): dependencies: '@types/html-minifier-terser': 6.1.0 html-minifier-terser: 6.1.0 @@ -18685,7 +18926,7 @@ snapshots: pretty-error: 4.0.0 tapable: 2.2.1 optionalDependencies: - '@rspack/core': 1.3.4(@swc/helpers@0.5.5) + '@rspack/core': 1.3.11(@swc/helpers@0.5.5) webpack: 5.99.5 htmlparser2@6.1.0: @@ -18730,10 +18971,10 @@ snapshots: http-parser-js@0.5.10: {} - http-proxy-3@1.20.0: + http-proxy-3@1.20.5: dependencies: - debug: 4.4.0(supports-color@8.1.1) - follow-redirects: 1.15.9(debug@4.4.0) + debug: 4.4.1(supports-color@8.1.1) + follow-redirects: 1.15.9(debug@4.4.1) transitivePeerDependencies: - supports-color @@ -18741,18 +18982,18 @@ snapshots: dependencies: '@tootallnate/once': 2.0.0 agent-base: 6.0.2 - debug: 4.4.0(supports-color@8.1.1) + debug: 4.4.1(supports-color@8.1.1) transitivePeerDependencies: - supports-color http-proxy-agent@7.0.2: dependencies: agent-base: 7.1.3 - debug: 4.4.0(supports-color@8.1.1) + debug: 4.4.1(supports-color@8.1.1) transitivePeerDependencies: - supports-color - http-proxy-middleware@2.0.9(@types/express@4.17.21): + http-proxy-middleware@2.0.9(@types/express@4.17.22): dependencies: '@types/http-proxy': 1.17.16 http-proxy: 1.18.1 @@ -18760,14 +19001,14 @@ snapshots: is-plain-obj: 3.0.0 micromatch: 4.0.8 optionalDependencies: - '@types/express': 4.17.21 + '@types/express': 4.17.22 transitivePeerDependencies: - debug http-proxy@1.18.1: dependencies: eventemitter3: 4.0.7 - follow-redirects: 1.15.9(debug@4.4.0) + follow-redirects: 1.15.9(debug@4.4.1) requires-port: 1.0.0 transitivePeerDependencies: - debug @@ -18775,14 +19016,14 @@ snapshots: https-proxy-agent@5.0.1: dependencies: agent-base: 6.0.2 - debug: 4.4.0(supports-color@8.1.1) + debug: 4.4.1(supports-color@8.1.1) transitivePeerDependencies: - supports-color https-proxy-agent@7.0.6: dependencies: agent-base: 7.1.3 - debug: 4.4.0(supports-color@8.1.1) + debug: 4.4.1(supports-color@8.1.1) transitivePeerDependencies: - supports-color @@ -18911,6 +19152,20 @@ snapshots: dependencies: loose-envify: 1.4.0 + iovalkey@0.3.1: + dependencies: + '@iovalkey/commands': 0.1.0 + cluster-key-slot: 1.1.2 + debug: 4.4.1(supports-color@8.1.1) + denque: 2.1.0 + lodash.defaults: 4.2.0 + lodash.isarguments: 3.1.0 + redis-errors: 1.2.0 + redis-parser: 3.0.0 + standard-as-callback: 2.1.0 + transitivePeerDependencies: + - supports-color + ip-address@9.0.5: dependencies: jsbn: 1.1.0 @@ -18940,7 +19195,7 @@ snapshots: dependencies: call-bind: 1.0.8 call-bound: 1.0.4 - get-intrinsic: 1.3.0 + get-intrinsic: 1.2.7 is-arrayish@0.2.1: {} @@ -18982,7 +19237,7 @@ snapshots: is-data-view@1.0.2: dependencies: call-bound: 1.0.4 - get-intrinsic: 1.3.0 + get-intrinsic: 1.2.7 is-typed-array: 1.1.15 is-date-object@1.1.0: @@ -19136,7 +19391,7 @@ snapshots: is-weakset@2.0.4: dependencies: call-bound: 1.0.4 - get-intrinsic: 1.3.0 + get-intrinsic: 1.2.7 is-what@3.14.1: {} @@ -19178,7 +19433,7 @@ snapshots: '@babel/parser': 7.27.0 '@istanbuljs/schema': 0.1.3 istanbul-lib-coverage: 3.2.2 - semver: 7.7.1 + semver: 7.7.2 transitivePeerDependencies: - supports-color @@ -19190,7 +19445,7 @@ snapshots: istanbul-lib-source-maps@4.0.1: dependencies: - debug: 4.4.0(supports-color@8.1.1) + debug: 4.4.1(supports-color@8.1.1) istanbul-lib-coverage: 3.2.2 source-map: 0.6.1 transitivePeerDependencies: @@ -19239,7 +19494,7 @@ snapshots: '@jest/expect': 29.7.0 '@jest/test-result': 29.7.0 '@jest/types': 29.6.3 - '@types/node': 18.19.86 + '@types/node': 22.15.21 chalk: 4.1.2 co: 4.6.0 dedent: 1.5.3 @@ -19259,16 +19514,35 @@ snapshots: - babel-plugin-macros - supports-color - jest-cli@29.7.0(@types/node@18.19.86): + jest-cli@29.7.0(@types/node@18.19.103): + dependencies: + '@jest/core': 29.7.0 + '@jest/test-result': 29.7.0 + '@jest/types': 29.6.3 + chalk: 4.1.2 + create-jest: 29.7.0(@types/node@18.19.103) + exit: 0.1.2 + import-local: 3.2.0 + jest-config: 29.7.0(@types/node@18.19.103) + jest-util: 29.7.0 + jest-validate: 29.7.0 + yargs: 17.7.2 + transitivePeerDependencies: + - '@types/node' + - babel-plugin-macros + - supports-color + - ts-node + + jest-cli@29.7.0(@types/node@22.15.21): dependencies: '@jest/core': 29.7.0 '@jest/test-result': 29.7.0 '@jest/types': 29.6.3 chalk: 4.1.2 - create-jest: 29.7.0(@types/node@18.19.86) + create-jest: 29.7.0(@types/node@22.15.21) exit: 0.1.2 import-local: 3.2.0 - jest-config: 29.7.0(@types/node@18.19.86) + jest-config: 29.7.0(@types/node@22.15.21) jest-util: 29.7.0 jest-validate: 29.7.0 yargs: 17.7.2 @@ -19278,7 +19552,7 @@ snapshots: - supports-color - ts-node - jest-config@29.7.0(@types/node@18.19.86): + jest-config@29.7.0(@types/node@18.19.103): dependencies: '@babel/core': 7.26.9 '@jest/test-sequencer': 29.7.0 @@ -19303,7 +19577,37 @@ snapshots: slash: 3.0.0 strip-json-comments: 3.1.1 optionalDependencies: - '@types/node': 18.19.86 + '@types/node': 18.19.103 + transitivePeerDependencies: + - babel-plugin-macros + - supports-color + + jest-config@29.7.0(@types/node@22.15.21): + dependencies: + '@babel/core': 7.26.9 + '@jest/test-sequencer': 29.7.0 + '@jest/types': 29.6.3 + babel-jest: 29.7.0(@babel/core@7.26.9) + chalk: 4.1.2 + ci-info: 3.9.0 + deepmerge: 4.3.1 + glob: 7.2.3 + graceful-fs: 4.2.11 + jest-circus: 29.7.0 + jest-environment-node: 29.7.0 + jest-get-type: 29.6.3 + jest-regex-util: 29.6.3 + jest-resolve: 29.7.0 + jest-runner: 29.7.0 + jest-util: 29.7.0 + jest-validate: 29.7.0 + micromatch: 4.0.8 + parse-json: 5.2.0 + pretty-format: 29.7.0 + slash: 3.0.0 + strip-json-comments: 3.1.1 + optionalDependencies: + '@types/node': 22.15.21 transitivePeerDependencies: - babel-plugin-macros - supports-color @@ -19339,7 +19643,7 @@ snapshots: '@jest/environment': 29.7.0 '@jest/fake-timers': 29.7.0 '@jest/types': 29.6.3 - '@types/node': 18.19.86 + '@types/node': 22.15.21 jest-mock: 29.7.0 jest-util: 29.7.0 @@ -19351,7 +19655,7 @@ snapshots: dependencies: '@jest/types': 29.6.3 '@types/graceful-fs': 4.1.9 - '@types/node': 18.19.86 + '@types/node': 22.15.21 anymatch: 3.1.3 fb-watchman: 2.0.2 graceful-fs: 4.2.11 @@ -19409,7 +19713,7 @@ snapshots: jest-mock@29.7.0: dependencies: '@jest/types': 29.6.3 - '@types/node': 18.19.86 + '@types/node': 22.15.21 jest-util: 29.7.0 jest-pnp-resolver@1.2.3(jest-resolve@29.7.0): @@ -19446,7 +19750,7 @@ snapshots: '@jest/test-result': 29.7.0 '@jest/transform': 29.7.0 '@jest/types': 29.6.3 - '@types/node': 18.19.86 + '@types/node': 22.15.21 chalk: 4.1.2 emittery: 0.13.1 graceful-fs: 4.2.11 @@ -19474,7 +19778,7 @@ snapshots: '@jest/test-result': 29.7.0 '@jest/transform': 29.7.0 '@jest/types': 29.6.3 - '@types/node': 18.19.86 + '@types/node': 22.15.21 chalk: 4.1.2 cjs-module-lexer: 1.2.3 collect-v8-coverage: 1.0.2 @@ -19496,7 +19800,7 @@ snapshots: dependencies: '@babel/core': 7.26.9 '@babel/generator': 7.25.7 - '@babel/plugin-syntax-jsx': 7.24.7(@babel/core@7.26.9) + '@babel/plugin-syntax-jsx': 7.27.1(@babel/core@7.26.9) '@babel/plugin-syntax-typescript': 7.25.4(@babel/core@7.26.9) '@babel/types': 7.25.8 '@jest/expect-utils': 29.7.0 @@ -19513,14 +19817,14 @@ snapshots: jest-util: 29.7.0 natural-compare: 1.4.0 pretty-format: 29.7.0 - semver: 7.7.1 + semver: 7.7.2 transitivePeerDependencies: - supports-color jest-util@29.7.0: dependencies: '@jest/types': 29.6.3 - '@types/node': 18.19.86 + '@types/node': 22.15.21 chalk: 4.1.2 ci-info: 3.9.0 graceful-fs: 4.2.11 @@ -19539,7 +19843,7 @@ snapshots: dependencies: '@jest/test-result': 29.7.0 '@jest/types': 29.6.3 - '@types/node': 18.19.86 + '@types/node': 22.15.21 ansi-escapes: 4.3.2 chalk: 4.1.2 emittery: 0.13.1 @@ -19548,23 +19852,35 @@ snapshots: jest-worker@27.5.1: dependencies: - '@types/node': 18.19.86 + '@types/node': 18.19.103 merge-stream: 2.0.0 supports-color: 8.1.1 jest-worker@29.7.0: dependencies: - '@types/node': 18.19.86 + '@types/node': 22.15.21 jest-util: 29.7.0 merge-stream: 2.0.0 supports-color: 8.1.1 - jest@29.7.0(@types/node@18.19.86): + jest@29.7.0(@types/node@18.19.103): + dependencies: + '@jest/core': 29.7.0 + '@jest/types': 29.6.3 + import-local: 3.2.0 + jest-cli: 29.7.0(@types/node@18.19.103) + transitivePeerDependencies: + - '@types/node' + - babel-plugin-macros + - supports-color + - ts-node + + jest@29.7.0(@types/node@22.15.21): dependencies: '@jest/core': 29.7.0 '@jest/types': 29.6.3 import-local: 3.2.0 - jest-cli: 29.7.0(@types/node@18.19.86) + jest-cli: 29.7.0(@types/node@22.15.21) transitivePeerDependencies: - '@types/node' - babel-plugin-macros @@ -19673,9 +19989,10 @@ snapshots: json-stable-stringify-without-jsonify@1.0.1: {} - json-stable-stringify@1.1.1: + json-stable-stringify@1.3.0: dependencies: - call-bind: 1.0.7 + call-bind: 1.0.8 + call-bound: 1.0.4 isarray: 2.0.5 jsonify: 0.0.1 object-keys: 1.1.1 @@ -19717,7 +20034,7 @@ snapshots: lodash.isstring: 4.0.1 lodash.once: 4.1.1 ms: 2.1.2 - semver: 7.7.1 + semver: 7.7.2 jsx-ast-utils@3.3.5: dependencies: @@ -19808,17 +20125,17 @@ snapshots: langs@2.0.0: {} - langsmith@0.3.20(openai@4.95.1(encoding@0.1.13)(ws@8.18.1)(zod@3.24.2)): + langsmith@0.3.29(openai@4.102.0(encoding@0.1.13)(ws@8.18.2)(zod@3.24.2)): dependencies: '@types/uuid': 10.0.0 chalk: 4.1.2 console-table-printer: 2.12.1 p-queue: 6.6.2 p-retry: 4.6.2 - semver: 7.7.1 + semver: 7.7.2 uuid: 10.0.0 optionalDependencies: - openai: 4.95.1(encoding@0.1.13)(ws@8.18.1)(zod@3.24.2) + openai: 4.102.0(encoding@0.1.13)(ws@8.18.2)(zod@3.24.2) launch-editor@2.10.0: dependencies: @@ -19957,10 +20274,14 @@ snapshots: lodash.debounce@4.0.8: {} + lodash.defaults@4.2.0: {} + lodash.get@4.4.2: {} lodash.includes@4.3.0: {} + lodash.isarguments@3.1.0: {} + lodash.isboolean@3.0.3: {} lodash.isinteger@4.0.4: {} @@ -20044,7 +20365,7 @@ snapshots: make-dir@4.0.0: dependencies: - semver: 7.7.1 + semver: 7.7.2 make-error@1.3.6: {} @@ -20308,7 +20629,7 @@ snapshots: ansi-colors: 4.1.3 browser-stdout: 1.3.1 chokidar: 3.6.0 - debug: 4.4.0(supports-color@8.1.1) + debug: 4.4.1(supports-color@8.1.1) diff: 5.2.0 escape-string-regexp: 4.0.0 find-up: 5.0.0 @@ -20382,20 +20703,14 @@ snapshots: nan@2.20.0: {} + nanoid@3.3.11: {} + nanoid@3.3.8: {} napi-build-utils@2.0.0: {} native-promise-only@0.8.1: {} - nats.ws@1.30.2: - optionalDependencies: - nkeys.js: 1.1.0 - - nats@2.29.3: - dependencies: - nkeys.js: 1.1.0 - natural-compare@1.4.0: {} ncp@2.0.0: @@ -20417,6 +20732,8 @@ snapshots: negotiator@0.6.3: {} + negotiator@0.6.4: {} + neo-async@2.6.2: {} netmask@2.0.2: {} @@ -20430,7 +20747,7 @@ snapshots: - supports-color - webpack - next-rest-framework@6.0.0-beta.4(zod@3.24.2): + next-rest-framework@6.0.0-beta.4(zod@3.25.17): dependencies: chalk: 4.1.2 commander: 10.0.1 @@ -20438,18 +20755,18 @@ snapshots: lodash: 4.17.21 prettier: 3.0.2 qs: 6.11.2 - zod-to-json-schema: 3.21.4(zod@3.24.2) + zod-to-json-schema: 3.21.4(zod@3.25.17) transitivePeerDependencies: - zod next-tick@1.1.0: {} - next-translate@2.6.2(next@14.2.28(@babel/core@7.26.9)(@playwright/test@1.51.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.86.3))(react@18.3.1): + next-translate@2.6.2(next@14.2.28(@babel/core@7.26.9)(@playwright/test@1.51.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.89.0))(react@18.3.1): dependencies: - next: 14.2.28(@babel/core@7.26.9)(@playwright/test@1.51.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.86.3) + next: 14.2.28(@babel/core@7.26.9)(@playwright/test@1.51.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.89.0) react: 18.3.1 - next@14.2.28(@babel/core@7.26.9)(@playwright/test@1.51.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.86.3): + next@14.2.28(@babel/core@7.26.9)(@playwright/test@1.51.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.89.0): dependencies: '@next/env': 14.2.28 '@swc/helpers': 0.5.5 @@ -20471,7 +20788,7 @@ snapshots: '@next/swc-win32-ia32-msvc': 14.2.28 '@next/swc-win32-x64-msvc': 14.2.28 '@playwright/test': 1.51.1 - sass: 1.86.3 + sass: 1.89.0 transitivePeerDependencies: - '@babel/core' - babel-plugin-macros @@ -20484,10 +20801,6 @@ snapshots: lolex: 5.1.2 path-to-regexp: 1.9.0 - nkeys.js@1.1.0: - dependencies: - tweetnacl: 1.0.3 - no-case@3.0.4: dependencies: lower-case: 2.0.2 @@ -20495,12 +20808,11 @@ snapshots: node-abi@3.74.0: dependencies: - semver: 7.7.1 + semver: 7.7.2 node-addon-api@6.1.0: {} - node-addon-api@7.1.1: - optional: true + node-addon-api@7.1.1: {} node-cjsx@2.0.0: dependencies: @@ -20547,7 +20859,7 @@ snapshots: process: 0.11.10 uuid: 9.0.1 - node-mocks-http@1.16.2(@types/express@4.17.21)(@types/node@18.19.86): + node-mocks-http@1.17.2(@types/express@4.17.22)(@types/node@18.19.103): dependencies: accepts: 1.3.8 content-disposition: 0.5.4 @@ -20560,8 +20872,8 @@ snapshots: range-parser: 1.2.1 type-is: 1.6.18 optionalDependencies: - '@types/express': 4.17.21 - '@types/node': 18.19.86 + '@types/express': 4.17.22 + '@types/node': 18.19.103 node-releases@2.0.19: {} @@ -20573,7 +20885,7 @@ snapshots: transitivePeerDependencies: - encoding - nodemailer@6.10.0: {} + nodemailer@6.10.1: {} normalize-package-data@2.5.0: dependencies: @@ -20586,7 +20898,7 @@ snapshots: dependencies: hosted-git-info: 4.1.0 is-core-module: 2.15.1 - semver: 7.7.1 + semver: 7.7.2 validate-npm-package-license: 3.0.4 normalize-path@3.0.0: {} @@ -20654,7 +20966,7 @@ snapshots: object.fromentries@2.0.8: dependencies: - call-bind: 1.0.7 + call-bind: 1.0.8 define-properties: 1.2.1 es-abstract: 1.23.9 es-object-atoms: 1.1.1 @@ -20698,16 +21010,16 @@ snapshots: dependencies: mimic-fn: 4.0.0 - open@10.1.0: + open@10.1.2: dependencies: default-browser: 5.2.1 define-lazy-prop: 3.0.0 is-inside-container: 1.0.0 is-wsl: 3.1.0 - openai@4.95.1(encoding@0.1.13)(ws@8.18.1)(zod@3.24.2): + openai@4.102.0(encoding@0.1.13)(ws@8.18.2)(zod@3.24.2): dependencies: - '@types/node': 18.19.86 + '@types/node': 18.19.103 '@types/node-fetch': 2.6.12 abort-controller: 3.0.0 agentkeepalive: 4.6.0 @@ -20715,7 +21027,7 @@ snapshots: formdata-node: 4.4.1 node-fetch: 2.6.7(encoding@0.1.13) optionalDependencies: - ws: 8.18.1 + ws: 8.18.2 zod: 3.24.2 transitivePeerDependencies: - encoding @@ -20747,7 +21059,7 @@ snapshots: own-keys@1.0.1: dependencies: - get-intrinsic: 1.3.0 + get-intrinsic: 1.2.7 object-keys: 1.1.1 safe-push-apply: 1.0.0 @@ -20805,7 +21117,7 @@ snapshots: dependencies: '@tootallnate/quickjs-emscripten': 0.23.0 agent-base: 7.1.3 - debug: 4.4.0(supports-color@8.1.1) + debug: 4.4.1(supports-color@8.1.1) get-uri: 6.0.4 http-proxy-agent: 7.0.2 https-proxy-agent: 7.0.6 @@ -21048,18 +21360,20 @@ snapshots: performance-now@2.1.0: {} - pg-cloudflare@1.1.1: + pg-cloudflare@1.2.5: optional: true - pg-connection-string@2.7.0: {} + pg-connection-string@2.9.0: {} pg-int8@1.0.1: {} pg-numeric@1.0.2: {} - pg-pool@3.8.0(pg@8.14.1): + pg-pool@3.10.0(pg@8.16.0): dependencies: - pg: 8.14.1 + pg: 8.16.0 + + pg-protocol@1.10.0: {} pg-protocol@1.8.0: {} @@ -21081,15 +21395,15 @@ snapshots: postgres-interval: 3.0.0 postgres-range: 1.1.4 - pg@8.14.1: + pg@8.16.0: dependencies: - pg-connection-string: 2.7.0 - pg-pool: 3.8.0(pg@8.14.1) - pg-protocol: 1.8.0 + pg-connection-string: 2.9.0 + pg-pool: 3.10.0(pg@8.16.0) + pg-protocol: 1.10.0 pg-types: 2.2.0 pgpass: 1.0.5 optionalDependencies: - pg-cloudflare: 1.1.1 + pg-cloudflare: 1.2.5 pgpass@1.0.5: dependencies: @@ -21159,7 +21473,7 @@ snapshots: fsevents: 2.3.2 optional: true - plotly.js@2.35.3(@rspack/core@1.3.4(@swc/helpers@0.5.5))(mapbox-gl@1.13.3)(webpack@5.99.5): + plotly.js@2.35.3(@rspack/core@1.3.11(@swc/helpers@0.5.5))(mapbox-gl@1.13.3)(webpack@5.99.5): dependencies: '@plotly/d3': 3.8.2 '@plotly/d3-sankey': 0.7.2 @@ -21175,7 +21489,7 @@ snapshots: color-parse: 2.0.0 color-rgba: 2.1.1 country-regex: 1.1.0 - css-loader: 7.1.2(@rspack/core@1.3.4(@swc/helpers@0.5.5))(webpack@5.99.5) + css-loader: 7.1.2(@rspack/core@1.3.11(@swc/helpers@0.5.5))(webpack@5.99.5) d3-force: 1.2.1 d3-format: 1.4.5 d3-geo: 1.12.1 @@ -21236,11 +21550,10 @@ snapshots: port-get@1.0.4: {} - portfinder@1.0.32: + portfinder@1.0.37: dependencies: - async: 2.6.4 - debug: 3.2.7 - mkdirp: 0.5.6 + async: 3.2.6 + debug: 4.4.1(supports-color@8.1.1) transitivePeerDependencies: - supports-color @@ -21276,13 +21589,13 @@ snapshots: postcss@8.4.31: dependencies: - nanoid: 3.3.8 + nanoid: 3.3.11 picocolors: 1.1.1 source-map-js: 1.2.1 postcss@8.5.3: dependencies: - nanoid: 3.3.8 + nanoid: 3.3.11 picocolors: 1.1.1 source-map-js: 1.2.1 @@ -21343,7 +21656,7 @@ snapshots: prettier@3.0.2: {} - prettier@3.3.3: {} + prettier@3.5.3: {} pretty-error@4.0.0: dependencies: @@ -21430,7 +21743,7 @@ snapshots: '@protobufjs/path': 1.1.2 '@protobufjs/pool': 1.1.0 '@protobufjs/utf8': 1.1.0 - '@types/node': 18.19.86 + '@types/node': 18.19.103 long: 5.3.1 protocol-buffers-schema@3.6.0: {} @@ -21443,7 +21756,7 @@ snapshots: proxy-agent@6.5.0: dependencies: agent-base: 7.1.3 - debug: 4.4.0(supports-color@8.1.1) + debug: 4.4.1(supports-color@8.1.1) http-proxy-agent: 7.0.2 https-proxy-agent: 7.0.6 lru-cache: 7.18.3 @@ -21533,11 +21846,11 @@ snapshots: react: 18.3.1 react-dom: 18.3.1(react@18.3.1) - rc-cascader@3.33.1(react-dom@18.3.1(react@18.3.1))(react@18.3.1): + rc-cascader@3.34.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1): dependencies: '@babel/runtime': 7.27.0 classnames: 2.5.1 - rc-select: 14.16.6(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + rc-select: 14.16.8(react-dom@18.3.1(react@18.3.1))(react@18.3.1) rc-tree: 5.13.1(react-dom@18.3.1(react@18.3.1))(react@18.3.1) rc-util: 5.44.4(react-dom@18.3.1(react@18.3.1))(react@18.3.1) react: 18.3.1 @@ -21610,7 +21923,7 @@ snapshots: react-is: 16.13.1 warning: 4.0.3 - rc-image@7.11.1(react-dom@18.3.1(react@18.3.1))(react@18.3.1): + rc-image@7.12.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1): dependencies: '@babel/runtime': 7.27.0 '@rc-component/portal': 1.1.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1) @@ -21670,7 +21983,7 @@ snapshots: react: 18.3.1 react-dom: 18.3.1(react@18.3.1) - rc-notification@5.6.3(react-dom@18.3.1(react@18.3.1))(react@18.3.1): + rc-notification@5.6.4(react-dom@18.3.1(react@18.3.1))(react@18.3.1): dependencies: '@babel/runtime': 7.27.0 classnames: 2.5.1 @@ -21745,7 +22058,7 @@ snapshots: react: 18.3.1 react-dom: 18.3.1(react@18.3.1) - rc-select@14.16.6(react-dom@18.3.1(react@18.3.1))(react@18.3.1): + rc-select@14.16.8(react-dom@18.3.1(react@18.3.1))(react@18.3.1): dependencies: '@babel/runtime': 7.27.0 '@rc-component/trigger': 2.2.6(react-dom@18.3.1(react@18.3.1))(react@18.3.1) @@ -21753,7 +22066,7 @@ snapshots: rc-motion: 2.9.5(react-dom@18.3.1(react@18.3.1))(react@18.3.1) rc-overflow: 1.4.1(react-dom@18.3.1(react@18.3.1))(react@18.3.1) rc-util: 5.44.4(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - rc-virtual-list: 3.18.5(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + rc-virtual-list: 3.18.6(react-dom@18.3.1(react@18.3.1))(react@18.3.1) react: 18.3.1 react-dom: 18.3.1(react@18.3.1) @@ -21781,18 +22094,18 @@ snapshots: react: 18.3.1 react-dom: 18.3.1(react@18.3.1) - rc-table@7.50.4(react-dom@18.3.1(react@18.3.1))(react@18.3.1): + rc-table@7.50.5(react-dom@18.3.1(react@18.3.1))(react@18.3.1): dependencies: '@babel/runtime': 7.27.0 '@rc-component/context': 1.4.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1) classnames: 2.5.1 rc-resize-observer: 1.4.3(react-dom@18.3.1(react@18.3.1))(react@18.3.1) rc-util: 5.44.4(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - rc-virtual-list: 3.18.5(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + rc-virtual-list: 3.18.6(react-dom@18.3.1(react@18.3.1))(react@18.3.1) react: 18.3.1 react-dom: 18.3.1(react@18.3.1) - rc-tabs@15.5.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1): + rc-tabs@15.6.1(react-dom@18.3.1(react@18.3.1))(react@18.3.1): dependencies: '@babel/runtime': 7.27.0 classnames: 2.5.1 @@ -21827,7 +22140,7 @@ snapshots: dependencies: '@babel/runtime': 7.27.0 classnames: 2.5.1 - rc-select: 14.16.6(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + rc-select: 14.16.8(react-dom@18.3.1(react@18.3.1))(react@18.3.1) rc-tree: 5.13.1(react-dom@18.3.1(react@18.3.1))(react@18.3.1) rc-util: 5.44.4(react-dom@18.3.1(react@18.3.1))(react@18.3.1) react: 18.3.1 @@ -21843,7 +22156,7 @@ snapshots: react: 18.3.1 react-dom: 18.3.1(react@18.3.1) - rc-upload@4.8.1(react-dom@18.3.1(react@18.3.1))(react@18.3.1): + rc-upload@4.9.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1): dependencies: '@babel/runtime': 7.27.0 classnames: 2.5.1 @@ -21875,6 +22188,15 @@ snapshots: react: 18.3.1 react-dom: 18.3.1(react@18.3.1) + rc-virtual-list@3.18.6(react-dom@18.3.1(react@18.3.1))(react@18.3.1): + dependencies: + '@babel/runtime': 7.27.0 + classnames: 2.5.1 + rc-resize-observer: 1.4.3(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + rc-util: 5.44.4(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + react: 18.3.1 + react-dom: 18.3.1(react@18.3.1) + rc@1.2.8: dependencies: deep-extend: 0.6.0 @@ -21897,7 +22219,7 @@ snapshots: dependencies: '@babel/runtime': 7.27.0 '@types/base16': 1.0.5 - '@types/lodash': 4.17.9 + '@types/lodash': 4.17.17 base16: 1.0.0 color: 3.2.1 csstype: 3.1.3 @@ -21986,11 +22308,11 @@ snapshots: dependencies: react: 18.3.1 - react-intl@7.1.11(react@18.3.1)(typescript@5.8.2): + react-intl@7.1.11(react@18.3.1)(typescript@5.8.3): dependencies: '@formatjs/ecma402-abstract': 2.3.4 '@formatjs/icu-messageformat-parser': 2.11.2 - '@formatjs/intl': 3.1.6(typescript@5.8.2) + '@formatjs/intl': 3.1.6(typescript@5.8.3) '@types/hoist-non-react-statics': 3.3.1 '@types/react': 18.3.10 hoist-non-react-statics: 3.3.2 @@ -21998,7 +22320,7 @@ snapshots: react: 18.3.1 tslib: 2.8.1 optionalDependencies: - typescript: 5.8.2 + typescript: 5.8.3 react-is@16.13.1: {} @@ -22008,15 +22330,15 @@ snapshots: react-lifecycles-compat@3.0.4: {} - react-plotly.js@2.6.0(plotly.js@2.35.3(@rspack/core@1.3.4(@swc/helpers@0.5.5))(mapbox-gl@1.13.3)(webpack@5.99.5))(react@18.3.1): + react-plotly.js@2.6.0(plotly.js@2.35.3(@rspack/core@1.3.11(@swc/helpers@0.5.5))(mapbox-gl@1.13.3)(webpack@5.99.5))(react@18.3.1): dependencies: - plotly.js: 2.35.3(@rspack/core@1.3.4(@swc/helpers@0.5.5))(mapbox-gl@1.13.3)(webpack@5.99.5) + plotly.js: 2.35.3(@rspack/core@1.3.11(@swc/helpers@0.5.5))(mapbox-gl@1.13.3)(webpack@5.99.5) prop-types: 15.8.1 react: 18.3.1 react-property@2.0.0: {} - react-redux@8.1.3(@types/react-dom@18.3.6(@types/react@18.3.10))(@types/react@18.3.10)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(redux@4.2.1): + react-redux@8.1.3(@types/react-dom@18.3.7(@types/react@18.3.22))(@types/react@18.3.22)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(redux@4.2.1): dependencies: '@babel/runtime': 7.25.6 '@types/hoist-non-react-statics': 3.3.1 @@ -22026,8 +22348,8 @@ snapshots: react-is: 18.3.1 use-sync-external-store: 1.5.0(react@18.3.1) optionalDependencies: - '@types/react': 18.3.10 - '@types/react-dom': 18.3.6(@types/react@18.3.10) + '@types/react': 18.3.22 + '@types/react-dom': 18.3.7(@types/react@18.3.22) react-dom: 18.3.1(react@18.3.1) redux: 4.2.1 @@ -22050,12 +22372,12 @@ snapshots: react-shallow-renderer: 16.15.0(react@18.3.1) scheduler: 0.23.2 - react-textarea-autosize@8.5.9(@types/react@18.3.10)(react@18.3.1): + react-textarea-autosize@8.5.9(@types/react@18.3.22)(react@18.3.1): dependencies: '@babel/runtime': 7.27.0 react: 18.3.1 - use-composed-ref: 1.4.0(@types/react@18.3.10)(react@18.3.1) - use-latest: 1.3.0(@types/react@18.3.10)(react@18.3.1) + use-composed-ref: 1.4.0(@types/react@18.3.22)(react@18.3.1) + use-latest: 1.3.0(@types/react@18.3.22)(react@18.3.1) transitivePeerDependencies: - '@types/react' @@ -22063,7 +22385,7 @@ snapshots: dependencies: react: 18.3.1 - react-virtuoso@4.12.6(react-dom@18.3.1(react@18.3.1))(react@18.3.1): + react-virtuoso@4.12.7(react-dom@18.3.1(react@18.3.1))(react@18.3.1): dependencies: react: 18.3.1 react-dom: 18.3.1(react@18.3.1) @@ -22132,6 +22454,12 @@ snapshots: indent-string: 4.0.0 strip-indent: 3.0.0 + redis-errors@1.2.0: {} + + redis-parser@3.0.0: + dependencies: + redis-errors: 1.2.0 + redux@4.2.1: dependencies: '@babel/runtime': 7.25.6 @@ -22145,7 +22473,7 @@ snapshots: es-abstract: 1.23.9 es-errors: 1.3.0 es-object-atoms: 1.1.1 - get-intrinsic: 1.3.0 + get-intrinsic: 1.2.7 get-proto: 1.0.1 which-builtin-type: 1.2.1 @@ -22406,7 +22734,7 @@ snapshots: samsam@1.3.0: {} - sanitize-html@2.15.0: + sanitize-html@2.17.0: dependencies: deepmerge: 4.3.1 escape-string-regexp: 4.0.0 @@ -22415,15 +22743,15 @@ snapshots: parse-srcset: 1.0.2 postcss: 8.5.3 - sass-loader@16.0.5(@rspack/core@1.3.4(@swc/helpers@0.5.5))(sass@1.86.3)(webpack@5.99.5): + sass-loader@16.0.5(@rspack/core@1.3.11(@swc/helpers@0.5.5))(sass@1.89.0)(webpack@5.99.5): dependencies: neo-async: 2.6.2 optionalDependencies: - '@rspack/core': 1.3.4(@swc/helpers@0.5.5) - sass: 1.86.3 + '@rspack/core': 1.3.11(@swc/helpers@0.5.5) + sass: 1.89.0 webpack: 5.99.5 - sass@1.86.3: + sass@1.89.0: dependencies: chokidar: 4.0.3 immutable: 5.1.1 @@ -22450,6 +22778,13 @@ snapshots: ajv-formats: 2.1.1(ajv@8.17.1) ajv-keywords: 5.1.0(ajv@8.17.1) + schema-utils@4.3.2: + dependencies: + '@types/json-schema': 7.0.15 + ajv: 8.17.1 + ajv-formats: 2.1.1(ajv@8.17.1) + ajv-keywords: 5.1.0(ajv@8.17.1) + script-loader@0.7.2: dependencies: raw-loader: 0.5.1 @@ -22475,6 +22810,8 @@ snapshots: semver@7.7.1: {} + semver@7.7.2: {} + send@0.19.0: dependencies: debug: 2.6.9 @@ -22523,7 +22860,7 @@ snapshots: define-data-property: 1.1.4 es-errors: 1.3.0 function-bind: 1.1.2 - get-intrinsic: 1.2.7 + get-intrinsic: 1.3.0 gopd: 1.2.0 has-property-descriptors: 1.0.2 @@ -22616,22 +22953,22 @@ snapshots: dependencies: call-bound: 1.0.4 es-errors: 1.3.0 - get-intrinsic: 1.3.0 + get-intrinsic: 1.2.7 object-inspect: 1.13.4 side-channel-weakmap@1.0.2: dependencies: call-bound: 1.0.4 es-errors: 1.3.0 - get-intrinsic: 1.3.0 + get-intrinsic: 1.2.7 object-inspect: 1.13.4 side-channel-map: 1.0.1 side-channel@1.0.6: dependencies: - call-bind: 1.0.7 + call-bind: 1.0.8 es-errors: 1.3.0 - get-intrinsic: 1.2.7 + get-intrinsic: 1.3.0 object-inspect: 1.13.2 side-channel@1.1.0: @@ -22698,6 +23035,47 @@ snapshots: smart-buffer@4.2.0: {} + socket.io-adapter@2.5.5(supports-color@9.4.0): + dependencies: + debug: 4.3.7(supports-color@9.4.0) + ws: 8.17.1 + transitivePeerDependencies: + - bufferutil + - supports-color + - utf-8-validate + + socket.io-client@4.8.1(supports-color@9.4.0): + dependencies: + '@socket.io/component-emitter': 3.1.2 + debug: 4.3.7(supports-color@9.4.0) + engine.io-client: 6.6.3(supports-color@9.4.0) + socket.io-parser: 4.2.4(supports-color@9.4.0) + transitivePeerDependencies: + - bufferutil + - supports-color + - utf-8-validate + + socket.io-parser@4.2.4(supports-color@9.4.0): + dependencies: + '@socket.io/component-emitter': 3.1.2 + debug: 4.3.7(supports-color@9.4.0) + transitivePeerDependencies: + - supports-color + + socket.io@4.8.1(supports-color@9.4.0): + dependencies: + accepts: 1.3.8 + base64id: 2.0.0 + cors: 2.8.5 + debug: 4.3.7(supports-color@9.4.0) + engine.io: 6.6.4(supports-color@9.4.0) + socket.io-adapter: 2.5.5(supports-color@9.4.0) + socket.io-parser: 4.2.4(supports-color@9.4.0) + transitivePeerDependencies: + - bufferutil + - supports-color + - utf-8-validate + sockjs@0.3.24: dependencies: faye-websocket: 0.11.4 @@ -22707,7 +23085,7 @@ snapshots: socks-proxy-agent@8.0.5: dependencies: agent-base: 7.1.3 - debug: 4.4.0(supports-color@8.1.1) + debug: 4.4.1(supports-color@8.1.1) socks: 2.8.4 transitivePeerDependencies: - supports-color @@ -22758,7 +23136,7 @@ snapshots: spdy-transport@3.0.0: dependencies: - debug: 4.4.0(supports-color@8.1.1) + debug: 4.4.1(supports-color@8.1.1) detect-node: 2.1.0 hpack.js: 2.1.6 obuf: 1.1.2 @@ -22769,7 +23147,7 @@ snapshots: spdy@4.0.2: dependencies: - debug: 4.4.0(supports-color@8.1.1) + debug: 4.4.1(supports-color@8.1.1) handle-thing: 2.0.1 http-deceiver: 1.2.7 select-hose: 2.0.0 @@ -22793,6 +23171,8 @@ snapshots: stackframe@1.3.4: {} + standard-as-callback@2.1.0: {} + static-eval@2.1.1: dependencies: escodegen: 2.1.0 @@ -22942,7 +23322,7 @@ snapshots: stripe@17.7.0: dependencies: - '@types/node': 18.19.86 + '@types/node': 18.19.103 qs: 6.13.0 strnum@1.1.2: {} @@ -23050,6 +23430,8 @@ snapshots: tapable@2.2.1: {} + tapable@2.2.2: {} + tar-fs@2.1.2: dependencies: chownr: 1.1.4 @@ -23114,9 +23496,9 @@ snapshots: dependencies: '@jridgewell/trace-mapping': 0.3.25 jest-worker: 27.5.1 - schema-utils: 4.3.0 + schema-utils: 4.3.2 serialize-javascript: 6.0.2 - terser: 5.39.0 + terser: 5.39.2 webpack: 5.99.5(uglify-js@3.19.3) optionalDependencies: uglify-js: 3.19.3 @@ -23125,9 +23507,9 @@ snapshots: dependencies: '@jridgewell/trace-mapping': 0.3.25 jest-worker: 27.5.1 - schema-utils: 4.3.0 + schema-utils: 4.3.2 serialize-javascript: 6.0.2 - terser: 5.39.0 + terser: 5.39.2 webpack: 5.99.5 terser@4.8.1: @@ -23144,6 +23526,13 @@ snapshots: commander: 2.20.3 source-map-support: 0.5.21 + terser@5.39.2: + dependencies: + '@jridgewell/source-map': 0.3.6 + acorn: 8.14.1 + commander: 2.20.3 + source-map-support: 0.5.21 + test-exclude@6.0.0: dependencies: '@istanbuljs/schema': 0.1.3 @@ -23199,9 +23588,9 @@ snapshots: tinyexec@0.3.2: {} - tinyglobby@0.2.12: + tinyglobby@0.2.13: dependencies: - fdir: 6.4.3(picomatch@4.0.2) + fdir: 6.4.4(picomatch@4.0.2) picomatch: 4.0.2 tinyqueue@2.0.3: {} @@ -23259,24 +23648,45 @@ snapshots: trough@2.2.0: {} - ts-api-utils@1.4.3(typescript@5.8.2): + ts-api-utils@1.4.3(typescript@5.8.3): dependencies: - typescript: 5.8.2 + typescript: 5.8.3 ts-dedent@2.2.0: {} - ts-jest@29.2.5(@babel/core@7.26.9)(@jest/transform@29.7.0)(@jest/types@29.6.3)(babel-jest@29.7.0(@babel/core@7.26.9))(jest@29.7.0(@types/node@18.19.86))(typescript@5.8.2): + ts-jest@29.3.4(@babel/core@7.26.9)(@jest/transform@29.7.0)(@jest/types@29.6.3)(babel-jest@29.7.0(@babel/core@7.26.9))(jest@29.7.0(@types/node@18.19.103))(typescript@5.8.3): dependencies: bs-logger: 0.2.6 ejs: 3.1.10 fast-json-stable-stringify: 2.1.0 - jest: 29.7.0(@types/node@18.19.86) + jest: 29.7.0(@types/node@18.19.103) jest-util: 29.7.0 json5: 2.2.3 lodash.memoize: 4.1.2 make-error: 1.3.6 - semver: 7.6.3 - typescript: 5.8.2 + semver: 7.7.2 + type-fest: 4.41.0 + typescript: 5.8.3 + yargs-parser: 21.1.1 + optionalDependencies: + '@babel/core': 7.26.9 + '@jest/transform': 29.7.0 + '@jest/types': 29.6.3 + babel-jest: 29.7.0(@babel/core@7.26.9) + + ts-jest@29.3.4(@babel/core@7.26.9)(@jest/transform@29.7.0)(@jest/types@29.6.3)(babel-jest@29.7.0(@babel/core@7.26.9))(jest@29.7.0(@types/node@22.15.21))(typescript@5.8.3): + dependencies: + bs-logger: 0.2.6 + ejs: 3.1.10 + fast-json-stable-stringify: 2.1.0 + jest: 29.7.0(@types/node@22.15.21) + jest-util: 29.7.0 + json5: 2.2.3 + lodash.memoize: 4.1.2 + make-error: 1.3.6 + semver: 7.7.2 + type-fest: 4.41.0 + typescript: 5.8.3 yargs-parser: 21.1.1 optionalDependencies: '@babel/core': 7.26.9 @@ -23305,8 +23715,6 @@ snapshots: dependencies: safe-buffer: 5.2.1 - tweetnacl@1.0.3: {} - type-check@0.4.0: dependencies: prelude-ls: 1.2.1 @@ -23331,6 +23739,8 @@ snapshots: type-fest@4.32.0: {} + type-fest@4.41.0: {} + type-is@1.6.18: dependencies: media-typer: 0.3.0 @@ -23378,7 +23788,7 @@ snapshots: typedarray@0.0.6: {} - typescript@5.8.2: {} + typescript@5.8.3: {} uc.micro@1.0.6: {} @@ -23411,6 +23821,8 @@ snapshots: undici-types@5.26.5: {} + undici-types@6.21.0: {} + unicorn-magic@0.1.0: {} unified@10.1.2: @@ -23483,6 +23895,12 @@ snapshots: escalade: 3.2.0 picocolors: 1.1.1 + update-browserslist-db@1.1.3(browserslist@4.24.5): + dependencies: + browserslist: 4.24.5 + escalade: 3.2.0 + picocolors: 1.1.1 + update-diff@1.1.0: {} uri-js@4.4.1: @@ -23513,11 +23931,11 @@ snapshots: dependencies: react: 18.3.1 - use-composed-ref@1.4.0(@types/react@18.3.10)(react@18.3.1): + use-composed-ref@1.4.0(@types/react@18.3.22)(react@18.3.1): dependencies: react: 18.3.1 optionalDependencies: - '@types/react': 18.3.10 + '@types/react': 18.3.22 use-debounce@7.0.1(react@18.3.1): dependencies: @@ -23529,18 +23947,18 @@ snapshots: dequal: 2.0.3 react: 18.3.1 - use-isomorphic-layout-effect@1.2.0(@types/react@18.3.10)(react@18.3.1): + use-isomorphic-layout-effect@1.2.0(@types/react@18.3.22)(react@18.3.1): dependencies: react: 18.3.1 optionalDependencies: - '@types/react': 18.3.10 + '@types/react': 18.3.22 - use-latest@1.3.0(@types/react@18.3.10)(react@18.3.1): + use-latest@1.3.0(@types/react@18.3.22)(react@18.3.1): dependencies: react: 18.3.1 - use-isomorphic-layout-effect: 1.2.0(@types/react@18.3.10)(react@18.3.1) + use-isomorphic-layout-effect: 1.2.0(@types/react@18.3.22)(react@18.3.1) optionalDependencies: - '@types/react': 18.3.10 + '@types/react': 18.3.22 use-resize-observer@9.1.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1): dependencies: @@ -23688,7 +24106,7 @@ snapshots: dependencies: loose-envify: 1.4.0 - watchpack@2.4.2: + watchpack@2.4.4: dependencies: glob-to-regexp: 0.4.1 graceful-fs: 4.2.11 @@ -23759,7 +24177,7 @@ snapshots: dependencies: '@types/bonjour': 3.5.13 '@types/connect-history-api-fallback': 1.5.4 - '@types/express': 4.17.21 + '@types/express': 4.17.22 '@types/serve-index': 1.9.4 '@types/serve-static': 1.15.7 '@types/sockjs': 0.3.36 @@ -23768,22 +24186,22 @@ snapshots: bonjour-service: 1.3.0 chokidar: 3.6.0 colorette: 2.0.20 - compression: 1.7.4 + compression: 1.8.0 connect-history-api-fallback: 2.0.0 express: 4.21.2 graceful-fs: 4.2.11 - http-proxy-middleware: 2.0.9(@types/express@4.17.21) + http-proxy-middleware: 2.0.9(@types/express@4.17.22) ipaddr.js: 2.2.0 launch-editor: 2.10.0 - open: 10.1.0 + open: 10.1.2 p-retry: 6.2.1 - schema-utils: 4.3.0 + schema-utils: 4.3.2 selfsigned: 2.4.1 serve-index: 1.9.1 sockjs: 0.3.24 spdy: 4.0.2 webpack-dev-middleware: 7.4.2(webpack@5.99.5) - ws: 8.18.1 + ws: 8.18.2 optionalDependencies: webpack: 5.99.5 transitivePeerDependencies: @@ -23812,10 +24230,10 @@ snapshots: '@webassemblyjs/wasm-edit': 1.14.1 '@webassemblyjs/wasm-parser': 1.14.1 acorn: 8.14.1 - browserslist: 4.24.4 + browserslist: 4.24.5 chrome-trace-event: 1.0.4 enhanced-resolve: 5.18.1 - es-module-lexer: 1.6.0 + es-module-lexer: 1.7.0 eslint-scope: 5.1.1 events: 3.3.0 glob-to-regexp: 0.4.1 @@ -23824,10 +24242,10 @@ snapshots: loader-runner: 4.3.0 mime-types: 2.1.35 neo-async: 2.6.2 - schema-utils: 4.3.0 - tapable: 2.2.1 + schema-utils: 4.3.2 + tapable: 2.2.2 terser-webpack-plugin: 5.3.14(webpack@5.99.5) - watchpack: 2.4.2 + watchpack: 2.4.4 webpack-sources: 3.2.3 transitivePeerDependencies: - '@swc/core' @@ -23842,10 +24260,10 @@ snapshots: '@webassemblyjs/wasm-edit': 1.14.1 '@webassemblyjs/wasm-parser': 1.14.1 acorn: 8.14.1 - browserslist: 4.24.4 + browserslist: 4.24.5 chrome-trace-event: 1.0.4 enhanced-resolve: 5.18.1 - es-module-lexer: 1.6.0 + es-module-lexer: 1.7.0 eslint-scope: 5.1.1 events: 3.3.0 glob-to-regexp: 0.4.1 @@ -23854,10 +24272,10 @@ snapshots: loader-runner: 4.3.0 mime-types: 2.1.35 neo-async: 2.6.2 - schema-utils: 4.3.0 - tapable: 2.2.1 + schema-utils: 4.3.2 + tapable: 2.2.2 terser-webpack-plugin: 5.3.14(uglify-js@3.19.3)(webpack@5.99.5(uglify-js@3.19.3)) - watchpack: 2.4.2 + watchpack: 2.4.4 webpack-sources: 3.2.3 transitivePeerDependencies: - '@swc/core' @@ -23875,9 +24293,9 @@ snapshots: websocket-sftp@0.8.4: dependencies: awaiting: 3.0.0 - debug: 4.4.0(supports-color@8.1.1) + debug: 4.4.1(supports-color@8.1.1) port-get: 1.0.4 - ws: 8.18.1 + ws: 8.18.2 transitivePeerDependencies: - bufferutil - supports-color @@ -23978,7 +24396,9 @@ snapshots: ws@7.5.10: {} - ws@8.18.1: {} + ws@8.17.1: {} + + ws@8.18.2: {} xdg-basedir@5.1.0: {} @@ -24010,6 +24430,8 @@ snapshots: xmlbuilder@15.1.1: {} + xmlhttprequest-ssl@2.1.2: {} + xpath@0.0.32: {} xpath@0.0.33: {} @@ -24050,7 +24472,7 @@ snapshots: yallist@4.0.0: {} - yaml@2.7.1: {} + yaml@2.8.0: {} yargs-parser@18.1.3: dependencies: @@ -24103,9 +24525,9 @@ snapshots: zlibjs@0.3.1: {} - zod-to-json-schema@3.21.4(zod@3.24.2): + zod-to-json-schema@3.21.4(zod@3.25.17): dependencies: - zod: 3.24.2 + zod: 3.25.17 zod-to-json-schema@3.24.5(zod@3.24.2): dependencies: @@ -24113,4 +24535,12 @@ snapshots: zod@3.24.2: {} + zod@3.25.17: {} + + zstd-napi@0.0.10: + dependencies: + '@types/node': 18.19.103 + node-addon-api: 7.1.1 + prebuild-install: 7.1.3 + zwitch@2.0.4: {} diff --git a/src/packages/pnpm-workspace.yaml b/src/packages/pnpm-workspace.yaml index 0e2c3f64b8..d7476c0bee 100644 --- a/src/packages/pnpm-workspace.yaml +++ b/src/packages/pnpm-workspace.yaml @@ -5,3 +5,9 @@ packages: # weird build system, and it totally messes up stuff. - "!**/cdn/dist/**" - "!compute" +onlyBuiltDependencies: + - better-sqlite3 + - websocket-sftp + - websocketfs + - zeromq + - zstd-napi \ No newline at end of file diff --git a/src/packages/project/browser-websocket/server.ts b/src/packages/project/browser-websocket/server.ts index 93c6f3a79c..7893619991 100644 --- a/src/packages/project/browser-websocket/server.ts +++ b/src/packages/project/browser-websocket/server.ts @@ -12,7 +12,7 @@ import { Router } from "express"; import { Server } from "http"; import Primus from "primus"; import type { PrimusWithChannels } from "@cocalc/terminal"; -import initNats from "@cocalc/project/nats"; +import initConat from "@cocalc/project/conat"; // We are NOT using UglifyJS because it can easily take 3 blocking seconds of cpu // during project startup to save 100kb -- it just isn't worth it. Obviously, it @@ -57,8 +57,8 @@ export default function init(server: Server, basePath: string): Router { `waiting for clients to request primus.js (length=${library.length})...`, ); - // we also init the new nats server, which is meant to replace this: - initNats(); + // we also init the conat server, which is meant to replace this: + initConat(); return router; } diff --git a/src/packages/project/client.ts b/src/packages/project/client.ts index f8a19fdf72..271420322d 100644 --- a/src/packages/project/client.ts +++ b/src/packages/project/client.ts @@ -48,17 +48,17 @@ import * as sage_session from "./sage_session"; import { getListingsTable } from "@cocalc/project/sync/listings"; import { get_synctable } from "./sync/open-synctables"; import { get_syncdoc } from "./sync/sync-doc"; -import synctable_nats from "@cocalc/project/nats/synctable"; -import pubsub from "@cocalc/project/nats/pubsub"; -import type { NatsSyncTableFunction } from "@cocalc/nats/sync/synctable"; -import { getEnv as getNatsEnv } from "@cocalc/project/nats/env"; +import synctable_conat from "@cocalc/project/conat/synctable"; +import pubsub from "@cocalc/project/conat/pubsub"; +import type { NatsSyncTableFunction } from "@cocalc/conat/sync/synctable"; +import { getEnv as getNatsEnv } from "@cocalc/project/conat/env"; import { - callNatsService, - createNatsService, - type CallNatsServiceFunction, - type CreateNatsServiceFunction, -} from "@cocalc/nats/service"; -import type { NatsEnvFunction } from "@cocalc/nats/types"; + callConatService, + createConatService, + type CallConatServiceFunction, + type CreateConatServiceFunction, +} from "@cocalc/conat/service"; +import type { NatsEnvFunction } from "@cocalc/conat/types"; const winston = getLogger("client"); @@ -520,20 +520,20 @@ export class Client extends EventEmitter implements ProjectClientInterface { return the_synctable; } - synctable_nats: NatsSyncTableFunction = async (query, options?) => { - return await synctable_nats(query, options); + synctable_conat: NatsSyncTableFunction = async (query, options?) => { + return await synctable_conat(query, options); }; pubsub_nats = async ({ path, name }: { path?: string; name: string }) => { return await pubsub({ path, name }); }; - callNatsService: CallNatsServiceFunction = async (options) => { - return await callNatsService(options); + callConatService: CallConatServiceFunction = async (options) => { + return await callConatService(options); }; - createNatsService: CreateNatsServiceFunction = (options) => { - return createNatsService({ + createConatService: CreateConatServiceFunction = (options) => { + return createConatService({ ...options, project_id: this.project_id, }); diff --git a/src/packages/project/nats/README.md b/src/packages/project/conat/README.md similarity index 64% rename from src/packages/project/nats/README.md rename to src/packages/project/conat/README.md index b3c001bf2d..f1cb3a8be2 100644 --- a/src/packages/project/nats/README.md +++ b/src/packages/project/conat/README.md @@ -1,9 +1,9 @@ -How to setup a standalone nodejs command line session to connect to nats **as a project** +How to setup a standalone nodejs command line session to connect to conat **as a project** -1. Create a file project-env.sh as explained in projects/nats/README.md, which defines these environment variables (your values will be different). You can use the command `export` from within a terminal in a project to find these values. +1. Create a file project-env.sh as explained in projects/conat/README.md, which defines these environment variables (your values will be different). You can use the command `export` from within a terminal in a project to find these values. ```sh -export NATS_SERVER="ws://localhost:5000/6b851643-360e-435e-b87e-f9a6ab64a8b1/port/5000/nats" +export CONAT_SERVER="http://localhost:5000/6b851643-360e-435e-b87e-f9a6ab64a8b1/port/5000" export COCALC_PROJECT_ID="00847397-d6a8-4cb0-96a8-6ef64ac3e6cf" export COCALC_USERNAME=`echo $COCALC_PROJECT_ID | tr -d '-'` export HOME="/projects/6b851643-360e-435e-b87e-f9a6ab64a8b1/cocalc/src/data/projects/$COCALC_PROJECT_ID" @@ -31,4 +31,4 @@ $ . project-env.sh $ node ``` -Now anything involving NATS will work with identity the project. +Now anything involving conat will work with identity the project. diff --git a/src/packages/project/nats/api/editor.ts b/src/packages/project/conat/api/editor.ts similarity index 100% rename from src/packages/project/nats/api/editor.ts rename to src/packages/project/conat/api/editor.ts diff --git a/src/packages/project/conat/api/index.ts b/src/packages/project/conat/api/index.ts new file mode 100644 index 0000000000..913e46494c --- /dev/null +++ b/src/packages/project/conat/api/index.ts @@ -0,0 +1,170 @@ +/* + +DEVELOPMENT: + +How to do development (so in a dev project doing cc-in-cc dev). + +0. From the browser, terminate this api server running in the project: + + await cc.client.conat_client.projectApi(cc.current()).system.terminate({service:'api'}) + +1. Create a file project-env.sh as explained in projects/conat/README.md, which defines these environment variables (your values will be different): + + export COCALC_PROJECT_ID="00847397-d6a8-4cb0-96a8-6ef64ac3e6cf" + export COCALC_USERNAME=`echo $COCALC_PROJECT_ID | tr -d '-'` + export HOME="/projects/6b851643-360e-435e-b87e-f9a6ab64a8b1/cocalc/src/data/projects/$COCALC_PROJECT_ID" + export DATA=$HOME/.smc + + # optional for more flexibility + export API_KEY=sk-OUwxAN8d0n7Ecd48000055 + export COMPUTE_SERVER_ID=0 + + # optional for more logging + export DEBUG=cocalc:* + export DEBUG_CONSOLE=yes + +If API_KEY is a project-wide API key, then you can change +COCALC_PROJECT_ID however you want and don't have to worry +about whether the project is running or the project secret +key changing when the project is restarted. + +2. Then do this: + + $ . project-env.sh + $ node + ... + > require("@cocalc/project/conat/api/index").init() + +You can then easily be able to grab some state, e.g., by writing this in any cocalc code, +rebuilding and restarting: + + global.x = {...} + +Remember, if you don't set API_KEY, then the project MUST be running so that the secret token in $HOME/.smc/secret_token is valid. + +3. Use the browser to see the project is on the conat network and works: + + a = cc.client.conat_client.projectApi({project_id:'81e0c408-ac65-4114-bad5-5f4b6539bd0e'}); + await a.system.ping(); + await a.system.exec({command:'echo $COCALC_PROJECT_ID'}); + +*/ + +import getLogger from "@cocalc/backend/logger"; +import { type ProjectApi } from "@cocalc/conat/project-api"; +import { connectToConat } from "@cocalc/project/conat/connection"; +import { getSubject } from "../names"; +import { terminate as terminateOpenFiles } from "@cocalc/project/conat/open-files"; +import { close as closeListings } from "@cocalc/project/conat/listings"; +import { project_id } from "@cocalc/project/data"; +import { close as closeFilesRead } from "@cocalc/project/conat/files/read"; +import { close as closeFilesWrite } from "@cocalc/project/conat/files/write"; + +const logger = getLogger("project:conat:api"); + +export function init() { + serve(); +} + +let terminate = false; +async function serve() { + logger.debug("serve: create project conat api service"); + const cn = connectToConat(); + const subject = getSubject({ service: "api" }); + // @ts-ignore + const name = `project-${project_id}`; + logger.debug(`serve: creating api service ${name}`); + const api = await cn.subscribe(subject); + logger.debug(`serve: subscribed to subject='${subject}'`); + await listen(api, subject); +} + +async function listen(api, subject) { + for await (const mesg of api) { + if (terminate) { + return; + } + (async () => { + try { + await handleMessage(api, subject, mesg); + } catch (err) { + logger.debug(`WARNING: issue handling a message -- ${err}`); + } + })(); + } +} + +async function handleMessage(api, subject, mesg) { + const request = mesg.data ?? ({} as any); + // logger.debug("got message", request); + if (request.name == "system.terminate") { + // TODO: should be part of handleApiRequest below, but done differently because + // one case halts this loop + const { service } = request.args[0] ?? {}; + if (service == "open-files") { + terminateOpenFiles(); + mesg.respond({ status: "terminated", service }); + return; + } else if (service == "listings") { + closeListings(); + mesg.respond({ status: "terminated", service }); + return; + } else if (service == "files:read") { + await closeFilesRead(); + mesg.respond({ status: "terminated", service }); + return; + } else if (service == "files:write") { + await closeFilesWrite(); + mesg.respond({ status: "terminated", service }); + return; + } else if (service == "api") { + // special hook so admin can terminate handling. This is useful for development. + terminate = true; + console.warn("TERMINATING listening on ", subject); + logger.debug("TERMINATING listening on ", subject); + mesg.respond({ status: "terminated", service }); + api.stop(); + return; + } else { + mesg.respond({ error: `Unknown service ${service}` }); + } + } else { + handleApiRequest(request, mesg); + } +} + +async function handleApiRequest(request, mesg) { + let resp; + const { name, args } = request as any; + if (name == "ping") { + resp = "pong"; + } else { + try { + // logger.debug("handling project.api request:", { name }); + resp = (await getResponse({ name, args })) ?? null; + } catch (err) { + logger.debug(`project.api request err = ${err}`, { name }); + resp = { error: `${err}` }; + } + } + mesg.respond(resp); +} + +import * as system from "./system"; +import * as editor from "./editor"; +import * as sync from "./sync"; + +export const projectApi: ProjectApi = { + system, + editor, + sync, +}; + +async function getResponse({ name, args }) { + const [group, functionName] = name.split("."); + const f = projectApi[group]?.[functionName]; + if (f == null) { + throw Error(`unknown function '${name}'`); + } + return await f(...args); +} diff --git a/src/packages/project/nats/api/sync.ts b/src/packages/project/conat/api/sync.ts similarity index 100% rename from src/packages/project/nats/api/sync.ts rename to src/packages/project/conat/api/sync.ts diff --git a/src/packages/project/nats/api/system.ts b/src/packages/project/conat/api/system.ts similarity index 97% rename from src/packages/project/nats/api/system.ts rename to src/packages/project/conat/api/system.ts index cfe4febf12..9c25636bee 100644 --- a/src/packages/project/nats/api/system.ts +++ b/src/packages/project/conat/api/system.ts @@ -4,7 +4,7 @@ export async function ping() { export async function terminate() {} -import getConnection from "@cocalc/project/nats/connection"; +import getConnection from "@cocalc/project/conat/connection"; export async function resetConnection() { const nc = await getConnection(); await nc.close(); diff --git a/src/packages/project/nats/browser-websocket-api.ts b/src/packages/project/conat/browser-websocket-api.ts similarity index 67% rename from src/packages/project/nats/browser-websocket-api.ts rename to src/packages/project/conat/browser-websocket-api.ts index b6a14a0486..45a4c9ce36 100644 --- a/src/packages/project/nats/browser-websocket-api.ts +++ b/src/packages/project/conat/browser-websocket-api.ts @@ -6,7 +6,7 @@ How to do development (so in a dev project doing cc-in-cc dev): 0. From the browser, send a terminate-handler message, so the handler running in the project stops: - await cc.client.nats_client.projectWebsocketApi({project_id:cc.current().project_id, mesg:{cmd:"terminate"}}) + await cc.client.conat_client.projectWebsocketApi({project_id:cc.current().project_id, mesg:{cmd:"terminate"}}) 1. Open a terminal in the project itself, which sets up the required environment variables. See api/index.ts for details!! @@ -14,11 +14,11 @@ How to do development (so in a dev project doing cc-in-cc dev): 3. Do this: - echo 'require("@cocalc/project/client").init(); require("@cocalc/project/nats/browser-websocket-api").init()' | DEBUG=cocalc:* DEBUG_CONSOLE=yes node + echo 'require("@cocalc/project/client").init(); require("@cocalc/project/conat/browser-websocket-api").init()' | DEBUG=cocalc:* DEBUG_CONSOLE=yes node Or just run node then paste in - require("@cocalc/project/client").init(); require("@cocalc/project/nats/browser-websocket-api").init() + require("@cocalc/project/client").init(); require("@cocalc/project/conat/browser-websocket-api").init() A nice thing about doing that is if you write this deep in some code: @@ -26,49 +26,48 @@ A nice thing about doing that is if you write this deep in some code: then after that code runs you can access x from the node console! -4. Use the browser to see the project is on nats and works: +4. Use the browser to see the project is on conat and works: - await cc.client.nats_client.projectWebsocketApi({project_id:'56eb622f-d398-489a-83ef-c09f1a1e8094', mesg:{cmd:"listing"}}) + await cc.client.conat_client.projectWebsocketApi({project_id:'56eb622f-d398-489a-83ef-c09f1a1e8094', mesg:{cmd:"listing"}}) -5. In a terminal you can always tap into the message stream for a particular project (do `pnpm nats-shell` if necessary to setup your environment): +5. In a terminal you can always tap into the message stream for a particular project: - nats sub --match-replies project.56eb622f-d398-489a-83ef-c09f1a1e8094.browser-api + cd packages/backend + pnpm conat-watch project.56eb622f-d398-489a-83ef-c09f1a1e8094.browser-api --match-replies */ import { getLogger } from "@cocalc/project/logger"; -import { JSONCodec } from "nats"; -import getConnection from "./connection"; +import getConnection, { connectToConat } from "./connection"; import { handleApiCall } from "@cocalc/project/browser-websocket/api"; -import { getPrimusConnection } from "@cocalc/nats/primus"; +import { getPrimusConnection } from "@cocalc/conat/primus"; import { getSubject } from "./names"; const logger = getLogger("project:nats:browser-websocket-api"); -const jc = JSONCodec(); - export async function init() { const nc = await getConnection(); + const cn = connectToConat(); const subject = getSubject({ service: "browser-api", }); logger.debug(`initAPI -- NATS project subject '${subject}'`); - const sub = nc.subscribe(subject); + const sub = await cn.subscribe(subject); const primus = getPrimusConnection({ subject: getSubject({ service: "primus", }), - env: { nc, jc }, + env: { nc, jc: null as any, cn }, role: "server", id: "project", }); for await (const mesg of sub) { - const data = jc.decode(mesg.data) ?? ({} as any); + const data = mesg.data ?? ({} as any); if (data.cmd == "terminate") { logger.debug( "received terminate-handler, so will not handle any further messages", ); - mesg.respond(jc.encode({ exiting: true })); + mesg.respond({ exiting: true }); return; } handleRequest({ data, mesg, primus }); @@ -84,5 +83,5 @@ async function handleRequest({ data, mesg, primus }) { resp = { error: `${err}` }; } //logger.debug("responded", resp); - mesg.respond(jc.encode(resp)); + mesg.respond(resp ?? null); } diff --git a/src/packages/project/nats/connection.ts b/src/packages/project/conat/connection.ts similarity index 87% rename from src/packages/project/nats/connection.ts rename to src/packages/project/conat/connection.ts index 55fbd73b62..7e97982f12 100644 --- a/src/packages/project/nats/connection.ts +++ b/src/packages/project/conat/connection.ts @@ -16,12 +16,13 @@ servers. import getConnection, { setConnectionOptions, -} from "@cocalc/backend/nats/persistent-connection"; +} from "@cocalc/backend/conat/persistent-connection"; import { getLogger } from "@cocalc/project/logger"; import { apiKey, natsWebsocketServer } from "@cocalc/backend/data"; -import { inboxPrefix as getInboxPrefix } from "@cocalc/nats/names"; +import { inboxPrefix as getInboxPrefix } from "@cocalc/conat/names"; import { project_id } from "@cocalc/project/data"; import secretToken from "@cocalc/project/servers/secret-token"; +export { connect as connectToConat } from "@cocalc/backend/conat/conat"; export default getConnection; diff --git a/src/packages/project/nats/env.ts b/src/packages/project/conat/env.ts similarity index 61% rename from src/packages/project/nats/env.ts rename to src/packages/project/conat/env.ts index ace0b589d6..09c14bfb74 100644 --- a/src/packages/project/nats/env.ts +++ b/src/packages/project/conat/env.ts @@ -1,18 +1,18 @@ import { sha1 } from "@cocalc/backend/sha1"; -import getConnection from "./connection"; -import { JSONCodec } from "nats"; -import { setNatsClient } from "@cocalc/nats/client"; +import getConnection, { connectToConat } from "./connection"; +import { setConatClient } from "@cocalc/conat/client"; import { compute_server_id, project_id } from "@cocalc/project/data"; import { getLogger } from "@cocalc/project/logger"; -const jc = JSONCodec(); +const jc = null as any; export async function getEnv() { const nc = await getConnection(); - return { sha1, nc, jc }; + const cn = connectToConat(); + return { sha1, nc, jc, cn }; } export function init() { - setNatsClient({ + setConatClient({ getNatsEnv: getEnv, project_id, compute_server_id, diff --git a/src/packages/project/nats/files/read.ts b/src/packages/project/conat/files/read.ts similarity index 81% rename from src/packages/project/nats/files/read.ts rename to src/packages/project/conat/files/read.ts index 76882f2fd0..4f035f0673 100644 --- a/src/packages/project/nats/files/read.ts +++ b/src/packages/project/conat/files/read.ts @@ -5,7 +5,7 @@ DEVELOPMENT: 1. Stop files:read service running in the project by running this in your browser: - await cc.client.nats_client.projectApi(cc.current()).system.terminate({service:'files:read'}) + await cc.client.conat_client.projectApi(cc.current()).system.terminate({service:'files:read'}) {status: 'terminated', service: 'files:read'} @@ -19,12 +19,12 @@ You can also skip step 1 if you instead set COMPUTE_SERVER_ID to something nonze Welcome to Node.js v18.17.1. Type ".help" for more information. - require('@cocalc/project/nats/files/read').init() + require('@cocalc/project/conat/files/read').init() */ -import "@cocalc/project/nats/env"; // ensure nats env available +import "@cocalc/project/conat/env"; // ensure nats env available import { createReadStream as fs_createReadStream } from "fs"; import { compute_server_id, project_id } from "@cocalc/project/data"; @@ -32,7 +32,7 @@ import { join } from "path"; import { createServer, close as closeReadServer, -} from "@cocalc/nats/files/read"; +} from "@cocalc/conat/files/read"; function createReadStream(path: string) { if (path[0] != "/" && process.env.HOME) { diff --git a/src/packages/project/nats/files/write.ts b/src/packages/project/conat/files/write.ts similarity index 85% rename from src/packages/project/nats/files/write.ts rename to src/packages/project/conat/files/write.ts index 576618c0f8..0c6ed3c4e1 100644 --- a/src/packages/project/nats/files/write.ts +++ b/src/packages/project/conat/files/write.ts @@ -5,7 +5,7 @@ DEVELOPMENT: 1. Stop the files:write service running in the project by running this in your browser: - await cc.client.nats_client.projectApi(cc.current()).system.terminate({service:'files:write'}) + await cc.client.conat_client.projectApi(cc.current()).system.terminate({service:'files:write'}) {status: 'terminated', service: 'files:write'} @@ -19,12 +19,12 @@ You can also skip step 1 if you instead set COMPUTE_SERVER_ID to something nonze Welcome to Node.js v18.17.1. Type ".help" for more information. - require('@cocalc/project/nats/files/write').init() + require('@cocalc/project/conat/files/write').init() */ -import "@cocalc/project/nats/env"; // ensure nats env available +import "@cocalc/project/conat/env"; // ensure nats env available import ensureContainingDirectoryExists from "@cocalc/backend/misc/ensure-containing-directory-exists"; import { createWriteStream as fs_createWriteStream } from "fs"; import { rename } from "fs/promises"; @@ -33,8 +33,8 @@ import { join } from "path"; import { createServer, close as closeWriteServer, -} from "@cocalc/nats/files/write"; -import { randomId } from "@cocalc/nats/names"; +} from "@cocalc/conat/files/write"; +import { randomId } from "@cocalc/conat/names"; import { rimraf } from "rimraf"; async function createWriteStream(path: string) { diff --git a/src/packages/project/nats/formatter.ts b/src/packages/project/conat/formatter.ts similarity index 88% rename from src/packages/project/nats/formatter.ts rename to src/packages/project/conat/formatter.ts index 8381c4f61d..aa593e89bc 100644 --- a/src/packages/project/nats/formatter.ts +++ b/src/packages/project/conat/formatter.ts @@ -3,7 +3,7 @@ File formatting service. */ import { run_formatter, type Options } from "../formatters"; -import { createFormatterService as create } from "@cocalc/nats/service/formatter"; +import { createFormatterService as create } from "@cocalc/conat/service/formatter"; import { compute_server_id, project_id } from "@cocalc/project/data"; interface Message { diff --git a/src/packages/project/nats/index.ts b/src/packages/project/conat/index.ts similarity index 100% rename from src/packages/project/nats/index.ts rename to src/packages/project/conat/index.ts diff --git a/src/packages/project/nats/listings.ts b/src/packages/project/conat/listings.ts similarity index 94% rename from src/packages/project/nats/listings.ts rename to src/packages/project/conat/listings.ts index d8dbda7b39..a881e0248e 100644 --- a/src/packages/project/nats/listings.ts +++ b/src/packages/project/conat/listings.ts @@ -13,7 +13,7 @@ DEVELOPMENT: 1. Stop listings service running in the project by running this in your browser: - await cc.client.nats_client.projectApi(cc.current()).system.terminate({service:'listings'}) + await cc.client.conat_client.projectApi(cc.current()).system.terminate({service:'listings'}) {status: 'terminated', service: 'listings'} @@ -25,7 +25,7 @@ DEVELOPMENT: .../src/packages/project/nats$ node - await require('@cocalc/project/nats/listings').init() + await require('@cocalc/project/conat/listings').init() */ @@ -38,18 +38,18 @@ import { INTEREST_CUTOFF_MS, type Listing, type Times, -} from "@cocalc/nats/service/listings"; +} from "@cocalc/conat/service/listings"; import { compute_server_id, project_id } from "@cocalc/project/data"; import { init as initClient } from "@cocalc/project/client"; import { delay } from "awaiting"; import { type DKV } from "./sync"; -import { type NatsService } from "@cocalc/nats/service"; +import { type ConatService } from "@cocalc/conat/service"; import { MultipathWatcher } from "@cocalc/backend/path-watcher"; import getLogger from "@cocalc/backend/logger"; const logger = getLogger("project:nats:listings"); -let service: NatsService | null; +let service: ConatService | null; export async function init() { logger.debug("init: initializing"); initClient(); diff --git a/src/packages/project/nats/names.ts b/src/packages/project/conat/names.ts similarity index 81% rename from src/packages/project/nats/names.ts rename to src/packages/project/conat/names.ts index 9b1dbc1301..0b2611dba2 100644 --- a/src/packages/project/nats/names.ts +++ b/src/packages/project/conat/names.ts @@ -1,5 +1,5 @@ import { compute_server_id, project_id } from "@cocalc/project/data"; -import { projectSubject, projectStreamName } from "@cocalc/nats/names"; +import { projectSubject, projectStreamName } from "@cocalc/conat/names"; export function getSubject(opts: { path?: string; service: string }) { return projectSubject({ ...opts, compute_server_id, project_id }); diff --git a/src/packages/project/nats/open-files.ts b/src/packages/project/conat/open-files.ts similarity index 94% rename from src/packages/project/nats/open-files.ts rename to src/packages/project/conat/open-files.ts index 7bc84c6f84..6a713cc719 100644 --- a/src/packages/project/nats/open-files.ts +++ b/src/packages/project/conat/open-files.ts @@ -5,8 +5,7 @@ DEVELOPMENT: 0. From the browser with the project opened, terminate the open-files api service: - await cc.client.nats_client.projectApi(cc.current()).system.terminate({service:'open-files'}) - + . // {status: 'terminated', service: 'open-files'} @@ -15,7 +14,7 @@ Set env variables as in a project (see api/index.ts ), then in nodejs: DEBUG_CONSOLE=yes DEBUG=cocalc:debug:project:nats:* node - x = await require("@cocalc/project/nats/open-files").init(); Object.keys(x) + x = await require("@cocalc/project/conat/open-files").init(); Object.keys(x) [ 'openFiles', 'openDocs', 'formatter', 'terminate', 'computeServers' ] @@ -32,7 +31,7 @@ DEBUG_CONSOLE=yes DEBUG=cocalc:debug:project:nats:* node OR: - echo "require('@cocalc/project/nats/open-files').init(); require('@cocalc/project/bug-counter').init()" | node + echo "require('@cocalc/project/conat/open-files').init(); require('@cocalc/project/bug-counter').init()" | node COMPUTE SERVER: @@ -67,7 +66,7 @@ doing this! Then: /cocalc/github/src/packages/project$ COCALC_PROJECT_ID=... COCALC_SECRET_TOKEN="/secrets/secret-token/token" NATS_SERVER=nats-server node Welcome to Node.js v20.19.0. Type ".help" for more information. -> x = await require("@cocalc/project/nats/open-files").init(); Object.keys(x) +> x = await require("@cocalc/project/conat/open-files").init(); Object.keys(x) [ 'openFiles', 'openDocs', 'formatter', 'terminate', 'computeServers' ] > @@ -78,8 +77,8 @@ import { openFiles as createOpenFiles, type OpenFiles, type OpenFileEntry, -} from "@cocalc/project/nats/sync"; -import { getSyncDocType } from "@cocalc/nats/sync/syncdoc-info"; +} from "@cocalc/project/conat/sync"; +import { getSyncDocType } from "@cocalc/conat/sync/syncdoc-info"; import { NATS_OPEN_FILE_TOUCH_INTERVAL } from "@cocalc/util/nats"; import { compute_server_id, project_id } from "@cocalc/project/data"; import type { SyncDoc } from "@cocalc/sync/editor/generic/sync-doc"; @@ -92,7 +91,7 @@ import { delay } from "awaiting"; import { initJupyterRedux, removeJupyterRedux } from "@cocalc/jupyter/kernel"; import { filename_extension, original_path } from "@cocalc/util/misc"; import { createFormatterService } from "./formatter"; -import { type NatsService } from "@cocalc/nats/service/service"; +import { type ConatService } from "@cocalc/conat/service/service"; import { createTerminalService } from "./terminal"; import { exists } from "@cocalc/backend/misc/async-utils-node"; import { map as awaitMap } from "awaiting"; @@ -101,11 +100,11 @@ import { join } from "path"; import { computeServerManager, ComputeServerManager, -} from "@cocalc/nats/compute/manager"; +} from "@cocalc/conat/compute/manager"; import { JUPYTER_SYNCDB_EXTENSIONS } from "@cocalc/util/jupyter/names"; // ensure nats connection stuff is initialized -import "@cocalc/project/nats/env"; +import "@cocalc/project/conat/env"; import { chdir } from "node:process"; const logger = getLogger("project:nats:open-files"); @@ -127,7 +126,7 @@ const FILE_DELETION_INITIAL_DELAY = 15000; let openFiles: OpenFiles | null = null; let formatter: any = null; -const openDocs: { [path: string]: SyncDoc | NatsService } = {}; +const openDocs: { [path: string]: SyncDoc | ConatService } = {}; let computeServers: ComputeServerManager | null = null; const openTimes: { [path: string]: number } = {}; @@ -416,7 +415,7 @@ const openDoc = reuseInFlight(async (path: string) => { if (doc != null) { return; } - openTimes[path] = Date.now(); + openTimes[path] = Date.now(); if (path.endsWith(".term")) { const service = await createTerminalService(path); diff --git a/src/packages/project/nats/pubsub.ts b/src/packages/project/conat/pubsub.ts similarity index 83% rename from src/packages/project/nats/pubsub.ts rename to src/packages/project/conat/pubsub.ts index e02a26a3ab..aa9c0efce3 100644 --- a/src/packages/project/nats/pubsub.ts +++ b/src/packages/project/conat/pubsub.ts @@ -1,5 +1,5 @@ import { getEnv } from "./env"; -import { PubSub } from "@cocalc/nats/sync/pubsub"; +import { PubSub } from "@cocalc/conat/sync/pubsub"; import { project_id } from "@cocalc/project/data"; export default async function pubsub({ diff --git a/src/packages/project/conat/sync.ts b/src/packages/project/conat/sync.ts new file mode 100644 index 0000000000..49a2cd3119 --- /dev/null +++ b/src/packages/project/conat/sync.ts @@ -0,0 +1,38 @@ +import { + dstream as createDstream, + type DStream, +} from "@cocalc/conat/sync/dstream"; +import { dkv as createDKV, type DKV } from "@cocalc/conat/sync/dkv"; +import { dko as createDKO, type DKO } from "@cocalc/conat/sync/dko"; +import { project_id } from "@cocalc/project/data"; +import { + createOpenFiles, + type OpenFiles, + Entry as OpenFileEntry, +} from "@cocalc/conat/sync/open-files"; +import { + inventory as createInventory, + type Inventory, +} from "@cocalc/conat/sync/inventory"; + +export type { DStream, DKV, OpenFiles, OpenFileEntry }; + +export async function dstream(opts): Promise> { + return await createDstream({ project_id, ...opts }); +} + +export async function dkv(opts): Promise> { + return await createDKV({ project_id, ...opts }); +} + +export async function dko(opts): Promise> { + return await createDKO({ project_id, ...opts }); +} + +export async function openFiles(): Promise { + return await createOpenFiles({ project_id }); +} + +export async function inventory(): Promise { + return await createInventory({ project_id }); +} diff --git a/src/packages/project/nats/synctable.ts b/src/packages/project/conat/synctable.ts similarity index 72% rename from src/packages/project/nats/synctable.ts rename to src/packages/project/conat/synctable.ts index a06b4253ea..0b3afc26bf 100644 --- a/src/packages/project/nats/synctable.ts +++ b/src/packages/project/conat/synctable.ts @@ -1,21 +1,21 @@ -import getConnection from "./connection"; +import getConnection, { connectToConat } from "./connection"; import { project_id } from "@cocalc/project/data"; -import { JSONCodec } from "nats"; import { createSyncTable, type NatsSyncTable, -} from "@cocalc/nats/sync/synctable"; +} from "@cocalc/conat/sync/synctable"; import { parse_query } from "@cocalc/sync/table/util"; import { keys } from "lodash"; -import type { NatsSyncTableFunction } from "@cocalc/nats/sync/synctable"; +import type { NatsSyncTableFunction } from "@cocalc/conat/sync/synctable"; -const jc = JSONCodec(); +const jc = null as any; const synctable: NatsSyncTableFunction = async ( query, options?, ): Promise => { const nc = await getConnection(); + const cn = connectToConat(); query = parse_query(query); const table = keys(query)[0]; const obj = options?.obj; @@ -29,7 +29,7 @@ const synctable: NatsSyncTableFunction = async ( project_id, ...options, query, - env: { jc, nc }, + env: { jc, nc, cn }, }); await s.init(); return s; diff --git a/src/packages/project/nats/terminal.ts b/src/packages/project/conat/terminal.ts similarity index 99% rename from src/packages/project/nats/terminal.ts rename to src/packages/project/conat/terminal.ts index 29edcca2ba..cd6bec9eb9 100644 --- a/src/packages/project/nats/terminal.ts +++ b/src/packages/project/conat/terminal.ts @@ -12,12 +12,12 @@ import { exists } from "@cocalc/backend/misc/async-utils-node"; import { reuseInFlight } from "@cocalc/util/reuse-in-flight"; import { getLogger } from "@cocalc/project/logger"; import { readlink, realpath } from "node:fs/promises"; -import { dstream, type DStream } from "@cocalc/project/nats/sync"; +import { dstream, type DStream } from "@cocalc/project/conat/sync"; import { createTerminalServer, createBrowserClient, SIZE_TIMEOUT_MS, -} from "@cocalc/nats/service/terminal"; +} from "@cocalc/conat/service/terminal"; import { project_id, compute_server_id } from "@cocalc/project/data"; import { isEqual, throttle } from "lodash"; @@ -337,6 +337,7 @@ class Session { this.state = "running"; logger.debug("creating stream"); await this.createStream(); + logger.debug("created the stream"); if ((this.state as State) == "closed") { return; } diff --git a/src/packages/project/nats/api/index.ts b/src/packages/project/nats/api/index.ts deleted file mode 100644 index cfd87f2cbf..0000000000 --- a/src/packages/project/nats/api/index.ts +++ /dev/null @@ -1,216 +0,0 @@ -/* - -DEVELOPMENT: - -How to do development (so in a dev project doing cc-in-cc dev). - -0. From the browser, terminate this api server running in the project: - - await cc.client.nats_client.projectApi(cc.current()).system.terminate({service:'api'}) - -1. Create a file project-env.sh as explained in projects/nats/README.md, which defines these environment variables (your values will be different): - - export COCALC_PROJECT_ID="00847397-d6a8-4cb0-96a8-6ef64ac3e6cf" - export COCALC_USERNAME=`echo $COCALC_PROJECT_ID | tr -d '-'` - export HOME="/projects/6b851643-360e-435e-b87e-f9a6ab64a8b1/cocalc/src/data/projects/$COCALC_PROJECT_ID" - export DATA=$HOME/.smc - - # optional for more flexibility - export API_KEY=sk-OUwxAN8d0n7Ecd48000055 - export COMPUTE_SERVER_ID=0 - - # optional for more logging - export DEBUG=cocalc:* - export DEBUG_CONSOLE=yes - -If API_KEY is a project-wide API key, then you can change COCALC_PROJECT_ID however you want -and don't have to worry about whether the project is running or the project secret key changing -when the project is restarted. - -2. Then do this: - - $ . project-env.sh - $ node - ... - > require("@cocalc/project/nats/api/index").init() - -You can then easily be able to grab some state, e.g., by writing this in any cocalc code, -rebuilding and restarting: - - global.x = {...} - -Remember, if you don't set API_KEY, then the project MUST be running so that the secret token in $HOME/.smc/secret_token is valid. - -3. Use the browser to see the project is on nats and works: - - a = cc.client.nats_client.projectApi({project_id:'81e0c408-ac65-4114-bad5-5f4b6539bd0e'}); - await a.system.ping(); - await a.system.exec({command:'echo $COCALC_PROJECT_ID'}); - -*/ - -import { JSONCodec } from "nats"; -import getLogger from "@cocalc/backend/logger"; -import { type ProjectApi } from "@cocalc/nats/project-api"; -import getConnection from "@cocalc/project/nats/connection"; -import { getSubject } from "../names"; -import { terminate as terminateOpenFiles } from "@cocalc/project/nats/open-files"; -import { close as closeListings } from "@cocalc/project/nats/listings"; -import { Svcm } from "@nats-io/services"; -import { compute_server_id, project_id } from "@cocalc/project/data"; -import { close as closeFilesRead } from "@cocalc/project/nats/files/read"; -import { close as closeFilesWrite } from "@cocalc/project/nats/files/write"; -import { delay } from "awaiting"; -import { waitUntilConnected } from "@cocalc/nats/util"; - -const MONITOR_INTERVAL = 30000; - -const logger = getLogger("project:nats:api"); -const jc = JSONCodec(); - -export function init() { - mainLoop(); -} - -let terminate = false; -export async function mainLoop() { - let d = 3000; - let lastStart = 0; - while (!terminate) { - try { - lastStart = Date.now(); - await serve(); - logger.debug("project nats api service ended"); - } catch (err) { - logger.debug(`project nats api service error -- ${err}`); - if (Date.now() - lastStart >= 30000) { - // it ran for a while, so no delay - logger.debug(`will restart immediately`); - d = 3000; - } else { - // it crashed quickly, so delay! - d = Math.min(20000, d * 1.25 + Math.random()); - logger.debug(`will restart in ${d}ms`); - await delay(d); - } - } - } -} - -async function serve() { - logger.debug("serve: create project nats api service"); - await waitUntilConnected(); - const nc = await getConnection(); - const subject = getSubject({ service: "api" }); - // @ts-ignore - const svcm = new Svcm(nc); - const name = `project-${project_id}`; - logger.debug(`serve: creating API microservice ${name}`); - await waitUntilConnected(); - const service = await svcm.add({ - name, - version: "0.1.0", - description: `CoCalc ${compute_server_id ? "Compute Server" : "Project"}`, - }); - const api = service.addEndpoint("api", { subject }); - serviceMonitor({ api, subject, nc }); - logger.debug(`serve: subscribed to subject='${subject}'`); - await listen(api, subject); -} - -async function serviceMonitor({ nc, api, subject }) { - while (true) { - logger.debug(`serviceMonitor: waiting ${MONITOR_INTERVAL}ms`); - await delay(MONITOR_INTERVAL); - try { - await waitUntilConnected(); - await nc.request(subject, jc.encode({ name: "ping" }), { - timeout: 7500, - }); - logger.debug("serviceMonitor: ping succeeded"); - } catch (err) { - logger.debug( - `serviceMonitor: ping failed, so restarting service -- ${err}`, - ); - api.stop(); - return; - } - } -} - -async function listen(api, subject) { - for await (const mesg of api) { - const request = jc.decode(mesg.data) ?? ({} as any); - // logger.debug("got message", request); - if (request.name == "system.terminate") { - // TODO: should be part of handleApiRequest below, but done differently because - // one case halts this loop - const { service } = request.args[0] ?? {}; - if (service == "open-files") { - terminateOpenFiles(); - mesg.respond(jc.encode({ status: "terminated", service })); - continue; - } else if (service == "listings") { - closeListings(); - mesg.respond(jc.encode({ status: "terminated", service })); - continue; - } else if (service == "files:read") { - await closeFilesRead(); - mesg.respond(jc.encode({ status: "terminated", service })); - continue; - } else if (service == "files:write") { - await closeFilesWrite(); - mesg.respond(jc.encode({ status: "terminated", service })); - continue; - } else if (service == "api") { - // special hook so admin can terminate handling. This is useful for development. - terminate = true; - console.warn("TERMINATING listening on ", subject); - logger.debug("TERMINATING listening on ", subject); - mesg.respond(jc.encode({ status: "terminated", service })); - api.stop(); - return; - } else { - mesg.respond(jc.encode({ error: `Unknown service ${service}` })); - } - } else { - handleApiRequest(request, mesg); - } - } -} - -async function handleApiRequest(request, mesg) { - let resp; - const { name, args } = request as any; - if (name == "ping") { - resp = "pong"; - } else { - try { - // logger.debug("handling project.api request:", { name }); - resp = (await getResponse({ name, args })) ?? null; - } catch (err) { - logger.debug(`project.api request err = ${err}`, { name }); - resp = { error: `${err}` }; - } - } - mesg.respond(jc.encode(resp)); -} - -import * as system from "./system"; -import * as editor from "./editor"; -import * as sync from "./sync"; - -export const projectApi: ProjectApi = { - system, - editor, - sync, -}; - -async function getResponse({ name, args }) { - const [group, functionName] = name.split("."); - const f = projectApi[group]?.[functionName]; - if (f == null) { - throw Error(`unknown function '${name}'`); - } - return await f(...args); -} diff --git a/src/packages/project/nats/sync.ts b/src/packages/project/nats/sync.ts deleted file mode 100644 index bd7d1aa013..0000000000 --- a/src/packages/project/nats/sync.ts +++ /dev/null @@ -1,49 +0,0 @@ -import { stream as createStream, type Stream } from "@cocalc/nats/sync/stream"; -import { - dstream as createDstream, - type DStream, -} from "@cocalc/nats/sync/dstream"; -import { kv as createKV, type KV } from "@cocalc/nats/sync/kv"; -import { dkv as createDKV, type DKV } from "@cocalc/nats/sync/dkv"; -import { dko as createDKO, type DKO } from "@cocalc/nats/sync/dko"; -import { getEnv } from "./env"; -import { project_id } from "@cocalc/project/data"; -import { - createOpenFiles, - type OpenFiles, - Entry as OpenFileEntry, -} from "@cocalc/nats/sync/open-files"; -import { - inventory as createInventory, - type Inventory, -} from "@cocalc/nats/sync/inventory"; - -export type { Stream, DStream, KV, DKV, OpenFiles, OpenFileEntry }; - -export async function stream(opts): Promise> { - return await createStream({ project_id, env: await getEnv(), ...opts }); -} - -export async function dstream(opts): Promise> { - return await createDstream({ project_id, env: await getEnv(), ...opts }); -} - -export async function kv(opts): Promise> { - return await createKV({ project_id, env: await getEnv(), ...opts }); -} - -export async function dkv(opts): Promise> { - return await createDKV({ project_id, env: await getEnv(), ...opts }); -} - -export async function dko(opts): Promise> { - return await createDKO({ project_id, env: await getEnv(), ...opts }); -} - -export async function openFiles(): Promise { - return await createOpenFiles({ project_id }); -} - -export async function inventory(): Promise { - return await createInventory({ project_id }); -} diff --git a/src/packages/project/package.json b/src/packages/project/package.json index 31c941df02..a9ff9a539a 100644 --- a/src/packages/project/package.json +++ b/src/packages/project/package.json @@ -4,7 +4,7 @@ "description": "CoCalc: project daemon", "exports": { "./named-servers": "./dist/named-servers/index.js", - "./nats": "./dist/nats/index.js", + "./conat": "./dist/conat/index.js", "./*": "./dist/*.js" }, "keywords": [ @@ -21,8 +21,8 @@ "dependencies": { "@cocalc/backend": "workspace:*", "@cocalc/comm": "workspace:*", + "@cocalc/conat": "workspace:*", "@cocalc/jupyter": "workspace:*", - "@cocalc/nats": "workspace:*", "@cocalc/primus-multiplex": "^1.1.0", "@cocalc/primus-responder": "^1.0.5", "@cocalc/project": "workspace:*", @@ -32,9 +32,6 @@ "@cocalc/terminal": "workspace:*", "@cocalc/util": "workspace:*", "@lydell/node-pty": "^1.1.0", - "@nats-io/jetstream": "3.0.0", - "@nats-io/kv": "3.0.0", - "@nats-io/services": "3.0.0", "@nteract/messaging": "^7.0.20", "awaiting": "^3.0.0", "body-parser": "^1.20.3", @@ -53,7 +50,6 @@ "lean-client-js-node": "^1.2.12", "lodash": "^4.17.21", "lru-cache": "^7.18.3", - "nats": "^2.29.3", "pidusage": "^1.2.0", "prettier": "^3.0.2", "primus": "^8.0.9", diff --git a/src/packages/project/tsconfig.json b/src/packages/project/tsconfig.json index 3208f0532a..71fdeb3571 100644 --- a/src/packages/project/tsconfig.json +++ b/src/packages/project/tsconfig.json @@ -12,7 +12,7 @@ { "path": "../backend" }, { "path": "../comm" }, { "path": "../jupyter" }, - { "path": "../nats" }, + { "path": "../conat" }, { "path": "../sync" }, { "path": "../sync-client" }, { "path": "../sync-fs" }, diff --git a/src/packages/server/nats/api/db.ts b/src/packages/server/conat/api/db.ts similarity index 100% rename from src/packages/server/nats/api/db.ts rename to src/packages/server/conat/api/db.ts diff --git a/src/packages/server/nats/api/file-use-times.ts b/src/packages/server/conat/api/file-use-times.ts similarity index 92% rename from src/packages/server/nats/api/file-use-times.ts rename to src/packages/server/conat/api/file-use-times.ts index ba332096bd..ab6c4c500a 100644 --- a/src/packages/server/nats/api/file-use-times.ts +++ b/src/packages/server/conat/api/file-use-times.ts @@ -7,9 +7,9 @@ import getPool from "@cocalc/database/pool"; import type { FileUseTimesOptions, FileUseTimesResponse, -} from "@cocalc/nats/hub-api/db"; -import { dstream } from "@cocalc/nats/sync/dstream"; -import { patchesStreamName } from "@cocalc/nats/sync/synctable-stream"; +} from "@cocalc/conat/hub-api/db"; +import { dstream } from "@cocalc/conat/sync/dstream"; +import { patchesStreamName } from "@cocalc/conat/sync/synctable-stream"; export async function fileUseTimes({ project_id, diff --git a/src/packages/server/conat/api/index.ts b/src/packages/server/conat/api/index.ts new file mode 100644 index 0000000000..d6d986db50 --- /dev/null +++ b/src/packages/server/conat/api/index.ts @@ -0,0 +1,188 @@ +/* +This is meant to be similar to the nexts pages http api/v2, but using NATS instead of HTTPS. + +To do development: + +1. Turn off nats-server handling for the hub by sending this message from a browser as an admin: + + await cc.client.conat_client.hub.system.terminate({service:'api'}) + +NOTE: there's no way to turn the auth back on in the hub, so you'll have to restart +your dev hub after doing the above. + +2. Run this script at the terminal: + + echo "require('@cocalc/server/conat/api').initAPI()" | COCALC_MODE='single-user' DEBUG_CONSOLE=yes DEBUG=cocalc:* node + + +3. Optional: start more servers -- requests get randomly routed to exactly one of them: + + echo "require('@cocalc/server/conat').default()" | COCALC_MODE='single-user' DEBUG_CONSOLE=yes DEBUG=cocalc:* node + echo "require('@cocalc/server/conat').default()" | COCALC_MODE='single-user' DEBUG_CONSOLE=yes DEBUG=cocalc:* node + + +To make use of this from a browser: + + await cc.client.conat_client.hub.system.getCustomize(['siteName']) + +or + + await cc.client.conat_client.callHub({name:"system.getCustomize", args:[['siteName']]}) + +When you make changes, just restart the above. All clients will instantly +use the new version after you restart, and there is no need to restart the hub +itself or any clients. + +To view requests in realtime + +cd packages/backend +pnpm conat-watch 'hub.*.*.api' --match-replies + +And remember to use the nats command, do "pnpm nats-cli" from cocalc/src. +*/ + +import getLogger from "@cocalc/backend/logger"; +import { type HubApi, getUserId, transformArgs } from "@cocalc/conat/hub-api"; +import { getEnv } from "@cocalc/backend/conat"; +import userIsInGroup from "@cocalc/server/accounts/is-in-group"; +import { terminate as terminateChangefeedServer } from "@cocalc/conat/changefeed/server"; +import { terminate as terminatePersistServer } from "@cocalc/conat/persist/server"; +import { delay } from "awaiting"; + +const logger = getLogger("server:nats:api"); + +export function initAPI() { + mainLoop(); +} + +let terminate = false; +async function mainLoop() { + let d = 3000; + let lastStart = 0; + while (!terminate) { + try { + lastStart = Date.now(); + await serve(); + } catch (err) { + logger.debug(`hub nats api service error -- ${err}`); + if (Date.now() - lastStart >= 30000) { + // it ran for a while, so no delay + logger.debug(`will restart immediately`); + d = 3000; + } else { + // it crashed quickly, so delay! + d = Math.min(20000, d * 1.25 + Math.random()); + logger.debug(`will restart in ${d}ms`); + await delay(d); + } + } + } +} + +async function serve() { + const subject = "hub.*.*.api"; + logger.debug(`initAPI -- subject='${subject}', options=`, { + queue: "0", + }); + const { cn } = await getEnv(); + const api = await cn.subscribe(subject); + for await (const mesg of api) { + (async () => { + try { + await handleMessage({ api, subject, mesg }); + } catch (err) { + logger.debug(`WARNING: unexpected error - ${err}`); + } + })(); + } +} + +async function handleMessage({ api, subject, mesg }) { + const request = mesg.data ?? ({} as any); + if (request.name == "system.terminate") { + // special hook so admin can terminate handling. This is useful for development. + const { account_id } = getUserId(mesg.subject); + if (!(!!account_id && (await userIsInGroup(account_id, "admin")))) { + mesg.respond({ error: "only admin can terminate" }); + return; + } + // TODO: could be part of handleApiRequest below, but done differently because + // one case halts this loop + const { service } = request.args[0] ?? {}; + logger.debug(`Terminate service '${service}'`); + if (service == "changefeeds") { + terminateChangefeedServer(); + mesg.respond({ status: "terminated", service }); + return; + } else if (service == "persist") { + terminatePersistServer(); + mesg.respond({ status: "terminated", service }); + return; + } else if (service == "api") { + // special hook so admin can terminate handling. This is useful for development. + console.warn("TERMINATING listening on ", subject); + logger.debug("TERMINATING listening on ", subject); + terminate = true; + mesg.respond({ status: "terminated", service }); + api.stop(); + return; + } else { + mesg.respond({ error: `Unknown service ${service}` }); + } + } else { + // we explicitly do NOT await this, since we want this hub server to handle + // potentially many messages at once, not one at a time! + handleApiRequest({ request, mesg }); + } +} + +async function handleApiRequest({ request, mesg }) { + let resp; + try { + const { account_id, project_id } = getUserId(mesg.subject); + const { name, args } = request as any; + logger.debug("handling hub.api request:", { + account_id, + project_id, + name, + }); + resp = (await getResponse({ name, args, account_id, project_id })) ?? null; + } catch (err) { + resp = { error: `${err}` }; + } + try { + await mesg.respond(resp); + } catch (err) { + // there's nothing we can do here, e.g., maybe NATS just died. + logger.debug( + `WARNING: error responding to hub.api request (client will receive no response) -- ${err}`, + ); + } +} + +import * as purchases from "./purchases"; +import * as db from "./db"; +import * as system from "./system"; +import * as projects from "./projects"; + +export const hubApi: HubApi = { + system, + projects, + db, + purchases, +}; + +async function getResponse({ name, args, account_id, project_id }) { + const [group, functionName] = name.split("."); + const f = hubApi[group]?.[functionName]; + if (f == null) { + throw Error(`unknown function '${name}'`); + } + const args2 = await transformArgs({ + name, + args, + account_id, + project_id, + }); + return await f(...args2); +} diff --git a/src/packages/server/nats/api/projects.ts b/src/packages/server/conat/api/projects.ts similarity index 100% rename from src/packages/server/nats/api/projects.ts rename to src/packages/server/conat/api/projects.ts diff --git a/src/packages/server/nats/api/purchases.ts b/src/packages/server/conat/api/purchases.ts similarity index 100% rename from src/packages/server/nats/api/purchases.ts rename to src/packages/server/conat/api/purchases.ts diff --git a/src/packages/server/nats/api/system.ts b/src/packages/server/conat/api/system.ts similarity index 100% rename from src/packages/server/nats/api/system.ts rename to src/packages/server/conat/api/system.ts diff --git a/src/packages/server/conat/configuration.ts b/src/packages/server/conat/configuration.ts new file mode 100644 index 0000000000..84181469d7 --- /dev/null +++ b/src/packages/server/conat/configuration.ts @@ -0,0 +1,32 @@ +/* +Load Conat configuration from the database, in case anything is set there. +*/ + +import getPool from "@cocalc/database/pool"; +import { + setConatServer, + setConatPath, + setConatPassword, +} from "@cocalc/backend/data"; + +export async function loadConatConfiguration() { + const pool = getPool(); + const { rows } = await pool.query( + "SELECT name, value FROM server_settings WHERE name=ANY($1)", + [["conat_server", "conat_path", "conat_password"]], + ); + for (const { name, value } of rows) { + if (!value) { + continue; + } + if (name == "conat_password") { + setConatPassword(value.trim()); + } else if (name == "conat_server") { + setConatServer(value.trim()); + } else if (name == "conat_path") { + setConatPath(value.trim()); + } else { + throw Error("bug"); + } + } +} diff --git a/src/packages/server/conat/index.ts b/src/packages/server/conat/index.ts new file mode 100644 index 0000000000..207bcf6974 --- /dev/null +++ b/src/packages/server/conat/index.ts @@ -0,0 +1,28 @@ +import getLogger from "@cocalc/backend/logger"; +import { initAPI } from "./api"; +import { init as initChangefeedServer } from "@cocalc/database/conat/changefeed-api"; +import { init as initLLM } from "./llm"; +import { loadConatConfiguration } from "./configuration"; +import { createTimeService } from "@cocalc/conat/service/time"; +import { initServer as initPersistServer } from "@cocalc/backend/conat/persist"; + +export { loadConatConfiguration }; + +const logger = getLogger("server:nats"); + +export async function initConatChangefeedServer() { + await loadConatConfiguration(); + // do NOT await initDatabase + initChangefeedServer(); +} + +export async function initConatMicroservices() { + logger.debug("initializing nats cocalc hub server"); + await loadConatConfiguration(); + + // do not block on any of these! + initAPI(); + initLLM(); + initPersistServer(); + createTimeService(); +} diff --git a/src/packages/server/nats/llm.ts b/src/packages/server/conat/llm.ts similarity index 66% rename from src/packages/server/nats/llm.ts rename to src/packages/server/conat/llm.ts index 79a1d0c23c..6f06bdc169 100644 --- a/src/packages/server/nats/llm.ts +++ b/src/packages/server/conat/llm.ts @@ -1,4 +1,4 @@ -import { init as init0, close } from "@cocalc/nats/llm/server"; +import { init as init0, close } from "@cocalc/conat/llm/server"; import { evaluate } from "@cocalc/server/llm/index"; export async function init() { diff --git a/src/packages/server/conat/socketio/auth.ts b/src/packages/server/conat/socketio/auth.ts new file mode 100644 index 0000000000..de46bdcda6 --- /dev/null +++ b/src/packages/server/conat/socketio/auth.ts @@ -0,0 +1,204 @@ +import { inboxPrefix } from "@cocalc/conat/names"; +import { isValidUUID } from "@cocalc/util/misc"; +import isCollaborator from "@cocalc/server/projects/is-collaborator"; +import { getAccountIdFromRememberMe } from "@cocalc/server/auth/get-account"; +import { parse } from "cookie"; +import { REMEMBER_ME_COOKIE_NAME } from "@cocalc/backend/auth/cookie-names"; +import { getRememberMeHashFromCookieValue } from "@cocalc/server/auth/remember-me"; + +// [ ] TODO -- api keys, hubs, +export async function getUser(socket): Promise { + if (!socket.handshake.headers.cookie) { + return null; + } + const cookies = parse(socket.handshake.headers.cookie); + const value = cookies[REMEMBER_ME_COOKIE_NAME]; + if (!value) { + return null; + } + const hash = getRememberMeHashFromCookieValue(value); + if (!hash) { + return null; + } + const account_id = await getAccountIdFromRememberMe(hash); + return { account_id }; +} + +export async function isAllowed({ + user, + subject, + type, +}: { + user?: CoCalcUser; + subject: string; + type: "sub" | "pub"; +}) { + if (user == null) { + // TODO: temporarily allowing everything for non-authenticated user for dev only + return true; + } + const userId = getCoCalcUserId(user); + const userType = getCoCalcUserType(user); + + const common = checkCommonPermissions({ + userId, + userType, + user, + subject, + type, + }); + if (common != null) { + return common; + } + if (userType == "project") { + return await isProjectAllowed({ project_id: userId, subject, type }); + } else if (userType == "account") { + return await isAccountAllowed({ account_id: userId, subject, type }); + } + return false; +} + +export function checkCommonPermissions({ + user, + userType, + userId, + subject, + type, +}: { + user: CoCalcUser; + userType: "account" | "project"; + userId: string; + subject: string; + type: "sub" | "pub"; +}): null | boolean { + // can publish as *this user* to the hub's api's + if (subject.startsWith(`hub.${userType}.${userId}.`)) { + return type == "pub"; + } + + // everyone can publish to all inboxes. This seems like a major + // security risk, but with request/reply, the reply subject under + // _INBOX is a long random code that is only known for a moment + // by the sender and the service, so it is NOT a security risk. + if (type == "pub" && subject.startsWith("_INBOX.")) { + return true; + } + // custom inbox only for this user -- important for security, so we + // can only listen to messages for us, and not for anybody else. + if (type == "sub" && subject.startsWith(inboxPrefix(user))) { + return true; + } + + if (type == "sub" && subject.startsWith("public.")) { + return true; + } + + // no decision yet + return null; +} + +function isProjectAllowed({ + project_id, + subject, +}: { + project_id: string; + subject: string; + type: "sub" | "pub"; +}): boolean { + // pub and sub are the same + + if (subject.startsWith(`project.${project_id}.`)) { + return true; + } + // *.project-${project_id}.> + if (subject.split(".")[1] == `project-${project_id}`) { + return true; + } + + return false; +} + +async function isAccountAllowed({ + account_id, + subject, +}: { + account_id: string; + subject: string; + type: "sub" | "pub"; +}): Promise { + // pub and sub are the same + if (subject.startsWith(`account.${account_id}.`)) { + return true; + } + // *.account-${account_id}.> + if (subject.split(".")[1] == `account-${account_id}`) { + return true; + } + + // account accessing a project + const project_id = extractProjectSubject(subject); + if (!project_id) { + return false; + } + return await isCollaborator({ account_id, project_id }); +} + +function extractProjectSubject(subject: string): string { + if (subject.startsWith("project.")) { + const project_id = subject.split(".")[1]; + if (isValidUUID(project_id)) { + return project_id; + } + return ""; + } + const v = subject.split("."); + if (v[1]?.startsWith("project-")) { + const project_id = v[1].slice("project-".length); + if (isValidUUID(project_id)) { + return project_id; + } + } + return ""; +} + + +// A CoCalc User is (so far): a project or account or a hub (not covered here). +export type CoCalcUser = + | { + account_id: string; + project_id?: string; + } + | { + account_id?: string; + project_id: string; + }; + +export function getCoCalcUserType({ + account_id, + project_id, +}: CoCalcUser): "account" | "project" { + if (account_id) { + if (project_id) { + throw Error("exactly one of account_id or project_id must be specified"); + } + return "account"; + } + if (project_id) { + return "project"; + } + throw Error("account_id or project_id must be specified"); +} + +export function getCoCalcUserId({ account_id, project_id }: CoCalcUser): string { + if (account_id) { + if (project_id) { + throw Error("exactly one of account_id or project_id must be specified"); + } + return account_id; + } + if (project_id) { + return project_id; + } + throw Error("account_id or project_id must be specified"); +} + diff --git a/src/packages/server/conat/socketio/index.ts b/src/packages/server/conat/socketio/index.ts new file mode 100644 index 0000000000..b920766d93 --- /dev/null +++ b/src/packages/server/conat/socketio/index.ts @@ -0,0 +1 @@ +export { init as initConatServer } from "./server"; diff --git a/src/packages/server/conat/socketio/server.ts b/src/packages/server/conat/socketio/server.ts new file mode 100644 index 0000000000..0638c3f918 --- /dev/null +++ b/src/packages/server/conat/socketio/server.ts @@ -0,0 +1,42 @@ +/* +To start this standalone + + s = await require('@cocalc/server/conat/socketio').initConatServer() + +It will also get run integrated with the hub if the --conat-server option is passed in. + +Using valkey + + s1 = await require('@cocalc/server/conat/socketio').initConatServer({port:3000, valkey:'redis://127.0.0.1:6379'}) + +and in another session: + + s2 = await require('@cocalc/server/conat/socketio').initConatServer({port:3001, valkey:'redis://127.0.0.1:6379'}) + +Then make a client connected to each: + + c1 = require('@cocalc/conat/core/client').connect('http://localhost:3000'); + c2 = require('@cocalc/conat/core/client').connect('http://localhost:3001'); +*/ + +import { + init as createConatServer, + type Options, +} from "@cocalc/conat/core/server"; +import { Server } from "socket.io"; +import { getLogger } from "@cocalc/backend/logger"; +import { getUser, isAllowed } from "./auth"; + +const logger = getLogger("conat-server"); + +export async function init(options: Partial = {}) { + logger.debug("init"); + + return createConatServer({ + logger: logger.debug, + Server, + getUser, + isAllowed, + ...options, + }); +} diff --git a/src/packages/server/conat/socketio/start-server.ts b/src/packages/server/conat/socketio/start-server.ts new file mode 100644 index 0000000000..94d98aa281 --- /dev/null +++ b/src/packages/server/conat/socketio/start-server.ts @@ -0,0 +1,22 @@ +/* + +To start this: + + pnpm conat-server + +Environment variables: + +- CONAT_PORT - port to listen on +*/ + +import { init as createConatServer } from "@cocalc/conat/core/server"; +import { Server } from "socket.io"; + +const DEFAULT_PORT = 3000; + +const port = parseInt(process.env.CONAT_PORT ?? `${DEFAULT_PORT}`); + +console.log("* CONATS *"); +console.log(`http://localhost:${port}`); + +createConatServer({ port, Server, logger: console.log }); diff --git a/src/packages/server/jest.config.js b/src/packages/server/jest.config.js index 3ea89e18a9..763f4887ba 100644 --- a/src/packages/server/jest.config.js +++ b/src/packages/server/jest.config.js @@ -1,7 +1,7 @@ /** @type {import('ts-jest').JestConfigWithTsJest} */ module.exports = { - preset: 'ts-jest', - testEnvironment: 'node', - setupFiles: ['./test/setup.js'], // Path to your setup file - testMatch: ['**/?(*.)+(spec|test).ts?(x)'], + preset: "ts-jest", + testEnvironment: "node", + setupFiles: ["./test/setup.js"], // Path to your setup file + testMatch: ["**/?(*.)+(spec|test).ts?(x)"], }; diff --git a/src/packages/server/nats/api/index.ts b/src/packages/server/nats/api/index.ts deleted file mode 100644 index 6ae0921e9f..0000000000 --- a/src/packages/server/nats/api/index.ts +++ /dev/null @@ -1,230 +0,0 @@ -/* -This is meant to be similar to the nexts pages http api/v2, but using NATS instead of HTTPS. - -To do development: - -1. Turn off nats-server handling for the hub by sending this message from a browser as an admin: - - await cc.client.nats_client.hub.system.terminate({service:'api'}) - -NOTE: there's no way to turn the auth back on in the hub, so you'll have to restart -your dev hub after doing the above. - -2. Run this script at the terminal: - - echo "require('@cocalc/server/nats').default()" | COCALC_MODE='single-user' DEBUG_CONSOLE=yes DEBUG=cocalc:* node - - -3. Optional: start more servers -- requests get randomly routed to exactly one of them: - - echo "require('@cocalc/server/nats').default()" | COCALC_MODE='single-user' DEBUG_CONSOLE=yes DEBUG=cocalc:* node - echo "require('@cocalc/server/nats').default()" | COCALC_MODE='single-user' DEBUG_CONSOLE=yes DEBUG=cocalc:* node - - -To make use of this from a browser: - - await cc.client.nats_client.hub.system.getCustomize(['siteName']) - -or - - await cc.client.nats_client.callHub({name:"system.getCustomize", args:[['siteName']]}) - -When you make changes, just restart the above. All clients will instantly -use the new version after you restart, and there is no need to restart the hub -itself or any clients. - -To view all requests (and replies) in realtime: - - nats sub 'hub.*.*.api' --match-replies - -And remember to use the nats command, do "pnpm nats-cli" from cocalc/src. -*/ - -import { JSONCodec } from "nats"; -import getLogger from "@cocalc/backend/logger"; -import { type HubApi, getUserId, transformArgs } from "@cocalc/nats/hub-api"; -import { getConnection } from "@cocalc/backend/nats"; -import userIsInGroup from "@cocalc/server/accounts/is-in-group"; -import { terminate as terminateDatabase } from "@cocalc/database/nats/changefeeds"; -import { terminate as terminateChangefeedServer } from "@cocalc/nats/changefeed/server"; -import { Svcm } from "@nats-io/services"; -import { terminate as terminateAuth } from "@cocalc/server/nats/auth"; -import { terminate as terminateTieredStorage } from "@cocalc/server/nats/tiered-storage/api"; -import { respondMany } from "@cocalc/nats/service/many"; -import { delay } from "awaiting"; -import { waitUntilConnected } from "@cocalc/nats/util"; - -const MONITOR_INTERVAL = 30000; - -const logger = getLogger("server:nats:api"); - -const jc = JSONCodec(); - -export function initAPI() { - mainLoop(); -} - -let terminate = false; -async function mainLoop() { - let d = 3000; - let lastStart = 0; - while (!terminate) { - try { - lastStart = Date.now(); - await serve(); - } catch (err) { - logger.debug(`hub nats api service error -- ${err}`); - if (Date.now() - lastStart >= 30000) { - // it ran for a while, so no delay - logger.debug(`will restart immediately`); - d = 3000; - } else { - // it crashed quickly, so delay! - d = Math.min(20000, d * 1.25 + Math.random()); - logger.debug(`will restart in ${d}ms`); - await delay(d); - } - } - } -} - -async function serviceMonitor({ nc, api, subject }) { - while (!terminate) { - logger.debug(`serviceMonitor: waiting ${MONITOR_INTERVAL}ms`); - await delay(MONITOR_INTERVAL); - try { - await waitUntilConnected(); - await nc.request(subject, jc.encode({ name: "ping" }), { - timeout: 7500, - }); - logger.debug("serviceMonitor: ping succeeded"); - } catch (err) { - logger.debug( - `serviceMonitor: ping failed, so restarting service -- ${err}`, - ); - api.stop(); - return; - } - } -} - -async function serve() { - const subject = "hub.*.*.api"; - logger.debug(`initAPI -- subject='${subject}', options=`, { - queue: "0", - }); - const nc = await getConnection(); - // @ts-ignore - const svcm = new Svcm(nc); - - await waitUntilConnected(); - const service = await svcm.add({ - name: "hub-server", - version: "0.1.0", - description: "CoCalc Hub Server", - }); - - const api = service.addEndpoint("api", { subject }); - serviceMonitor({ api, subject, nc }); - await listen({ api, subject }); -} - -async function listen({ api, subject }) { - for await (const mesg of api) { - const request = jc.decode(mesg.data) ?? ({} as any); - if (request.name == "system.terminate") { - // special hook so admin can terminate handling. This is useful for development. - const { account_id } = getUserId(mesg.subject); - if (!(!!account_id && (await userIsInGroup(account_id, "admin")))) { - mesg.respond(jc.encode({ error: "only admin can terminate" })); - continue; - } - // TODO: could be part of handleApiRequest below, but done differently because - // one case halts this loop - const { service } = request.args[0] ?? {}; - logger.debug(`Terminate service '${service}'`); - if (service == "db") { - terminateDatabase(); - mesg.respond(jc.encode({ status: "terminated", service })); - continue; - } else if (service == "auth") { - terminateAuth(); - mesg.respond(jc.encode({ status: "terminated", service })); - continue; - } else if (service == "tiered-storage") { - terminateTieredStorage(); - mesg.respond(jc.encode({ status: "terminated", service })); - continue; - } else if (service == "changefeeds") { - terminateChangefeedServer(); - mesg.respond(jc.encode({ status: "terminated", service })); - continue; - } else if (service == "api") { - // special hook so admin can terminate handling. This is useful for development. - console.warn("TERMINATING listening on ", subject); - logger.debug("TERMINATING listening on ", subject); - terminate = true; - mesg.respond(jc.encode({ status: "terminated", service })); - api.stop(); - return; - } else { - mesg.respond(jc.encode({ error: `Unknown service ${service}` })); - } - } else { - // we explicitly do NOT await this, since we want this hub server to handle - // potentially many messages at once, not one at a time! - handleApiRequest({ request, mesg }); - } - } -} - -async function handleApiRequest({ request, mesg }) { - let resp; - try { - const { account_id, project_id } = getUserId(mesg.subject); - const { name, args } = request as any; - logger.debug("handling hub.api request:", { - account_id, - project_id, - name, - }); - resp = (await getResponse({ name, args, account_id, project_id })) ?? null; - } catch (err) { - resp = { error: `${err}` }; - } - try { - await respondMany({ mesg, data: jc.encode(resp) }); - } catch (err) { - // there's nothing we can do here, e.g., maybe NATS just died. - logger.debug( - `WARNING: error responding to hub.api request (client will receive no response) -- ${err}`, - ); - } -} - -import * as purchases from "./purchases"; -import * as db from "./db"; -import * as system from "./system"; -import * as projects from "./projects"; - -export const hubApi: HubApi = { - system, - projects, - db, - purchases, -}; - -async function getResponse({ name, args, account_id, project_id }) { - const [group, functionName] = name.split("."); - const f = hubApi[group]?.[functionName]; - if (f == null) { - throw Error(`unknown function '${name}'`); - } - const args2 = await transformArgs({ - name, - args, - account_id, - project_id, - }); - return await f(...args2); -} diff --git a/src/packages/server/nats/auth/index.ts b/src/packages/server/nats/auth/index.ts deleted file mode 100644 index 4c4887a5e9..0000000000 --- a/src/packages/server/nats/auth/index.ts +++ /dev/null @@ -1,274 +0,0 @@ -/* -Implementation of Auth Callout for NATS - -DEPLOYMENT: - -Run as many of these as you want -- the load gets randomly spread across all of them. -They just need access to the database. - -There is some nontrivial compute associated with handling each auth, due to: - - - 1000 rounds of sha512 for the remember_me cookie takes time - - encoding/encrypting/decoding/decrypting JWT stuff with NATS takes maybe 50ms of CPU. - -The main "weird" thing about this is that when a connection is being authenticated, -we have to decide on its *exact* permissions once-and-for all at that point in time. -This means browser clients have to reconnect if they want to communicate with a project -they didn't explicit authenticate to. - -AUTH CALLOUT - -At least one of these cocalc servers (which relies on the database) *must* -be available to handle every user connection, unlike with decentralized PKI auth. -It also makes banning users a bit more complicated. - -DOCS: - -- https://docs.nats.io/running-a-nats-service/configuration/securing_nats/auth_callout - -- https://github.com/nats-io/nats-architecture-and-design/blob/main/adr/ADR-26.md - -- https://natsbyexample.com/examples/auth/callout/cli - -- https://www.youtube.com/watch?v=VvGxrT-jv64 - - -DEVELOPMENT - -1. From the browser, turn off the nats auth that is being served by your development hub -by sending this message from a browser as an admin: - - await cc.client.nats_client.hub.system.terminate({service:'auth'}) - -2. Run this code in nodejs: - - x = await require('@cocalc/server/nats/auth').init() - - -NOTE: there's no way to turn the auth back on in the hub, so you'll have to restart -your dev hub after doing the above. - - -WHY NOT DECENTRALIZED AUTH? - -I wish I knew the following earlier, as it would have saved me at least a -week of work... - -We *fully* implemented decentralized auth first using JWT's, but it DOES NOT -SCALE! The problem is that we need potentially dozens of pub/sub rules for each -user, so that's too much information to put in a client JWT cookie, so we -*must* use signing keys. Thus the permissions information for every user goes -into one massive account key blob, and a tiny signed JWT goes to each browser. -This is very nice because permissions can be dynamically updated at any time, -and everybody's permissions are known to NATS without cocalc's database having -to be consulted at all (that said, with multiple nats servers, I am worries the -permissions update would take too long). SADLY, this doesn't scale! -Every time we make a change, the account key has to be updated, and only -a few hundred (or thousand) -users are enough to make the key too big to fit in a message. -Also, each update would take at least a second. Now imagine 150 students in -a class all signing in at once, and it taking over 150 seconds just to -process auth, and you can see this is a nonstarter. -Decentralized auth could work if each cocalc user had a different -account, but... that doesn't work either, since import/export doesn't -really work for jetstream... and setting up all the -import/export would be a nightmare, and probaby much more complicated. - -NOTE: There is one approach to decentralized auth that doesn't obviously fail, -but it would require a separate websocket connection for each project and maybe -some mangling of auth cookies in the proxy server. That said, it still relies -on using command line nsc with pull/push, which feels very slow and brittle. -Using a separate connection for each project is also just really bad practice. -*/ - -import { Svcm } from "@nats-io/services"; -import { getConnection } from "@cocalc/backend/nats"; -import type { NatsConnection } from "@nats-io/nats-core"; -import { - natsAuthCalloutNSeed, - natsAuthCalloutXSeed, -} from "@cocalc/backend/data"; -import { fromPublic, fromSeed } from "@nats-io/nkeys"; -import { - decode as decodeJwt, - encodeAuthorizationResponse, - encodeUser, -} from "@nats-io/jwt"; -import getLogger from "@cocalc/backend/logger"; -import { getUserPermissions } from "./permissions"; -import { validate } from "./validate"; -import adminAlert from "@cocalc/server/messages/admin-alert"; - - -// we put a per-connection limit on subscription to hopefully avoid -// some potential DOS situations. For reference each open file -// takes up to 15 subs (3 for a txt file, ~15 for a jupyter notebook). -// WARNING: I do not think this does anything at all: -const MAX_SUBSCRIPTIONS = 1500; -//const MAX_SUBSCRIPTIONS = 50; - -// some high but nontrivial limit on MB per second for each client -// WARNING: I do not think this does anything at all: -const MAX_BYTES_SECOND = 100 * 1000000; -//const MAX_BYTES_SECOND = 1000000; - -// ADMIN -- use `pnpm nats-cli-sys` then `nats server report connections` -// to see the number of connections by each user. - -const logger = getLogger("server:nats:auth-callout"); - -let api: any | null = null; -export async function init() { - logger.debug("init"); - // coerce to NatsConnection is to workaround a bug in the - // typescript libraries for nats, which might disappear at some point. - const nc = (await getConnection()) as NatsConnection; - const svcm = new Svcm(nc); - - const service = await svcm.add({ - name: "auth", - version: "0.0.1", - description: "CoCalc auth callout service", - // all auth callout handlers randomly take turns authenticating users - queue: "q", - }); - const g = service.addGroup("$SYS").addGroup("REQ").addGroup("USER"); - api = g.addEndpoint("AUTH"); - const encoder = new TextEncoder(); - - const xkp = fromSeed(encoder.encode(natsAuthCalloutXSeed)); - listen(api, xkp); - - return { - service, - nc, - close: () => { - api.stop(); - }, - }; -} - -export function terminate() { - api?.stop(); -} - -//const SESSION_EXPIRE_MS = 1000 * 60 * 60 * 12; - -async function listen(api, xkp) { - logger.debug("listening..."); - try { - for await (const mesg of api) { - // do NOT await this - handleRequest(mesg, xkp); - } - } catch (err) { - logger.debug("WARNING: Problem with auth callout", err); - // restart? I don't know why this would ever fail assuming - // our code isn't buggy, hence alert if this ever happens: - adminAlert({ - subject: "NATS auth-callout service crashed", - body: `A nats auth callout service crashed with the following error:\n\n${err}\n\nWilliam thinks this is impossible and will never happen, so investigate. This problem could cause all connections to cocalc to fail, and would be fixable by restarting certain hubs.`, - }); - } -} - -async function handleRequest(mesg, xkp) { - const t0 = Date.now(); - try { - const requestJwt = getRequestJwt(mesg, xkp); - const requestClaim = decodeJwt(requestJwt) as any; - logger.debug("handleRequest", requestClaim.nats.connect_opts.name); - const userNkey = requestClaim.nats.user_nkey; - const serverId = requestClaim.nats.server_id; - const { pub, sub } = await getPermissions(requestClaim.nats.connect_opts); - const user = fromPublic(userNkey); - const server = fromPublic(serverId.name); - const encoder = new TextEncoder(); - const issuer = fromSeed(encoder.encode(natsAuthCalloutNSeed)); - const userName = requestClaim.nats.connect_opts.user; - const opts = { aud: "COCALC" }; - const jwt = await encodeUser( - userName, - user, - issuer, - { - pub, - sub, - locale: Intl.DateTimeFormat().resolvedOptions().timeZone, - // I don't think the data and subs limits actually do anything at all. - // bytes per second - data: MAX_BYTES_SECOND, - // total number of simultaneous subscriptions - subs: MAX_SUBSCRIPTIONS, - }, - opts, - ); - const data = { jwt }; - const authResponse = await encodeAuthorizationResponse( - user, - server, - issuer, - data, - opts, - ); - const xkey = mesg.headers.get("Nats-Server-Xkey"); - let signedResponse; - if (xkey) { - signedResponse = xkp.seal(encoder.encode(authResponse), xkey); - } else { - signedResponse = encoder.encode(authResponse); - } - - mesg.respond(signedResponse); - } catch (err) { - // TODO: send fail response (?) - logger.debug(`Warning - ${err}`); - } finally { - logger.debug( - `time to handle one auth request completely: ${Date.now() - t0}ms`, - ); - } -} - -function getRequestJwt(mesg, xkp): string { - const xkey = mesg.headers.get("Nats-Server-Xkey"); - let data; - if (xkey) { - // encrypted - // we have natsAuthCalloutXSeedPath above. So have enough info to decrypt. - data = xkp.open(mesg.data, xkey); - } else { - // not encrypted - data = mesg.data; - } - const decoder = new TextDecoder("utf-8"); - return decoder.decode(data); -} - -async function getPermissions({ - auth_token, - name, -}: { - // auth token: - // - remember me - // - api key - // - project secret - auth_token?: string; - name?: string; -}) { - if (!name) { - throw Error("name must be specified"); - } - const { - account_id, - project_id, - project_ids: requested_project_ids, - } = JSON.parse(name) ?? {}; - const { project_ids } = await validate({ - account_id, - project_id, - auth_token, - requested_project_ids, - }); - return getUserPermissions({ account_id, project_id, project_ids }); -} diff --git a/src/packages/server/nats/auth/permissions.ts b/src/packages/server/nats/auth/permissions.ts deleted file mode 100644 index c37669611a..0000000000 --- a/src/packages/server/nats/auth/permissions.ts +++ /dev/null @@ -1,184 +0,0 @@ -import { inboxPrefix } from "@cocalc/nats/names"; -import getLogger from "@cocalc/backend/logger"; -import { isValidUUID } from "@cocalc/util/misc"; - -const logger = getLogger("server:nats:auth:permissions"); - -export function getUserPermissions({ - project_id, - account_id, - project_ids, -}: CoCalcUser & { project_ids?: string[] }) { - logger.debug("getUserPermissions", { - account_id, - project_id, - project_ids, - }); - if (project_id) { - if (!isValidUUID(project_id)) { - throw Error(`invalid project_id ${project_id}`); - } - // project_ids are ignored in this case - return projectPermissions(project_id); - } else if (account_id) { - if (!isValidUUID(account_id)) { - throw Error(`invalid account_id ${account_id}`); - } - const { pub, sub } = accountPermissions(account_id); - if (project_ids) { - for (const project_id of project_ids) { - if (!isValidUUID(project_id)) { - throw Error(`invalid project_id ${project_id}`); - } - const x = projectPermissions(project_id); - pub.allow.push(...x.pub.allow); - sub.allow.push(...x.sub.allow); - pub.deny.push(...x.pub.deny); - sub.deny.push(...x.sub.deny); - } - } - // uniq because there is a little overlap - return { - pub: { allow: uniq(pub.allow), deny: uniq(pub.deny) }, - sub: { allow: uniq(sub.allow), deny: uniq(sub.deny) }, - }; - } else { - throw Error("account_id or project_id must be specified"); - } -} - -function uniq(v: string[]): string[] { - return Array.from(new Set(v)); -} - -function commonPermissions(cocalcUser) { - const pub = { allow: [] as string[], deny: [] as string[] }; - const sub = { allow: [] as string[], deny: [] as string[] }; - const userId = getCoCalcUserId(cocalcUser); - if (!isValidUUID(userId)) { - throw Error("must be a valid uuid"); - } - const userType = getCoCalcUserType(cocalcUser); - - // can talk as *only this user* to the hub's api's - pub.allow.push(`hub.${userType}.${userId}.>`); - // everyone can publish to all inboxes. This seems like a major - // security risk, but with request/reply, the reply subject under - // _INBOX is a long random code that is only known for a moment - // by the sender and the service, so I think it is NOT a security risk. - pub.allow.push("_INBOX.>"); - - // custom inbox only for this user -- critical for security, so we - // can only listen to messages for us, and not for anybody else. - sub.allow.push(inboxPrefix(cocalcUser) + ".>"); - // access to READ the public system info kv store. - sub.allow.push("public.>"); - - // get info about jetstreams - pub.allow.push("$JS.API.INFO"); - // the public jetstream: this makes it available *read only* to all accounts and projects. - pub.allow.push("$JS.API.*.*.public"); - pub.allow.push("$JS.API.*.*.public.>"); - pub.allow.push("$JS.API.CONSUMER.MSG.NEXT.public.>"); - - // everyone can ack messages -- this publish to something like this - // $JS.ACK.account-6aae57c6-08f1-4bb5-848b-3ceb53e61ede.lZiQnTzW.11.1.98.1743611921171669063.0 - // which contains a random string, so there is no danger letting anyone publish to this. - pub.allow.push("$JS.ACK.>"); - - // microservices info api -- **TODO: security concerns!?** - // Please don't tell me I have to name all microservice identically :-( - sub.allow.push("$SRV.>"); - pub.allow.push("$SRV.>"); - - // so client can find out what they can pub/sub to... - pub.allow.push("$SYS.REQ.USER.INFO"); - return { pub, sub }; -} - -function projectPermissions(project_id: string) { - const { pub, sub } = commonPermissions({ project_id }); - - pub.allow.push(`project.${project_id}.>`); - sub.allow.push(`project.${project_id}.>`); - - pub.allow.push(`*.project-${project_id}.>`); - sub.allow.push(`*.project-${project_id}.>`); - - // The unique project-wide kv store: - pub.allow.push(`$JS.*.*.*.KV_project-${project_id}`); - pub.allow.push(`$JS.*.*.*.KV_project-${project_id}.>`); - - // this FC is needed for "flow control" - without this, you get random hangs forever at scale! - pub.allow.push(`$JS.FC.KV_project-${project_id}.>`); - - // The unique project-wide stream: - pub.allow.push(`$JS.*.*.*.project-${project_id}`); - pub.allow.push(`$JS.*.*.*.project-${project_id}.>`); - pub.allow.push(`$JS.*.*.*.*.project-${project_id}.>`); - return { pub, sub }; -} - -function accountPermissions(account_id: string) { - const { pub, sub } = commonPermissions({ account_id }); - sub.allow.push(`*.account-${account_id}.>`); - pub.allow.push(`*.account-${account_id}.>`); - - // the account-specific kv store: - pub.allow.push(`$JS.*.*.*.KV_account-${account_id}`); - pub.allow.push(`$JS.*.*.*.KV_account-${account_id}.>`); - - // the account-specific stream: - // (not used yet at all!) - pub.allow.push(`$JS.*.*.*.account-${account_id}`); - pub.allow.push(`$JS.*.*.*.account-${account_id}.>`); - pub.allow.push(`$JS.*.*.*.*.account-${account_id}`); - pub.allow.push(`$JS.*.*.*.*.account-${account_id}.>`); - - sub.allow.push(`account.${account_id}.>`); - pub.allow.push(`account.${account_id}.>`); - - // this FC is needed for "flow control" - without this, you get random hangs forever at scale! - pub.allow.push(`$JS.FC.KV_account-${account_id}.>`); - return { pub, sub }; -} - -// A CoCalc User is (so far): a project or account or a hub (not covered here). -type CoCalcUser = - | { - account_id: string; - project_id?: string; - } - | { - account_id?: string; - project_id: string; - }; - -function getCoCalcUserType({ - account_id, - project_id, -}: CoCalcUser): "account" | "project" { - if (account_id) { - if (project_id) { - throw Error("exactly one of account_id or project_id must be specified"); - } - return "account"; - } - if (project_id) { - return "project"; - } - throw Error("account_id or project_id must be specified"); -} - -function getCoCalcUserId({ account_id, project_id }: CoCalcUser): string { - if (account_id) { - if (project_id) { - throw Error("exactly one of account_id or project_id must be specified"); - } - return account_id; - } - if (project_id) { - return project_id; - } - throw Error("account_id or project_id must be specified"); -} diff --git a/src/packages/server/nats/auth/stress.ts b/src/packages/server/nats/auth/stress.ts deleted file mode 100644 index bf1222db2e..0000000000 --- a/src/packages/server/nats/auth/stress.ts +++ /dev/null @@ -1,15 +0,0 @@ -/* -Tools for stress testing nats so we understand it better. - -NOTHING USEFUL HERE NOW -*/ - -export function intToUuid(n) { - const base8 = n.toString(8); - const padded = base8.padStart(32, "0"); - return `${padded.slice(0, 8)}-${padded.slice(8, 12)}-${padded.slice(12, 16)}-${padded.slice(16, 20)}-${padded.slice(20, 32)}`; -} - -export function progress({ n, stop }) { - console.log(`${n}/${stop}`); -} diff --git a/src/packages/server/nats/auth/validate.ts b/src/packages/server/nats/auth/validate.ts deleted file mode 100644 index c1eeeaae62..0000000000 --- a/src/packages/server/nats/auth/validate.ts +++ /dev/null @@ -1,88 +0,0 @@ -import getPool from "@cocalc/database/pool"; -import { getAccountWithApiKey } from "@cocalc/server/api/manage"; -import { subsetCollaboratorMulti } from "@cocalc/server/projects/is-collaborator"; -import { getAccountIdFromRememberMe } from "@cocalc/server/auth/get-account"; -import { getRememberMeHashFromCookieValue } from "@cocalc/server/auth/remember-me"; - -// if throw error or not return true, then validation fails. -// success = NOT throwing error and returning true. - -export async function validate({ - account_id, - project_id, - requested_project_ids, - auth_token, -}: { - account_id?: string; - project_id?: string; - requested_project_ids?: string[]; - auth_token?: string; -}): Promise<{ project_ids?: string[] }> { - if (account_id && project_id) { - throw Error("exactly one of account_id and project_id must be specified"); - } - if (!auth_token) { - throw Error("auth_token must be specified"); - } - - // are they who they say they are? - await assertValidUser({ account_id, project_id, auth_token }); - - // we now know that auth_token provides they are either project_id or account_id. - // what about requested_project_ids? - if ( - !requested_project_ids || - requested_project_ids.length == 0 || - project_id - ) { - // none requested or is a project - return {}; - } - - if (!account_id) { - throw Error("bug"); - } - const project_ids = await subsetCollaboratorMulti({ - account_id, - project_ids: requested_project_ids, - }); - return { project_ids }; -} - -async function assertValidUser({ auth_token, project_id, account_id }) { - if (auth_token?.startsWith("sk-") || auth_token?.startsWith("sk_")) { - // auth_token is presumably an api key - const a = await getAccountWithApiKey(auth_token); - if (project_id && a?.project_id == project_id) { - return; - } else if (account_id && a?.account_id == account_id) { - return; - } - throw Error( - `auth_token valid for ${JSON.stringify(a)} by does not match ${project_id} or ${account_id}`, - ); - } - if (project_id) { - if ((await getProjectSecretToken(project_id)) == auth_token) { - return; - } - } - if (account_id) { - // maybe auth_token is a valid remember me browser cookie? - const hash = getRememberMeHashFromCookieValue(auth_token); - if (hash && account_id == (await getAccountIdFromRememberMe(hash))) { - return; - } - } - // nothing above matches, so FAIL! - throw Error("invalid auth_token"); -} - -async function getProjectSecretToken(project_id): Promise { - const pool = getPool(); - const { rows } = await pool.query( - "select status#>'{secret_token}' as secret_token from projects where project_id=$1", - [project_id], - ); - return rows[0]?.secret_token; -} diff --git a/src/packages/server/nats/configuration.ts b/src/packages/server/nats/configuration.ts deleted file mode 100644 index da067389ac..0000000000 --- a/src/packages/server/nats/configuration.ts +++ /dev/null @@ -1,50 +0,0 @@ -/* -Load NATS configuration from the database, in case anything is set there. -*/ - -import getPool from "@cocalc/database/pool"; -import { - setNatsPassword, - setNatsServer, - setNatsPort, - setNatsWebsocketPort, - setNatsAuthCalloutNSeed, - setNatsAuthCalloutXSeed, -} from "@cocalc/backend/data"; - -export async function loadNatsConfiguration() { - const pool = getPool(); - const { rows } = await pool.query( - "SELECT name, value FROM server_settings WHERE name=ANY($1)", - [ - [ - "nats_password", - "nats_auth_nseed", - "nats_auth_xseed", - "nats_port", - "nats_ws_port", - "nats_server", - ], - ], - ); - for (const { name, value } of rows) { - if (!value) { - continue; - } - if (name == "nats_password") { - setNatsPassword(value.trim()); - } else if (name == "nats_auth_nseed") { - setNatsAuthCalloutNSeed(value.trim()); - } else if (name == "nats_auth_xseed") { - setNatsAuthCalloutXSeed(value.trim()); - } else if (name == "nats_server") { - setNatsServer(value.trim()); - } else if (name == "nats_port") { - setNatsPort(value.trim()); - } else if (name == "nats_ws_port") { - setNatsWebsocketPort(value.trim()); - } else { - throw Error("bug"); - } - } -} diff --git a/src/packages/server/nats/index.ts b/src/packages/server/nats/index.ts deleted file mode 100644 index ca3ea7af4c..0000000000 --- a/src/packages/server/nats/index.ts +++ /dev/null @@ -1,39 +0,0 @@ -import getLogger from "@cocalc/backend/logger"; -import { initAPI } from "./api"; -import { init as initDatabase } from "@cocalc/database/nats/changefeeds"; -import { init as initChangefeedServer } from "@cocalc/database/nats/changefeed-api"; -import { init as initLLM } from "./llm"; -import { init as initAuth } from "./auth"; -import { init as initTieredStorage } from "./tiered-storage/api"; -import { loadNatsConfiguration } from "./configuration"; -import { createTimeService } from "@cocalc/nats/service/time"; - -export { loadNatsConfiguration }; - -const logger = getLogger("server:nats"); - -export async function initNatsDatabaseServer() { - await loadNatsConfiguration(); - // do NOT await initDatabase - initDatabase(); -} - -export async function initNatsChangefeedServer() { - await loadNatsConfiguration(); - // do NOT await initDatabase - initChangefeedServer(); -} - -export async function initNatsTieredStorage() { - await loadNatsConfiguration(); - initTieredStorage(); -} - -export async function initNatsServer() { - logger.debug("initializing nats cocalc hub server"); - await loadNatsConfiguration(); - initAPI(); - await initAuth(); - await initLLM(); - createTimeService(); -} diff --git a/src/packages/server/nats/system.ts b/src/packages/server/nats/system.ts deleted file mode 100644 index d23fdcb70f..0000000000 --- a/src/packages/server/nats/system.ts +++ /dev/null @@ -1,22 +0,0 @@ -/* -This seems like it will be really useful... but we're not -using it yet. -*/ - -import { SystemKv } from "@cocalc/nats/system"; -import { JSONCodec } from "nats"; -import { getConnection } from "@cocalc/backend/nats"; -import { sha1 } from "@cocalc/backend/misc_node"; -import { reuseInFlight } from "@cocalc/util/reuse-in-flight"; - -let cache: SystemKv | null = null; -export const systemKv = reuseInFlight(async () => { - if (cache != null) { - return cache; - } - const jc = JSONCodec(); - const nc = await getConnection(); - cache = new SystemKv({ jc, nc, sha1 }); - await cache.init(); - return cache; -}); diff --git a/src/packages/server/nats/tiered-storage/api.ts b/src/packages/server/nats/tiered-storage/api.ts deleted file mode 100644 index 8b4b93f25b..0000000000 --- a/src/packages/server/nats/tiered-storage/api.ts +++ /dev/null @@ -1,128 +0,0 @@ -/* -Our tiered storage code as a server on the nats network. - -DEVELOPMENT: - -If you're running a dev server, turn off the tiered storage service running in it by sending this message from a browser as an admin: - - await cc.client.nats_client.hub.system.terminate({service:'tiered-storage'}) - - -To start this service: - -> a = require('@cocalc/server/nats/tiered-storage'); a.init() - -or - - echo "require('@cocalc/server/nats/tiered-storage').init()" | node - - -To *USE* this service in another terminal: - -> require('@cocalc/backend/nats'); c = require('@cocalc/nats/tiered-storage/client') -{ - state: [AsyncFunction: state], - restore: [AsyncFunction: restore], - archive: [AsyncFunction: archive], - backup: [AsyncFunction: backup], - info: [AsyncFunction: info] -} -> await c.info({project_id:'27cf0030-a9c8-4168-bc03-d0efb3d2269e'}) -{ - subject: 'tiered-storage.project-27cf0030-a9c8-4168-bc03-d0efb3d2269e.api' -} -*/ - -import { - type TieredStorage as TieredStorageInterface, - type Info, - init as initServer, - terminate, -} from "@cocalc/nats/tiered-storage/server"; -import { type Location } from "@cocalc/nats/types"; -import { type LocationType } from "./types"; -import { backupProject, backupAccount } from "./backup"; -import { restoreProject, restoreAccount } from "./restore"; -import { archiveProject, archiveAccount } from "./archive"; -import { getProjectInfo, getAccountInfo } from "./info"; -import { isValidUUID } from "@cocalc/util/misc"; -import "@cocalc/backend/nats"; -import getLogger from "@cocalc/backend/logger"; - -const logger = getLogger("tiered-storage:api"); - -export { terminate }; - -export async function init() { - logger.debug("init"); - const ts = new TieredStorage(); - initServer(ts); -} - -function getType({ account_id, project_id }: Location): LocationType { - if (account_id) { - if (project_id) { - throw Error( - "exactly one of account_id or project_id may be specified but both are", - ); - } - if (!isValidUUID(account_id)) { - throw Error("account_id must be a valid uuid"); - } - return "account"; - } else if (project_id) { - if (!isValidUUID(project_id)) { - throw Error("project_id must be a valid uuid"); - } - return "project"; - } else { - throw Error( - "exactly one of account_id or project_id may be specified but neither are", - ); - } -} - -class TieredStorage implements TieredStorageInterface { - info = async (location: Location): Promise => { - const type = getType(location); - if (type == "account") { - return await getAccountInfo(location as { account_id: string }); - } else if (type == "project") { - return await getProjectInfo(location as { project_id: string }); - } - throw Error("invalid type"); - }; - - restore = async (location: Location): Promise => { - const type = getType(location); - if (type == "account") { - return await restoreAccount(location as { account_id: string }); - } else if (type == "project") { - return await restoreProject(location as { project_id: string }); - } - throw Error("invalid type"); - }; - - archive = async (location: Location): Promise => { - const type = getType(location); - if (type == "account") { - return await archiveAccount(location as { account_id: string }); - } else if (type == "project") { - return await archiveProject(location as { project_id: string }); - } - throw Error("invalid type"); - }; - - backup = async (location: Location): Promise => { - const type = getType(location); - if (type == "account") { - return await backupAccount(location as { account_id: string }); - } else if (type == "project") { - return await backupProject(location as { project_id: string }); - } - throw Error("invalid type"); - }; - - // shut this server down (no-op right now) - close = async (): Promise => {}; -} diff --git a/src/packages/server/nats/tiered-storage/archive.ts b/src/packages/server/nats/tiered-storage/archive.ts deleted file mode 100644 index 9c9caca019..0000000000 --- a/src/packages/server/nats/tiered-storage/archive.ts +++ /dev/null @@ -1,87 +0,0 @@ -import { executeCode } from "@cocalc/backend/execute-code"; -import { natsCoCalcUserEnv } from "@cocalc/backend/nats/cli"; -import { backupStream, backupKV, backupLocation } from "./backup"; -import { restoreKV } from "./restore"; -import type { LocationType } from "./types"; -import getLogger from "@cocalc/backend/logger"; - -const logger = getLogger("tiered-storage:archive"); - -export async function rmStream(name: string) { - // TODO: probably this should be done via the API - const { exit_code, stderr } = await executeCode({ - command: "nats", - args: ["stream", "rm", "-f", name], - err_on_exit: false, - env: natsCoCalcUserEnv(), - }); - if (exit_code) { - if (stderr.includes("stream not found")) { - return; - } - throw Error(stderr); - } -} - -export async function archiveStream(name: string) { - logger.debug("archive", { name }); - const output = await backupStream(name); - await rmStream(name); - return output; -} - -export async function rmKV(name: string) { - // TODO: probably this should be done via the API - const { exit_code, stderr } = await executeCode({ - command: "nats", - args: ["kv", "del", "-f", name], - err_on_exit: false, - env: natsCoCalcUserEnv(), - }); - if (exit_code) { - if (stderr.includes("bucket not found")) { - return; - } - throw Error(stderr); - } -} - -export async function archiveKV(name: string) { - const output = await backupKV(name); - await rmKV(name); - return output; -} - -export async function archiveLocation({ - user_id, - type, -}: { - user_id: string; - type: LocationType; -}) { - const output = await backupLocation({ user_id, type }); - const name = `${type}-${user_id}`; - await rmKV(name); - try { - await rmStream(name); - } catch (err) { - // try to roll back to valid state: - logger.debug( - `unexpected error archiving -- attempting roll back -- ${err} `, - { - name, - }, - ); - await restoreKV(name); - throw err; - } - return output; -} - -export async function archiveProject({ project_id }: { project_id: string }) { - return await archiveLocation({ user_id: project_id, type: "project" }); -} - -export async function archiveAccount({ account_id }: { account_id: string }) { - return await archiveLocation({ user_id: account_id, type: "account" }); -} diff --git a/src/packages/server/nats/tiered-storage/backup.ts b/src/packages/server/nats/tiered-storage/backup.ts deleted file mode 100644 index 75c5eabd1b..0000000000 --- a/src/packages/server/nats/tiered-storage/backup.ts +++ /dev/null @@ -1,58 +0,0 @@ -import { executeCode } from "@cocalc/backend/execute-code"; -import { natsBackup } from "@cocalc/backend/data"; -import { join } from "path"; -import mkdirp from "mkdirp"; -import { natsCoCalcUserEnv } from "@cocalc/backend/nats/cli"; -import getLogger from "@cocalc/backend/logger"; - -const logger = getLogger("tiered-storage:backup"); - -export async function backupStream(name: string) { - logger.debug("backup stream", { name }); - await mkdirp(join(natsBackup, name)); - const { stdout, stderr, exit_code } = await executeCode({ - command: "nats", - args: [ - "stream", - "backup", - "--no-progress", - "--no-consumers", - name, - join(natsBackup, name), - ], - err_on_exit: false, - env: { ...process.env, ...natsCoCalcUserEnv() }, - }); - if (exit_code) { - if (stderr.includes("stream not found")) { - return; - } else { - throw Error(stderr); - } - } - return `${stdout}\n${stderr}`; -} - -export async function backupKV(name: string) { - return await backupStream(`KV_${name}`); -} - -export async function backupLocation({ - user_id, - type, -}: { - user_id: string; - type: "account" | "project"; -}) { - const name = `${type}-${user_id}`; - await backupKV(name); - await backupStream(name); -} - -export async function backupProject({ project_id }: { project_id: string }) { - return await backupLocation({ user_id: project_id, type: "project" }); -} - -export async function backupAccount({ account_id }: { account_id: string }) { - return await backupLocation({ user_id: account_id, type: "account" }); -} diff --git a/src/packages/server/nats/tiered-storage/clean.ts b/src/packages/server/nats/tiered-storage/clean.ts deleted file mode 100644 index 1e50f2ec18..0000000000 --- a/src/packages/server/nats/tiered-storage/clean.ts +++ /dev/null @@ -1,141 +0,0 @@ -/* -Archive inactive things to save on resources. -*/ - -import { getKvManager, getStreamManager } from "./info"; -import "@cocalc/backend/nats"; -import { isValidUUID } from "@cocalc/util/misc"; -import getLogger from "@cocalc/backend/logger"; -import { archiveProject, archiveAccount } from "./archive"; - -const logger = getLogger("tiered-storage:clean"); -const log = (...args) => { - logger.debug(...args); - console.log("tiered-storage:clean: ", ...args); -}; - -const DAY = 1000 * 60 * 60 * 24; - -const DEFAULT_DAYS = 7; -const MIN_DAYS = 3; - -function ageToTimestamp(days: number) { - return Date.now() - days * DAY; -} - -function isProjectOrAccount(name) { - if (!(name.startsWith("account-") || name.startsWith("project-"))) { - return false; - } - if (!isValidUUID(name.slice(-36))) { - return false; - } - return true; -} - -export async function getOldKvs({ - days = DEFAULT_DAYS, -}: { - days?: number; -} = {}) { - const cutoff = ageToTimestamp(days); - const kvm = await getKvManager(); - const names: string[] = []; - for await (const { si } of kvm.list()) { - if (!si.config.name.startsWith("KV_")) { - continue; - } - const name = si.config.name.slice("KV_".length); - if (!isProjectOrAccount(name)) { - continue; - } - const { last_ts } = si.state; - const last = last_ts.startsWith("0001") ? 0 : new Date(last_ts).valueOf(); - if (last <= cutoff) { - names.push(name); - } - } - return names; -} - -export async function getOldStreams({ - days = DEFAULT_DAYS, -}: { - days?: number; -} = {}) { - const cutoff = ageToTimestamp(days); - const jsm = await getStreamManager(); - const names: string[] = []; - for await (const si of jsm.streams.list()) { - const name = si.config.name; - if (!isProjectOrAccount(name)) { - continue; - } - if (name.startsWith("KV_")) { - continue; - } - const { last_ts } = si.state; - const last = last_ts.startsWith("0001") ? 0 : new Date(last_ts).valueOf(); - if (last <= cutoff) { - names.push(name); - } - } - return names; -} - -export async function getOldProjectsAndAccounts({ - days = DEFAULT_DAYS, -}: { - days?: number; -} = {}) { - const kvs = await getOldKvs({ days }); - const streams = await getOldStreams({ days }); - const projects = new Set(); - const accounts = new Set(); - for (const kv of kvs.concat(streams)) { - if (kv.startsWith("account")) { - accounts.add(kv.slice("account-".length)); - } - if (kv.startsWith("project")) { - projects.add(kv.slice("project-".length)); - } - } - return { - accounts: Array.from(accounts).sort(), - projects: Array.from(projects).sort(), - }; -} - -export async function archiveInactive({ - days = DEFAULT_DAYS, - force = false, - dryRun = true, -}: { - days?: number; - force?: boolean; - dryRun?: boolean; -} = {}) { - log("archiveInactive", { days, force, dryRun }); - // step 1 -- get all streams and kv in nats - if (days < MIN_DAYS && !force) { - throw Error(`days is < ${MIN_DAYS} day, which is very suspicious!`); - } - - const { accounts, projects } = await getOldProjectsAndAccounts({ days }); - log( - `archiveInactive: got ${accounts.length} accounts and ${projects.length} projects`, - ); - if (dryRun) { - log(`archiveInactive: dry run so not doing`); - return; - } - - for (const account_id of accounts) { - log(`archiving account ${account_id}`); - await archiveAccount({ account_id }); - } - for (const project_id of projects) { - log(`archiving project ${project_id}`); - await archiveProject({ project_id }); - } -} diff --git a/src/packages/server/nats/tiered-storage/index.ts b/src/packages/server/nats/tiered-storage/index.ts deleted file mode 100644 index bfcd008f1d..0000000000 --- a/src/packages/server/nats/tiered-storage/index.ts +++ /dev/null @@ -1,15 +0,0 @@ -export { init } from "./api"; - -export { backupStream, backupKV, backupProject, backupAccount } from "./backup"; -export { - restoreStream, - restoreKV, - restoreProject, - restoreAccount, -} from "./restore"; -export { - archiveStream, - archiveKV, - archiveProject, - archiveAccount, -} from "./archive"; diff --git a/src/packages/server/nats/tiered-storage/info.ts b/src/packages/server/nats/tiered-storage/info.ts deleted file mode 100644 index 6d1d33ef60..0000000000 --- a/src/packages/server/nats/tiered-storage/info.ts +++ /dev/null @@ -1,98 +0,0 @@ -import { type Info } from "@cocalc/nats/tiered-storage/server"; -import { - jetstreamManager, - type JetStreamManager, - type StreamInfo, -} from "@nats-io/jetstream"; -import { Kvm } from "@nats-io/kv"; -import { getConnection } from "@cocalc/nats/client"; -import { natsBackup } from "@cocalc/backend/data"; -import { join } from "path"; -import { readFile } from "fs/promises"; - -let jsm: null | JetStreamManager = null; -export async function getStreamManager(): Promise { - if (jsm == null) { - jsm = await jetstreamManager(await getConnection()); - } - return jsm; -} - -export async function getNatsStreamInfo(stream: string): Promise { - const jsm = await getStreamManager(); - try { - return await jsm.streams.info(stream); - } catch (err) { - if (err.status == 404) { - // the stream simply doesn't exist -- not just some weird problem contacting the api server - return null; - } - throw err; - } -} - -let kvm: null | Kvm = null; -export async function getKvManager(): Promise { - if (kvm == null) { - kvm = new Kvm(await getConnection()); - } - return kvm; -} - -export async function getNatsKvInfo(bucket: string): Promise { - const kvm = await getKvManager(); - try { - const kv = await kvm.open(bucket); - const status = await kv.status(); - // @ts-ignore - return status.si; - } catch (err) { - if (err.status == 404) { - // the kv simply doesn't exist -- *not* just some weird problem contacting the api server - return null; - } - throw err; - } -} - -async function getBackupInfo(name: string) { - const path = join(natsBackup, name, "backup.json"); - try { - const content = await readFile(path); - return JSON.parse(content.toString()); - } catch (err) { - if (err.code == "ENOENT") { - return null; - } - throw err; - } -} - -async function getInfo({ type, user_id }) { - return { - nats: { - stream: await getNatsStreamInfo(`${type}-${user_id}`), - kv: await getNatsKvInfo(`${type}-${user_id}`), - }, - backup: { - stream: await getBackupInfo(`${type}-${user_id}`), - kv: await getBackupInfo(`KV_${type}-${user_id}`), - }, - }; -} - -export async function getProjectInfo({ project_id }): Promise { - const info = await getInfo({ type: "project", user_id: project_id }); - return { - location: { project_id }, - ...info, - }; -} - -export async function getAccountInfo({ account_id }): Promise { - const info = await getInfo({ type: "account", user_id: account_id }); - return { - location: { account_id }, - ...info, - }; -} diff --git a/src/packages/server/nats/tiered-storage/restore.ts b/src/packages/server/nats/tiered-storage/restore.ts deleted file mode 100644 index 6e1387df64..0000000000 --- a/src/packages/server/nats/tiered-storage/restore.ts +++ /dev/null @@ -1,90 +0,0 @@ -import { executeCode } from "@cocalc/backend/execute-code"; -import { natsBackup } from "@cocalc/backend/data"; -import { join } from "path"; -import { natsCoCalcUserEnv } from "@cocalc/backend/nats/cli"; -import { rmKV, rmStream } from "./archive"; -import { exists } from "@cocalc/backend/misc/async-utils-node"; -import type { LocationType } from "./types"; -import { getNatsStreamInfo, getNatsKvInfo } from "./info"; -import { reuseInFlight } from "@cocalc/util/reuse-in-flight"; -import getLogger from "@cocalc/backend/logger"; -const logger = getLogger("tiered-storage:restore"); - -export const restoreStream = reuseInFlight(async (name: string) => { - if (!(await exists(join(natsBackup, name, "backup.json")))) { - // no data about this stream - non-fatal, since this is how - // we backup never-created streams... and what else are we - // going to do? - logger.debug("restoreStream", { name }, " no backup data"); - return; - } - if (await getNatsStreamInfo(name)) { - // stream already exists in nats - logger.debug("restoreStream", { name }, " stream already exists in nats"); - return; - } - logger.debug("restoreStream", { name }, " restoring from backup"); - const { stdout, stderr } = await executeCode({ - command: "nats", - args: ["stream", "restore", "--no-progress", join(natsBackup, name)], - err_on_exit: true, - env: natsCoCalcUserEnv(), - }); - return `${stderr}\n${stdout}`; -}); - -export const restoreKV = reuseInFlight(async (name: string) => { - if (await getNatsKvInfo(name)) { - // kv already exists in nats - return; - } - return await restoreStream(`KV_${name}`); -}); - -export const restoreLocation = reuseInFlight( - async ({ - user_id, - type, - force, - }: { - user_id: string; - type: LocationType; - force?: boolean; - }) => { - const name = `${type}-${user_id}`; - if (force) { - try { - await rmKV(name); - } catch (err) { - console.log(`${err}`); - } - try { - await rmStream(name); - } catch (err) { - console.log(`${err}`); - } - } - await restoreKV(name); - await restoreStream(name); - }, -); - -export async function restoreProject({ - project_id, - force, -}: { - project_id: string; - force?: boolean; -}) { - return await restoreLocation({ user_id: project_id, type: "project", force }); -} - -export async function restoreAccount({ - account_id, - force, -}: { - account_id: string; - force?: boolean; -}) { - return await restoreLocation({ user_id: account_id, type: "account", force }); -} diff --git a/src/packages/server/nats/tiered-storage/types.ts b/src/packages/server/nats/tiered-storage/types.ts deleted file mode 100644 index 6dbb1b96ac..0000000000 --- a/src/packages/server/nats/tiered-storage/types.ts +++ /dev/null @@ -1 +0,0 @@ -export type LocationType = "project" | "account"; diff --git a/src/packages/server/package.json b/src/packages/server/package.json index 0c7233d84d..f52dc161a4 100644 --- a/src/packages/server/package.json +++ b/src/packages/server/package.json @@ -12,10 +12,10 @@ "./compute/maintenance": "./dist/compute/maintenance/index.js", "./database/*": "./dist/database/*.js", "./mentions/*": "./dist/mentions/*.js", - "./nats": "./dist/nats/index.js", - "./nats/api": "./dist/nats/api/index.js", - "./nats/auth": "./dist/nats/auth/index.js", - "./nats/tiered-storage": "./dist/nats/tiered-storage/index.js", + "./conat": "./dist/conat/index.js", + "./conat/api": "./dist/conat/api/index.js", + "./conat/auth": "./dist/conat/auth/index.js", + "./conat/socketio": "./dist/conat/socketio/index.js", "./purchases/*": "./dist/purchases/*.js", "./stripe/*": "./dist/stripe/*.js", "./licenses/purchase": "./dist/licenses/purchase/index.js", @@ -36,15 +36,16 @@ "build": "../node_modules/.bin/tsc --build", "tsc": "../node_modules/.bin/tsc --watch --pretty --preserveWatchOutput ", "test": "TZ=UTC jest --forceExit --runInBand", - "prepublishOnly": "test" + "prepublishOnly": "test", + "conat-server": "node ./dist/conat/socketio/start-server.js" }, "author": "SageMath, Inc.", "license": "SEE LICENSE.md", "dependencies": { "@cocalc/backend": "workspace:*", + "@cocalc/conat": "workspace:*", "@cocalc/database": "workspace:*", "@cocalc/gcloud-pricing-calculator": "^1.16.0", - "@cocalc/nats": "workspace:*", "@cocalc/server": "workspace:*", "@cocalc/util": "workspace:*", "@google-ai/generativelanguage": "^3.1.0", @@ -61,12 +62,6 @@ "@langchain/mistralai": "^0.2.0", "@langchain/ollama": "^0.2.0", "@langchain/openai": "^0.5.5", - "@nats-io/jetstream": "3.0.0", - "@nats-io/jwt": "0.0.10-5", - "@nats-io/kv": "3.0.0", - "@nats-io/nats-core": "3.0.0", - "@nats-io/nkeys": "^2.0.3", - "@nats-io/services": "3.0.0", "@node-saml/passport-saml": "^5.0.1", "@passport-js/passport-twitter": "^1.0.8", "@passport-next/passport-google-oauth2": "^1.0.0", @@ -84,6 +79,7 @@ "base64-js": "^1.5.1", "bottleneck": "^2.19.5", "cloudflare": "^2.9.1", + "cookie": "^1.0.0", "cookies": "^0.8.0", "dayjs": "^1.11.11", "dot-object": "^2.1.5", @@ -101,7 +97,6 @@ "mkdirp": "^1.0.4", "ms": "2.1.2", "nanoid": "^3.3.8", - "nats": "^2.29.3", "node-zendesk": "^5.0.13", "nodemailer": "^6.9.16", "openai": "^4.95.1", @@ -121,6 +116,7 @@ "random-key": "^0.3.2", "safe-json-stringify": "^1.2.0", "sanitize-html": "^2.12.1", + "socket.io": "^4.8.1", "stripe": "^17.5.0", "uuid": "^8.3.2" }, diff --git a/src/packages/server/projects/control/stop-idle-projects.test.ts b/src/packages/server/projects/control/stop-idle-projects.test.ts index a167ce06c2..69926aa51f 100644 --- a/src/packages/server/projects/control/stop-idle-projects.test.ts +++ b/src/packages/server/projects/control/stop-idle-projects.test.ts @@ -8,6 +8,7 @@ import getPool, { initEphemeralDatabase } from "@cocalc/database/pool"; import { isValidUUID } from "@cocalc/util/misc"; import { test } from "./stop-idle-projects"; const { stopIdleProjects } = test; +import { delay } from "awaiting"; beforeAll(async () => { await initEphemeralDatabase(); @@ -45,7 +46,7 @@ describe("creates a project, set various parameters, and runs idle project funct await pool.query( `UPDATE projects SET run_quota='{"network": false, "cpu_limit": 1, "disk_quota": 3000, "privileged": false, "cpu_request": 0.02, "member_host": false, "dedicated_vm": false, "idle_timeout": 1800, "memory_limit": 1000, "always_running": false, "memory_request": 200, "dedicated_disks": []}', last_edited=NOW(), last_started=NOW(), state='{"state":"running"}' WHERE project_id=$1`, - [project_id] + [project_id], ); await stopIdleProjects(stopProject); expect(projectsThatGotStopped.has(project_id)).toBe(false); @@ -54,16 +55,19 @@ describe("creates a project, set various parameters, and runs idle project funct it("changes our project so that last_edited is an hour ago and last_started is an hour ago, and observe project gets stopped", async () => { await pool.query( `UPDATE projects SET last_edited=NOW()-interval '1 hour', last_started=NOW()-interval '1 hour' WHERE project_id=$1`, - [project_id] + [project_id], ); await stopIdleProjects(stopProject); + while (!projectsThatGotStopped.has(project_id)) { + await delay(30); + } expect(projectsThatGotStopped.has(project_id)).toBe(true); }); it("changes our project so that last_edited is an hour ago and last_started is a minute ago, and observe project does NOT get stopped", async () => { await pool.query( `UPDATE projects SET last_edited=NOW()-interval '1 hour', last_started=NOW()-interval '1 minute' WHERE project_id=$1`, - [project_id] + [project_id], ); reset(); await stopIdleProjects(stopProject); @@ -73,7 +77,7 @@ describe("creates a project, set various parameters, and runs idle project funct it("changes our project so that last_edited is a minute ago and last_started is an hour ago, and observe project does NOT get stopped", async () => { await pool.query( `UPDATE projects SET last_edited=NOW()-interval '1 minute', last_started=NOW()-interval '1 hour' WHERE project_id=$1`, - [project_id] + [project_id], ); reset(); await stopIdleProjects(stopProject); @@ -84,7 +88,7 @@ describe("creates a project, set various parameters, and runs idle project funct await pool.query( `UPDATE projects SET run_quota='{"network": false, "cpu_limit": 1, "disk_quota": 3000, "privileged": false, "cpu_request": 0.02, "member_host": false, "dedicated_vm": false, "idle_timeout": 1800, "memory_limit": 1000, "always_running": true, "memory_request": 200, "dedicated_disks": []}', last_edited=NOW()-interval '1 month', last_started=NOW()-interval '1 month' WHERE project_id=$1`, - [project_id] + [project_id], ); reset(); await stopIdleProjects(stopProject); @@ -95,11 +99,14 @@ describe("creates a project, set various parameters, and runs idle project funct await pool.query( `UPDATE projects SET run_quota='{"network": false, "cpu_limit": 1, "disk_quota": 3000, "privileged": false, "cpu_request": 0.02, "member_host": false, "dedicated_vm": false, "idle_timeout": 1800, "memory_limit": 1000, "always_running": false, "memory_request": 200, "dedicated_disks": []}', last_edited=NOW()-interval '1 month', last_started=NOW()-interval '1 month', state='{"state":"running"}' WHERE project_id=$1`, - [project_id] + [project_id], ); // first confirm stopProject2 will get called reset(); await stopIdleProjects(stopProject); + while (!projectsThatGotStopped.has(project_id)) { + await delay(30); + } expect(projectsThatGotStopped.has(project_id)).toBe(true); // now call again with error but doesn't break anything const stopProject2 = async (project_id) => { @@ -108,6 +115,9 @@ describe("creates a project, set various parameters, and runs idle project funct }; reset(); await stopIdleProjects(stopProject2); + while (!projectsThatGotStopped.has(project_id)) { + await delay(30); + } expect(projectsThatGotStopped.has(project_id)).toBe(true); }); }); diff --git a/src/packages/server/projects/control/util.ts b/src/packages/server/projects/control/util.ts index c88b2b4ccc..32e5e9c501 100644 --- a/src/packages/server/projects/control/util.ts +++ b/src/packages/server/projects/control/util.ts @@ -13,9 +13,8 @@ import { getUid } from "@cocalc/backend/misc"; import base_path from "@cocalc/backend/base-path"; import { db } from "@cocalc/database"; import { getProject } from "."; +import { conatPath, conatServer } from "@cocalc/backend/data"; import { pidFilename } from "@cocalc/util/project-info"; -import { getServerSettings } from "@cocalc/database/settings/server-settings"; -import { natsPorts, natsServer } from "@cocalc/backend/data"; import { executeCode } from "@cocalc/backend/execute-code"; const logger = getLogger("project-control:util"); @@ -249,19 +248,6 @@ export function sanitizedEnv(env: { [key: string]: string | undefined }): { return env2 as { [key: string]: string }; } -async function natsWebsocketServer() { - const { nats_project_server } = await getServerSettings(); - if (nats_project_server) { - if (nats_project_server.startsWith("ws")) { - if (base_path.length <= 1) { - return nats_project_server; - } - return `${nats_project_server}${base_path}/nats`; - } - } - return `${natsServer}:${natsPorts.server}`; -} - export async function getEnvironment( project_id: string, ): Promise<{ [key: string]: any }> { @@ -291,8 +277,8 @@ export async function getEnvironment( USER, COCALC_EXTRA_ENV: extra_env, PATH: `${HOME}/bin:${HOME}/.local/bin:${process.env.PATH}`, - // url of the NATS websocket server the project will connect to: - NATS_SERVER: await natsWebsocketServer(), + CONAT_SERVER: conatServer, + CONAT_PATH: conatPath, }, }; } diff --git a/src/packages/server/test/setup.js b/src/packages/server/test/setup.js index ee2e6cce0d..68b402d5f5 100644 --- a/src/packages/server/test/setup.js +++ b/src/packages/server/test/setup.js @@ -5,3 +5,5 @@ process.env.PGDATABASE = "smc_ephemeral_testing_database"; // checked for in some code to behave differently while running unit tests. process.env.COCALC_TEST_MODE = true; + +process.env.COCALC_MODE = "single-user"; diff --git a/src/packages/server/tsconfig.json b/src/packages/server/tsconfig.json index d08ee56187..3fb51ed22e 100644 --- a/src/packages/server/tsconfig.json +++ b/src/packages/server/tsconfig.json @@ -10,7 +10,7 @@ "references": [ { "path": "../backend" }, { "path": "../database" }, - { "path": "../nats" }, + { "path": "../conat" }, { "path": "../util" } ] } diff --git a/src/packages/sync-fs/README.md b/src/packages/sync-fs/README.md index 8445bee043..eac355bc7f 100644 --- a/src/packages/sync-fs/README.md +++ b/src/packages/sync-fs/README.md @@ -26,7 +26,7 @@ Running 'find' as a subcommand seems optimal, taking a few KB memory and about - TODO: This sync protocol does NOT deal with file permissions, e.g., changing a file to be executable when it wasn't, since that doesn't update the mtime. See https://github.com/sagemathinc/cocalc/issues/7342 - Dependencies: this doesn't depend on @cocalc/project, but you do need to import -say @cocalc/project/nats before using this code, so that the client process knows +say @cocalc/project/conat before using this code, so that the client process knows how to connect to NATS. ## ALGORITHM diff --git a/src/packages/sync-fs/jest.config.js b/src/packages/sync-fs/jest.config.js index 8d924c848e..7f9768234b 100644 --- a/src/packages/sync-fs/jest.config.js +++ b/src/packages/sync-fs/jest.config.js @@ -2,4 +2,12 @@ module.exports = { preset: "ts-jest", testEnvironment: "node", testMatch: ["**/?(*.)+(spec|test).ts?(x)"], + transform: { + ".*\\.tsx?$": [ + "ts-jest", + { + isolatedModules: true, + }, + ], + }, }; diff --git a/src/packages/sync-fs/lib/compressed-json.ts b/src/packages/sync-fs/lib/compressed-json.ts index d0a6543c9e..70eb5f4bf3 100644 --- a/src/packages/sync-fs/lib/compressed-json.ts +++ b/src/packages/sync-fs/lib/compressed-json.ts @@ -1,6 +1,14 @@ /* Compress and deocmpression JSON to a Buffer. This buffer *is* suitable to write to an lz4 file and lz4 -d will work with it. + +NOTE: I was worried because lz4-napi's compressSync and uncompressSync +seem to have a MASSIVE memory leak. I tested these functions via the +following, and did NOT observe a memory leak. So it's maybe just a problem +with their sync functions, fortunately. + +a = require('@cocalc/sync-fs/lib/compressed-json') +t=Date.now(); for(i=0;i<10000;i++) { await a.fromCompressedJSON(await a.toCompressedJSON({a:'x'.repeat(1000000)}))}; Date.now()-t */ import { compressFrame, decompressFrame } from "lz4-napi"; diff --git a/src/packages/sync-fs/lib/index.ts b/src/packages/sync-fs/lib/index.ts index f041ffa360..5e5d6c22d9 100644 --- a/src/packages/sync-fs/lib/index.ts +++ b/src/packages/sync-fs/lib/index.ts @@ -28,7 +28,7 @@ import { executeCode } from "@cocalc/backend/execute-code"; import { delete_files } from "@cocalc/backend/files/delete-files"; import { move_files } from "@cocalc/backend/files/move-files"; import { rename_file } from "@cocalc/backend/files/rename-file"; -import { initNatsClientService } from "./nats/syncfs-client"; +import { initConatClientService } from "./nats/syncfs-client"; import { initNatsServerService } from "./nats/syncfs-server"; const EXPLICIT_HIDDEN_EXCLUDES = [".cache", ".local"]; @@ -166,7 +166,7 @@ export class SyncFS { } init = async () => { - await this.initNatsService(); + await this.initConatService(); await this.mountUnionFS(); await this.bindMountExcludes(); await this.makeScratchDir(); @@ -798,9 +798,9 @@ export class SyncFS { } }; - initNatsService = async () => { + initConatService = async () => { if (this.role == "compute_server") { - this.natsService = await initNatsClientService({ + this.natsService = await initConatClientService({ syncfs: this, project_id: this.project_id, compute_server_id: this.compute_server_id, diff --git a/src/packages/sync-fs/lib/nats/syncfs-client.ts b/src/packages/sync-fs/lib/nats/syncfs-client.ts index 0865a0ca1a..badc313421 100644 --- a/src/packages/sync-fs/lib/nats/syncfs-client.ts +++ b/src/packages/sync-fs/lib/nats/syncfs-client.ts @@ -2,10 +2,10 @@ SyncFS Client Service, which runs on compute servers */ -import { createSyncFsClientService } from "@cocalc/nats/service/syncfs-client"; +import { createSyncFsClientService } from "@cocalc/conat/service/syncfs-client"; import { type SyncFS } from "../index"; -export async function initNatsClientService({ +export async function initConatClientService({ syncfs, compute_server_id, project_id, diff --git a/src/packages/sync-fs/lib/nats/syncfs-server.ts b/src/packages/sync-fs/lib/nats/syncfs-server.ts index f4ba57595a..9e6dcccbeb 100644 --- a/src/packages/sync-fs/lib/nats/syncfs-server.ts +++ b/src/packages/sync-fs/lib/nats/syncfs-server.ts @@ -2,7 +2,7 @@ SyncFS Server Service, which runs in the home base. */ -import { createSyncFsServerService } from "@cocalc/nats/service/syncfs-server"; +import { createSyncFsServerService } from "@cocalc/conat/service/syncfs-server"; import { type SyncFS } from "../index"; export async function initNatsServerService({ diff --git a/src/packages/sync-fs/package.json b/src/packages/sync-fs/package.json index c0bed88cf4..a8cf1f30d6 100644 --- a/src/packages/sync-fs/package.json +++ b/src/packages/sync-fs/package.json @@ -23,7 +23,7 @@ "@cocalc/api-client": "workspace:*", "@cocalc/backend": "workspace:*", "@cocalc/comm": "workspace:*", - "@cocalc/nats": "workspace:*", + "@cocalc/conat": "workspace:*", "@cocalc/sync-client": "workspace:*", "@cocalc/util": "workspace:*", "execa": "^8.0.1", diff --git a/src/packages/sync-fs/tsconfig.json b/src/packages/sync-fs/tsconfig.json index fc9b1d0f49..2290a242a5 100644 --- a/src/packages/sync-fs/tsconfig.json +++ b/src/packages/sync-fs/tsconfig.json @@ -10,7 +10,7 @@ { "path": "../sync-client" }, { "path": "../backend" }, { "path": "../comm" }, - { "path": "../nats" }, + { "path": "../conat" }, { "path": "../util" } ] } diff --git a/src/packages/sync/client/sync-client.ts b/src/packages/sync/client/sync-client.ts index c7e73123dd..3ca349bc78 100644 --- a/src/packages/sync/client/sync-client.ts +++ b/src/packages/sync/client/sync-client.ts @@ -21,7 +21,7 @@ import { } from "@cocalc/sync/table"; import synctable_project from "./synctable-project"; import type { Channel, AppClient } from "./types"; -import { getSyncDocType } from "@cocalc/nats/sync/syncdoc-info"; +import { getSyncDocType } from "@cocalc/conat/sync/syncdoc-info"; import { refCacheSync } from "@cocalc/util/refcache"; diff --git a/src/packages/sync/client/types.ts b/src/packages/sync/client/types.ts index dcea22e042..4793f493a8 100644 --- a/src/packages/sync/client/types.ts +++ b/src/packages/sync/client/types.ts @@ -1,9 +1,9 @@ import type { EventEmitter } from "events"; import type { CB } from "@cocalc/util/types/callback"; import type { - CallNatsServiceFunction, - CreateNatsServiceFunction, -} from "@cocalc/nats/service"; + CallConatServiceFunction, + CreateConatServiceFunction, +} from "@cocalc/conat/service"; // What we need the client to implement so we can use // it to support a table. @@ -21,8 +21,8 @@ export interface Client extends EventEmitter { touch_project: (project_id: string, compute_server_id?: number) => void; set_connected?: Function; is_deleted: (path: string, project_id: string) => true | false | undefined; - callNatsService?: CallNatsServiceFunction; - createNatsService?: CreateNatsServiceFunction; + callConatService?: CallConatServiceFunction; + createConatService?: CreateConatServiceFunction; client_id?: () => string | undefined; } diff --git a/src/packages/sync/editor/generic/ipywidgets-state.ts b/src/packages/sync/editor/generic/ipywidgets-state.ts index 61eadabf58..ff5818fad4 100644 --- a/src/packages/sync/editor/generic/ipywidgets-state.ts +++ b/src/packages/sync/editor/generic/ipywidgets-state.ts @@ -102,12 +102,9 @@ export class IpywidgetsState extends EventEmitter { this.syncdoc = syncdoc; this.client = client; this.create_synctable = create_synctable; - if (this.syncdoc.data_server == "project") { - // options only supported for project... - // ephemeral -- don't store longterm in database - // persistent -- doesn't automatically vanish when all browser clients disconnect - this.table_options = [{ ephemeral: true, persistent: true }]; - } + // [ ] TODO: once we have ephemeral kv + // this.table_options = [{ ephemeral: true }]; + this.table_options = []; this.gc = !DISABLE_GC && client.is_project() // no-op if not project or DISABLE_GC ? debounce(() => { diff --git a/src/packages/sync/editor/generic/legacy.ts b/src/packages/sync/editor/generic/legacy.ts index 70fa0647d6..ebf67ab7ed 100644 --- a/src/packages/sync/editor/generic/legacy.ts +++ b/src/packages/sync/editor/generic/legacy.ts @@ -3,7 +3,7 @@ Support legacy TimeTravel history from before the switch to NATS. */ import { type Client } from "./types"; -import { type DB } from "@cocalc/nats/hub-api/db"; +import { type DB } from "@cocalc/conat/hub-api/db"; export interface LegacyPatch { time: Date; @@ -30,7 +30,7 @@ export class LegacyHistory { path: string; }) { // this is only available on the frontend browser, which is all that matters. - this.db = (client as any).nats_client?.hub.db as any; + this.db = (client as any).conat_client?.hub.db as any; this.project_id = project_id; this.path = path; } @@ -57,8 +57,8 @@ export class LegacyHistory { return { patches: [], users: [] }; } const s = await this.db.getLegacyTimeTravelPatches({ - requestMany: true, // since response may be large - timeout: 60000, + // long timeout, since response may be large or take a while to pull out of cold storage + timeout: 90000, uuid: info.uuid, }); let patches; diff --git a/src/packages/sync/editor/generic/sync-doc.ts b/src/packages/sync/editor/generic/sync-doc.ts index 5a1dc48e44..e774ea0b92 100644 --- a/src/packages/sync/editor/generic/sync-doc.ts +++ b/src/packages/sync/editor/generic/sync-doc.ts @@ -19,7 +19,7 @@ EVENTS: - ... TODO */ -const USE_NATS = true; +const USE_CONAT = true; /* OFFLINE_THRESH_S - If the client becomes disconnected from the backend for more than this long then---on reconnect---do @@ -118,8 +118,8 @@ import { NATS_OPEN_FILE_TOUCH_INTERVAL } from "@cocalc/util/nats"; import mergeDeep from "@cocalc/util/immutable-deep-merge"; import { JUPYTER_SYNCDB_EXTENSIONS } from "@cocalc/util/jupyter/names"; import { LegacyHistory } from "./legacy"; -import { waitUntilConnected } from "@cocalc/nats/util"; -import { getLogger } from "@cocalc/nats/client"; +import { waitUntilConnected } from "@cocalc/conat/util"; +import { getLogger } from "@cocalc/conat/client"; export type State = "init" | "ready" | "closed"; export type DataServer = "project" | "database"; @@ -266,7 +266,7 @@ export class SyncDoc extends EventEmitter { // static because we want exactly one across all docs! private static computeServerManagerDoc?: SyncDoc; - private useNats: boolean; + private useConat: boolean; legacy: LegacyHistory; constructor(opts: SyncOpts) { @@ -305,7 +305,7 @@ export class SyncDoc extends EventEmitter { // NOTE: Do not use nats in test mode, since there we use a minimal // "fake" client that does all communication internally and doesn't // use nats. We also use this for the messages composer. - this.useNats = USE_NATS && !isTestClient(opts.client); + this.useConat = USE_CONAT && !isTestClient(opts.client); if (this.ephemeral) { // So the doctype written to the database reflects the // ephemeral state. Here ephemeral determines whether @@ -362,7 +362,7 @@ export class SyncDoc extends EventEmitter { //const t0 = new Date(); log("initializing all tables..."); - if (this.useNats) { + if (this.useConat) { await waitUntilConnected(); } await this.initAll(); @@ -617,7 +617,7 @@ export class SyncDoc extends EventEmitter { // table not initialized yet return; } - if (this.useNats) { + if (this.useConat) { const time = this.client.server_time().valueOf(); const x: { user_id: number; @@ -660,7 +660,7 @@ export class SyncDoc extends EventEmitter { set_cursor_locs: typeof this.setCursorLocsNoThrottle = throttle( this.setCursorLocsNoThrottle, - USE_NATS ? CURSOR_THROTTLE_NATS_MS : CURSOR_THROTTLE_MS, + USE_CONAT ? CURSOR_THROTTLE_NATS_MS : CURSOR_THROTTLE_MS, { leading: true, trailing: true, @@ -1238,7 +1238,7 @@ export class SyncDoc extends EventEmitter { // patches table uses the string_id, which is a SHA1 hash. private ensure_syncstring_exists_in_db = async (): Promise => { const dbg = this.dbg("ensure_syncstring_exists_in_db"); - if (this.useNats) { + if (this.useConat) { dbg("skipping -- no database"); return; } @@ -1278,7 +1278,7 @@ export class SyncDoc extends EventEmitter { this.assert_not_closed("synctable"); const dbg = this.dbg("synctable"); if ( - !this.useNats && + !this.useConat && !this.ephemeral && this.persistent && this.data_server == "project" @@ -1291,8 +1291,8 @@ export class SyncDoc extends EventEmitter { options.push({ ephemeral: true }); } let synctable; - if (this.useNats && query.patches) { - synctable = await this.client.synctable_nats(query, { + if (this.useConat && query.patches) { + synctable = await this.client.synctable_conat(query, { obj: { project_id: this.project_id, path: this.path, @@ -1302,8 +1302,8 @@ export class SyncDoc extends EventEmitter { desc: { path: this.path }, start_seq: this.last_seq, }); - } else if (this.useNats && query.syncstrings) { - synctable = await this.client.synctable_nats(query, { + } else if (this.useConat && query.syncstrings) { + synctable = await this.client.synctable_conat(query, { obj: { project_id: this.project_id, path: this.path, @@ -1313,8 +1313,8 @@ export class SyncDoc extends EventEmitter { immutable: true, desc: { path: this.path }, }); - } else if (this.useNats && query.ipywidgets) { - synctable = await this.client.synctable_nats(query, { + } else if (this.useConat && query.ipywidgets) { + synctable = await this.client.synctable_conat(query, { obj: { project_id: this.project_id, path: this.path, @@ -1328,8 +1328,8 @@ export class SyncDoc extends EventEmitter { limits: { max_age: 1000 * 60 * 60 * 24 }, desc: { path: this.path }, }); - } else if (this.useNats && (query.eval_inputs || query.eval_outputs)) { - synctable = await this.client.synctable_nats(query, { + } else if (this.useConat && (query.eval_inputs || query.eval_outputs)) { + synctable = await this.client.synctable_conat(query, { obj: { project_id: this.project_id, path: this.path, @@ -1340,8 +1340,8 @@ export class SyncDoc extends EventEmitter { limits: { max_age: 30000 }, desc: { path: this.path }, }); - } else if (this.useNats) { - synctable = await this.client.synctable_nats(query, { + } else if (this.useConat) { + synctable = await this.client.synctable_conat(query, { obj: { project_id: this.project_id, path: this.path, @@ -1947,8 +1947,8 @@ export class SyncDoc extends EventEmitter { dbg("done -- do not care about cursors for this syncdoc."); return; } - if (this.useNats) { - dbg("NATS cursors support using pub/sub"); + if (this.useConat) { + dbg("cursors broadcast using pub/sub"); this.cursors_table = await this.client.pubsub_nats({ project_id: this.project_id, path: this.path, diff --git a/src/packages/sync/editor/generic/types.ts b/src/packages/sync/editor/generic/types.ts index c7a5afd1dc..fb623a81a1 100644 --- a/src/packages/sync/editor/generic/types.ts +++ b/src/packages/sync/editor/generic/types.ts @@ -13,9 +13,9 @@ import { SyncTable } from "@cocalc/sync/table/synctable"; import type { ExecuteCodeOptionsWithCallback } from "@cocalc/util/types/execute-code"; import type { - CallNatsServiceFunction, - CreateNatsServiceFunction, -} from "@cocalc/nats/service"; + CallConatServiceFunction, + CreateConatServiceFunction, +} from "@cocalc/conat/service"; export interface Patch { // time = LOGICAL time of when patch made; this used to be ms since the epoch, but just @@ -128,10 +128,10 @@ export interface ProjectClient extends EventEmitter { id?: string, ) => Promise; - synctable_nats: (query: any, obj?) => Promise; + synctable_conat: (query: any, obj?) => Promise; pubsub_nats: (query: any, obj?) => Promise; - callNatsService?: CallNatsServiceFunction; - createNatsService?: CreateNatsServiceFunction; + callConatService?: CallConatServiceFunction; + createConatService?: CreateConatServiceFunction; // account_id or project_id or compute_server_id (encoded as a UUID - use decodeUUIDtoNum to decode) client_id: () => string; diff --git a/src/packages/sync/editor/string/test/client-test.ts b/src/packages/sync/editor/string/test/client-test.ts index ec11ed862f..08e542efcb 100644 --- a/src/packages/sync/editor/string/test/client-test.ts +++ b/src/packages/sync/editor/string/test/client-test.ts @@ -175,8 +175,8 @@ export class Client extends EventEmitter implements Client0 { throw Error("synctable_database: not implemented"); } - async synctable_nats(_query: any): Promise { - throw Error("synctable_nats: not implemented"); + async synctable_conat(_query: any): Promise { + throw Error("synctable_conat: not implemented"); } async pubsub_nats(_query: any): Promise { throw Error("pubsub_nats: not implemented"); diff --git a/src/packages/sync/jest.config.js b/src/packages/sync/jest.config.js index f8f66c15d0..1f71d32669 100644 --- a/src/packages/sync/jest.config.js +++ b/src/packages/sync/jest.config.js @@ -14,4 +14,12 @@ module.exports = { testPathIgnorePatterns: ["/node_modules/"], moduleFileExtensions: ["ts", "tsx", "js", "jsx", "json", "node"], setupFiles: ["./test/setup.js"], + transform: { + ".*\\.tsx?$": [ + "ts-jest", + { + isolatedModules: true, + }, + ], + }, }; diff --git a/src/packages/sync/package.json b/src/packages/sync/package.json index c9b1d056b7..f3f1a529a9 100644 --- a/src/packages/sync/package.json +++ b/src/packages/sync/package.json @@ -20,7 +20,7 @@ "keywords": ["cocalc", "realtime synchronization"], "license": "SEE LICENSE.md", "dependencies": { - "@cocalc/nats": "workspace:*", + "@cocalc/conat": "workspace:*", "@cocalc/sync": "workspace:*", "@cocalc/util": "workspace:*", "async": "^1.5.2", diff --git a/src/packages/sync/table/changefeed-nats2.ts b/src/packages/sync/table/changefeed-conat.ts similarity index 89% rename from src/packages/sync/table/changefeed-nats2.ts rename to src/packages/sync/table/changefeed-conat.ts index 6bfe7fbeef..0beadfa049 100644 --- a/src/packages/sync/table/changefeed-nats2.ts +++ b/src/packages/sync/table/changefeed-conat.ts @@ -4,9 +4,9 @@ */ import { EventEmitter } from "events"; -import { changefeed, renew } from "@cocalc/nats/changefeed/client"; +import { changefeed, renew } from "@cocalc/conat/changefeed/client"; import { delay } from "awaiting"; -import { waitUntilConnected } from "@cocalc/nats/util"; +import { waitUntilConnected } from "@cocalc/conat/util"; const LIFETIME = 60000; const HEARTBEAT = 15000; @@ -27,12 +27,12 @@ const log = LOW_LEVEL_DEBUG } : (..._args) => {}; -export class NatsChangefeed extends EventEmitter { +export class ConatChangefeed extends EventEmitter { private account_id: string; private query; private options; private state: "disconnected" | "connected" | "closed" = "disconnected"; - private natsSynctable?; + private conatSyncTable?; private last_hb = 0; private id?: string; private lifetime?: number; @@ -55,7 +55,7 @@ export class NatsChangefeed extends EventEmitter { connect = async () => { log("creating new changefeed", this.query); if (this.state == "closed") return; - this.natsSynctable = await changefeed({ + this.conatSyncTable = await changefeed({ account_id: this.account_id, query: this.query, options: this.options, @@ -70,7 +70,7 @@ export class NatsChangefeed extends EventEmitter { this.state = "connected"; const { value: { id, lifetime }, - } = await this.natsSynctable.next(); + } = await this.conatSyncTable.next(); this.id = id; this.lifetime = lifetime; log("got changefeed", { id, lifetime, query: this.query }); @@ -78,7 +78,7 @@ export class NatsChangefeed extends EventEmitter { // @ts-ignore while (this.state != "closed") { - const { value } = await this.natsSynctable.next(); + const { value } = await this.conatSyncTable.next(); this.last_hb = Date.now(); if (value) { this.startWatch(); @@ -102,11 +102,11 @@ export class NatsChangefeed extends EventEmitter { }; private startWatch = async () => { - if (this.natsSynctable == null || this.state == "closed") { + if (this.conatSyncTable == null || this.state == "closed") { return; } try { - for await (const x of this.natsSynctable) { + for await (const x of this.conatSyncTable) { // @ts-ignore if (this.state == "closed") { return; diff --git a/src/packages/sync/table/changefeed-nats.ts b/src/packages/sync/table/changefeed-nats.ts deleted file mode 100644 index 5f93084a46..0000000000 --- a/src/packages/sync/table/changefeed-nats.ts +++ /dev/null @@ -1,95 +0,0 @@ -/* - * This file is part of CoCalc: Copyright © 2020 Sagemath, Inc. - * License: MS-RSL – see LICENSE.md for details - */ - -// DEPRECATED!!! DELETE THIS!!! - -import { EventEmitter } from "events"; -import type { State } from "./changefeed"; -import { delay } from "awaiting"; -import { CHANGEFEED_INTEREST_PERIOD_MS } from "@cocalc/nats/sync/synctable"; -import { waitUntilConnected } from "@cocalc/nats/util"; - -export class NatsChangefeed extends EventEmitter { - private client; - private nc; - private query; - private options; - private state: State = "disconnected"; - private natsSynctable?; - - constructor({ client, query, options }: { client; query; options? }) { - super(); - this.client = client; - this.query = query; - this.options = options; - if (this.options != null && this.options.length > 0) { - console.log("NatsChangefeed -- todo: options not implemented", options); - } - } - - connect = async () => { - await waitUntilConnected(); - this.natsSynctable = await this.client.nats_client.changefeed(this.query, { - // atomic=false means less data transfer on changes, but simply does not scale up - // well and is hence quite slow overall. - atomic: true, - immutable: false, - }); - this.state = "connected"; - this.nc = await this.client.nats_client.getConnection(); - this.nc.on?.("reconnect", this.expressInterest); - this.interest(); - this.startWatch(); - const v = this.natsSynctable.get(); - return Object.values(v); - }; - - close = (): void => { - this.nc?.removeListener?.("reconnect", this.expressInterest); - this.natsSynctable?.close(); - this.state = "closed"; - this.emit("close"); // yes "close" not "closed" ;-( - }; - - get_state = (): string => { - return this.state; - }; - - private expressInterest = async () => { - try { - await waitUntilConnected(); - await this.client.nats_client.changefeedInterest(this.query); - } catch (err) { - console.log(`WARNING: changefeed -- ${err}`, this.query); - } - }; - - private interest = async () => { - while (this.state != "closed") { - await this.expressInterest(); - await delay(CHANGEFEED_INTEREST_PERIOD_MS / 2.1); - } - }; - - private startWatch = () => { - if (this.natsSynctable == null) { - return; - } - this.natsSynctable.on( - "change", - (_, { key, value: new_val, prev: old_val }) => { - let x; - if (new_val == null) { - x = { action: "delete", old_val, key }; - } else if (old_val !== undefined) { - x = { action: "update", new_val, old_val, key }; - } else { - x = { action: "insert", new_val, key }; - } - this.emit("update", x); - }, - ); - }; -} diff --git a/src/packages/sync/table/synctable.ts b/src/packages/sync/table/synctable.ts index 7e83079aba..ab475606f4 100644 --- a/src/packages/sync/table/synctable.ts +++ b/src/packages/sync/table/synctable.ts @@ -18,8 +18,8 @@ ways of orchestrating a SyncTable. let DEBUG: boolean = false; // enable experimental nats database backed changefeed. -// for this to work you must explicitly run the server in @cocalc/database/nats/changefeeds -const USE_NATS = true && !process.env.COCALC_TEST_MODE; +// for this to work you must explicitly run the server in @cocalc/database/conat/changefeeds +const USE_CONAT = true && !process.env.COCALC_TEST_MODE; export function set_debug(x: boolean): void { DEBUG = x; @@ -38,7 +38,7 @@ import * as schema from "@cocalc/util/schema"; import mergeDeep from "@cocalc/util/immutable-deep-merge"; import { reuseInFlight } from "@cocalc/util/reuse-in-flight"; import { Changefeed } from "./changefeed"; -import { NatsChangefeed } from "./changefeed-nats2"; +import { ConatChangefeed } from "./changefeed-conat"; import { parse_query, to_key } from "./util"; import { isTestClient } from "@cocalc/sync/editor/generic/util"; @@ -67,7 +67,7 @@ function is_fatal(err: string): boolean { export type State = "disconnected" | "connected" | "closed"; export class SyncTable extends EventEmitter { - private changefeed?: Changefeed | NatsChangefeed; + private changefeed?: Changefeed | ConatChangefeed; private query: Query; private client_query: any; private primary_keys: string[]; @@ -725,12 +725,12 @@ export class SyncTable extends EventEmitter { while (true) { this.close_changefeed(); if ( - USE_NATS && + USE_CONAT && !isTestClient(this.client) && this.client.is_browser() && !this.project_id ) { - this.changefeed = new NatsChangefeed({ + this.changefeed = new ConatChangefeed({ account_id: this.client.client_id?.()!, query: this.query, options: this.options, @@ -802,7 +802,7 @@ export class SyncTable extends EventEmitter { } // awkward code due to typescript weirdness using both - // NatsChangefeed and Changefeed types (for unit testing). + // ConatChangefeed and Changefeed types (for unit testing). private init_changefeed_handlers(): void { const c = this.changefeed as EventEmitter | null; if (c == null) return; diff --git a/src/packages/sync/tsconfig.json b/src/packages/sync/tsconfig.json index 4cdbe9694e..0bdfd8b3a4 100644 --- a/src/packages/sync/tsconfig.json +++ b/src/packages/sync/tsconfig.json @@ -5,5 +5,5 @@ "outDir": "dist" }, "exclude": ["node_modules", "dist", "test"], - "references": [{ "path": "../util" }, { "path": "../nats" }] + "references": [{ "path": "../util" }, { "path": "../conat" }] } diff --git a/src/packages/util/db-schema/client-db.ts b/src/packages/util/db-schema/client-db.ts index a86719e63d..d26f0f1bcd 100644 --- a/src/packages/util/db-schema/client-db.ts +++ b/src/packages/util/db-schema/client-db.ts @@ -84,48 +84,6 @@ class ClientDB { throw Error("primary key must be a string or array of strings"); } } - - // Given rows (as objects) obtained by querying a table or virtual table, - // converts any non-null string ISO timestamps to Date objects. This is - // needed because we transfer data from the database to the browser using - // JSONCodec (via NATS) and that turns Date objects into ISO timestamp strings. - // This turns them back, but by using the SCHEMA, *not* a heuristic or regexp - // to identify which fields to change. - // NOTE: this *mutates* rows. - processDates = ({ - table, - rows, - }: { - table: string; - rows: object[] | object; - }) => { - let t = SCHEMA[table]; - if (t == null) { - return; - } - if (typeof t.virtual == "string") { - t = SCHEMA[t.virtual]; - } - const timeFields: string[] = []; - const { fields } = t; - for (const field in fields) { - if (fields[field].type == "timestamp") { - timeFields.push(field); - } - } - if (timeFields.length == 0) { - // nothing to do. - return; - } - const v = is_array(rows) ? rows : [rows]; - for (const row of v) { - for (const field of timeFields) { - if (typeof row[field] == "string") { - row[field] = new Date(row[field]); - } - } - } - }; } export const client_db = new ClientDB(); diff --git a/src/packages/util/db-schema/site-defaults.ts b/src/packages/util/db-schema/site-defaults.ts index 3578fecdd3..caf22b98df 100644 --- a/src/packages/util/db-schema/site-defaults.ts +++ b/src/packages/util/db-schema/site-defaults.ts @@ -29,7 +29,7 @@ export const TAGS = [ "Email", "Logo", "Version", - "Nats", + "Conat", "Stripe", "captcha", "Zendesk", diff --git a/src/packages/util/db-schema/site-settings-extras.ts b/src/packages/util/db-schema/site-settings-extras.ts index 4aa008362d..332c2ab733 100644 --- a/src/packages/util/db-schema/site-settings-extras.ts +++ b/src/packages/util/db-schema/site-settings-extras.ts @@ -184,14 +184,10 @@ function custom_llm_display(value: string): string { export type SiteSettingsExtrasKeys = | "pii_retention" - | "nats_heading" - | "nats_server" - | "nats_port" - | "nats_ws_port" - | "nats_password" - | "nats_auth_nseed" - | "nats_auth_xseed" - | "nats_project_server" + | "conat_heading" + | "conat_server" + | "conat_path" + | "conat_password" | "stripe_heading" | "stripe_publishable_key" | "stripe_secret_key" @@ -276,64 +272,35 @@ const DEFAULT_COMPUTE_SERVER_IMAGES_JSON = // not public, but admins can edit them export const EXTRAS: SettingsExtras = { - nats_heading: { - name: "NATS Configuration", - desc: "Configuration of [NATS server](https://nats.io/), which CoCalc uses extensively for communication.", + conat_heading: { + name: "Conat Configuration", + desc: "Configuration of Conat, which CoCalc uses extensively for communication.", default: "", type: "header", - tags: ["Nats"], + tags: ["Conat"], }, - // Nats config is loaded in packages/server/nats/credentials.ts - nats_server: { - name: "Nats Server", - desc: "Hostname of server where NATS is running. Defaults to localhost or `$COCALC_NATS_SERVER` if not specified here. (TODO: support multiple servers for high availability.)", - default: "localhost", - password: false, - tags: ["Nats"], - }, - nats_port: { - name: "Nats TCP Port", - desc: "Port that NATS is serving on. Defaults to 4222 or `$COCALC_NATS_PORT` if not specified here.", - default: "4222", - password: false, - tags: ["Nats"], - }, - nats_ws_port: { - name: "Nats Websocket Port", - desc: "Port that NATS websocket server is serving on. Defaults to 8443 or `$COCALC_NATS_WS_PORT` if not specified here. This gets proxied to browser clients.", - default: "8443", - password: false, - tags: ["Nats"], - }, - nats_project_server: { - name: "Nats Project Server", - desc: "Name of the NATS server that projects should connect to. This should be either `hostname:port` for TCP or one of `ws://hostname:port` or `wss://hostname:port` for a WebSocket. Do not include the basepath for the websocket address. If not given, the tcp NATS server and port specified above is used.", + // Conat config is loaded in packages/server/nats/credentials.ts + conat_server: { + name: "Conat Server URL", + desc: "URL of server where Conat is available. Defaults to `$CONAT_SERVER` env variable if that is given.", default: "", password: false, - tags: ["Nats"], + tags: ["Conat"], }, - nats_password: { - name: "Nats Password", - desc: "Password required for nats account configured above on the NATS server. If not given, then the contents of the file `$SECRETS/nats_password` (or `$COCALC_ROOT/data/secrets/nats_password`) is used, if it exists. IMPORTANT: the nseed and xseed secrets must also exist in order for the authentication microservice to communicate with nats-server and authenticate users.", - default: "", - password: true, - tags: ["Nats"], - }, - nats_auth_nseed: { - name: "Nats Authentication Callout - Signing Private Key", - desc: "The ed25519 nkeys secret key that is used by the auth callout microservice. If not given, then the contents of the file `$SECRETS/nats_auth_nseed` (or `$COCALC_ROOT/data/secrets/nats_auth_nseed`) is used, if it exists. This is an *account* private nkey used by the server to digitally sign messages to the auth callout service: `nk -gen account`", - default: "", - password: true, - tags: ["Nats"], + conat_path: { + name: "Conat path", + desc: "Path of conat websocket endpoint that NATS is serving on. Defaults to '/conat' or `$COCALC_PATH`.", + default: "/conat", + password: false, + tags: ["Conat"], }, - nats_auth_xseed: { - name: "Nats Authentication Callout - Encryption Private Key", - desc: "The ed25519 nkeys secret key that is used by the auth callout microservice. If not given, then the contents of the file `$SCRETS/nats_auth_xseed` (or `$COCALC_ROOT/data/secrets/nats_auth_xseed`) is used, if it exists. This is a *curve* private nkey used by the auth callout service to encrypt responses to the server: `nk -gen curve`", + conat_password: { + name: "Conat Password", + desc: "Password for conat hub admin account. If not given, then the contents of the file `$SECRETS/conat_password` (or `$COCALC_ROOT/data/secrets/conat_password`) is used, if it exists.", default: "", password: true, - tags: ["Nats"], + tags: ["Conat"], }, - openai_section: { name: "Language Model Configuration", desc: "", diff --git a/src/packages/util/event-iterator.ts b/src/packages/util/event-iterator.ts new file mode 100644 index 0000000000..d9c6e612c0 --- /dev/null +++ b/src/packages/util/event-iterator.ts @@ -0,0 +1,244 @@ +/* +LICENSE: MIT + +This is a slight fork of + +https://github.com/sapphiredev/utilities/tree/main/packages/event-iterator + +because upstream is slightly broken and what it actually does doesn't +agree with the docs. I can see why. Upstream would capture ['arg1','arg2']] +for an event emitter doing this + + emitter.emit('foo', 'arg1', 'arg2') + +But for our application we only want 'arg1'. I thus added a map option, +which makes it easy to do what we want. +*/ + +import type { EventEmitter } from "node:events"; + +/** + * A filter for an EventIterator. + */ +export type EventIteratorFilter = (value: V) => boolean; + +/** + * Options to be passed to an EventIterator. + */ +export interface EventIteratorOptions { + /** + * The filter. + */ + filter?: EventIteratorFilter; + + // maps the array of args emitted by the event emitter a V + map?: (args: any[]) => V; + + /** + * The timeout in ms before ending the EventIterator. + */ + idle?: number; + + /** + * The limit of events that pass the filter to iterate. + */ + limit?: number; +} + +/** + * An EventIterator, used for asynchronously iterating over received values. + */ +export class EventIterator + implements AsyncIterableIterator +{ + /** + * The emitter to listen to. + */ + public readonly emitter: EventEmitter; + + /** + * The event the event iterator is listening for to receive values from. + */ + public readonly event: string; + + /** + * The filter used to filter out values. + */ + public filter: EventIteratorFilter; + + public map; + + /** + * Whether or not the EventIterator has ended. + */ + #ended = false; + + /** + * The amount of idle time in ms before moving on. + */ + readonly #idle?: number; + + /** + * The queue of received values. + */ + #queue: V[] = []; + + /** + * The amount of events that have passed the filter. + */ + #passed = 0; + + /** + * The limit before ending the EventIterator. + */ + readonly #limit: number; + + /** + * The timer to track when this will idle out. + */ + #idleTimer: NodeJS.Timeout | undefined | null = null; + + /** + * The push handler with context bound to the instance. + */ + readonly #push: (this: EventIterator, ...value: unknown[]) => void; + + /** + * @param emitter The event emitter to listen to. + * @param event The event we're listening for to receives values from. + * @param options Any extra options. + */ + public constructor( + emitter: EventEmitter, + event: string, + options: EventIteratorOptions = {}, + ) { + this.emitter = emitter; + this.event = event; + this.map = options.map ?? ((args) => args); + this.#limit = options.limit ?? Infinity; + this.#idle = options.idle; + this.filter = options.filter ?? ((): boolean => true); + + // This timer is to idle out on lack of valid responses + if (this.#idle) { + // NOTE: this same code is in next in case when we can't use refresh + this.#idleTimer = setTimeout(this.end.bind(this), this.#idle); + } + this.#push = this.push.bind(this); + const maxListeners = this.emitter.getMaxListeners(); + if (maxListeners !== 0) this.emitter.setMaxListeners(maxListeners + 1); + + this.emitter.on(this.event, this.#push); + } + + /** + * Whether or not the EventIterator has ended. + */ + public get ended(): boolean { + return this.#ended; + } + + /** + * Ends the EventIterator. + */ + public end(): void { + if (this.#ended) return; + this.#ended = true; + this.#queue = []; + + this.emitter.off(this.event, this.#push); + const maxListeners = this.emitter.getMaxListeners(); + if (maxListeners !== 0) this.emitter.setMaxListeners(maxListeners - 1); + } + // aliases to match usage in NATS and CoCalc. + close = this.end; + stop = this.end; + + drain(): void { + // just immediately end + this.end(); + // [ ] TODO: for compat with nats. I'm not sure what this should be + // or if it matters... + // console.log("WARNING: TODO -- event-iterator drain not implemented"); + } + + /** + * The next value that's received from the EventEmitter. + */ + public async next(): Promise> { + // If there are elements in the queue, return an undone response: + if (this.#queue.length) { + const value = this.#queue.shift()!; + if (!this.filter(value)) return this.next(); + if (++this.#passed >= this.#limit) this.end(); + if (this.#idleTimer) { + if (this.#idleTimer.refresh != null) { + this.#idleTimer.refresh(); + } else { + clearTimeout(this.#idleTimer); + this.#idleTimer = setTimeout(this.end.bind(this), this.#idle); + } + } + + return { done: false, value }; + } + + // If the iterator ended, clean-up timer and return a done response: + if (this.#ended) { + if (this.#idleTimer) clearTimeout(this.#idleTimer); + return { done: true, value: undefined as never }; + } + + // Listen for a new element from the emitter: + return new Promise>((resolve) => { + let idleTimer: NodeJS.Timeout | undefined | null = null; + + // If there is an idle time set, we will create a temporary timer, + // which will cause the iterator to end if no new elements are received: + if (this.#idle) { + idleTimer = setTimeout(() => { + this.end(); + resolve(this.next()); + }, this.#idle); + } + + // Once it has received at least one value, we will clear the timer (if defined), + // and resolve with the new value: + this.emitter.once(this.event, () => { + if (idleTimer) clearTimeout(idleTimer); + resolve(this.next()); + }); + }); + } + + /** + * Handles what happens when you break or return from a loop. + */ + public return(): Promise> { + this.end(); + return Promise.resolve({ done: true, value: undefined as never }); + } + + /** + * Handles what happens when you encounter an error in a loop. + */ + public throw(): Promise> { + this.end(); + return Promise.resolve({ done: true, value: undefined as never }); + } + + /** + * The symbol allowing EventIterators to be used in for-await-of loops. + */ + public [Symbol.asyncIterator](): AsyncIterableIterator { + return this; + } + + /** + * Pushes a value into the queue. + */ + protected push(...args): void { + this.#queue.push(this.map(args)); + } +} diff --git a/src/packages/util/refcache.ts b/src/packages/util/refcache.ts index f8a3b797e3..9e545525cf 100644 --- a/src/packages/util/refcache.ts +++ b/src/packages/util/refcache.ts @@ -34,7 +34,7 @@ export default function refCache< const count: { [key: string]: number } = {}; const close: { [key: number]: Function } = {}; if (createKey == null) { - createKey = jsonStableStringify; + createKey = (x) => jsonStableStringify(x) ?? ""; } const createObjectReuseInFlight = reuseInFlight(createObject, { createKey: (args) => createKey(args[0]), @@ -116,7 +116,7 @@ export function refCacheSync< const count: { [key: string]: number } = {}; const close: { [key: number]: Function } = {}; if (createKey == null) { - createKey = jsonStableStringify; + createKey = (x) => jsonStableStringify(x) ?? ""; } const get = (opts: Options): T => { if (opts.noCache) { diff --git a/src/scripts/g-tmux.sh b/src/scripts/g-tmux.sh index 179058ff2c..5f052385f4 100755 --- a/src/scripts/g-tmux.sh +++ b/src/scripts/g-tmux.sh @@ -1,27 +1,24 @@ #!/usr/bin/env bash -echo "Spawning tmux windows with: hub, database, nats-server, rspack or memory monitor..." +echo "Spawning tmux windows with: hub, database, rspack or memory monitor..." export PWD=`pwd` tmux new-session -d -s mysession tmux new-window -t mysession:1 tmux new-window -t mysession:2 -tmux new-window -t mysession:3 sleep 2 tmux send-keys -t mysession:0 '$PWD/scripts/g.sh' C-m sleep 2 tmux send-keys -t mysession:1 'pnpm database' C-m -sleep 2 -tmux send-keys -t mysession:2 'pnpm nats-server' C-m if [ -n "$NO_RSPACK_DEV_SERVER" ]; then sleep 2 -tmux send-keys -t mysession:3 'pnpm rspack' C-m +tmux send-keys -t mysession:2 'pnpm rspack' C-m else sleep 2 -tmux send-keys -t mysession:3 '$PWD/scripts/memory_monitor.py' C-m +tmux send-keys -t mysession:2 '$PWD/scripts/memory_monitor.py' C-m fi tmux attach -t mysession:1 diff --git a/src/workspaces.py b/src/workspaces.py index 3017bb390c..d82eddc9c6 100755 --- a/src/workspaces.py +++ b/src/workspaces.py @@ -109,7 +109,7 @@ def all_packages() -> List[str]: 'packages/sync', 'packages/sync-client', 'packages/sync-fs', - 'packages/nats', + 'packages/conat', 'packages/backend', 'packages/api-client', 'packages/jupyter', @@ -120,6 +120,7 @@ def all_packages() -> List[str]: 'packages/static', # packages/hub assumes this is built (for webpack dev server) 'packages/server', # packages/next assumes this is built 'packages/database', # packages/next also assumes database is built (or at least the coffeescript in it is) + 'packages/file-server', 'packages/next', 'packages/hub', # hub won't build if next isn't already built ]