Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
34 changes: 18 additions & 16 deletions packages/core/src/extensions/tiptap-extensions/Link/link.ts
Original file line number Diff line number Diff line change
Expand Up @@ -10,13 +10,6 @@ import { UNICODE_WHITESPACE_REGEX_GLOBAL } from "./helpers/whitespace.js";

const DEFAULT_PROTOCOL = "https";

const HTML_ATTRIBUTES = {
target: "_blank",
rel: "noopener noreferrer nofollow",
className: "bn-inline-content-section",
"data-inline-content-type": "link",
};

// Pre-compiled regex for URI protocol validation.
// Allows: http, https, ftp, ftps, mailto, tel, callto, sms, cid, xmpp
const ALLOWED_URI_REGEX =
Expand Down Expand Up @@ -84,7 +77,12 @@ export const Link = Mark.create<LinkOptions>({

addOptions() {
return {
HTMLAttributes: {},
HTMLAttributes: {
target: "_blank",
rel: "noopener noreferrer nofollow",
className: "bn-inline-content-section",
"data-inline-content-type": "link",
},
editor: undefined,
onClick: undefined,
isValidLink: isAllowedUri,
Expand All @@ -99,12 +97,6 @@ export const Link = Mark.create<LinkOptions>({
return element.getAttribute("href");
},
},
target: {
default: HTML_ATTRIBUTES.target,
},
rel: {
default: HTML_ATTRIBUTES.rel,
},
};
},

Expand All @@ -128,12 +120,22 @@ export const Link = Mark.create<LinkOptions>({
if (!this.options.isValidLink(HTMLAttributes.href)) {
return [
"a",
mergeAttributes(HTML_ATTRIBUTES, { ...HTMLAttributes, href: "" }),
mergeAttributes(
{
...HTMLAttributes,
href: "",
},
this.options.HTMLAttributes,
),
0,
];
}

return ["a", mergeAttributes(HTML_ATTRIBUTES, HTMLAttributes), 0];
return [
"a",
mergeAttributes(HTMLAttributes, this.options.HTMLAttributes),
0,
];
},

addPasteRules() {
Expand Down
1 change: 1 addition & 0 deletions packages/xl-ai/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -66,6 +66,7 @@
"lint": "eslint src --max-warnings 0",
"test": "NODE_EXTRA_CA_CERTS=\"$(mkcert -CAROOT)/rootCA.pem\" vitest --run",
"test-watch": "NODE_EXTRA_CA_CERTS=\"$(mkcert -CAROOT)/rootCA.pem\" vitest watch",
"rename-msw-snapshots": "node scripts/rename-msw-snapshots.mjs",
"email": "email dev"
},
"dependencies": {
Expand Down
129 changes: 129 additions & 0 deletions packages/xl-ai/scripts/rename-msw-snapshots.mjs
Original file line number Diff line number Diff line change
@@ -0,0 +1,129 @@
#!/usr/bin/env node
// Repair msw-snapshot files after a request-shape change.
//
// When the request body changes (e.g. a schema change in BlockNote alters the
// HTML/JSON sent to the LLM), the md5 hash that msw-snapshot embeds in each
// cached response filename no longer matches. msw-snapshot then treats the
// snapshot as missing and (because of `updateSnapshots: "missing"`) falls
// through to the real API, which fails in CI without credentials and writes a
// new file at the *correct* new hash containing the failure response (e.g.
// 401).
//
// After that failed run, every affected slot has two files:
// <test>_<seq>_<old-hash>.json -- valid 200 response, wrong hash
// <test>_<seq>_<new-hash>.json -- right hash, but a 401 body
//
// This script transplants the 200 response into the new-hash file and deletes
// the old-hash file, leaving exactly one file per slot with the right hash
// and the right response.
//
// Usage:
// pnpm --filter @blocknote/xl-ai test # populates the new-hash files
// pnpm --filter @blocknote/xl-ai rename-msw-snapshots
// pnpm --filter @blocknote/xl-ai test # all green
import {
readFileSync,
readdirSync,
statSync,
unlinkSync,
writeFileSync,
} from "node:fs";
import path from "node:path";
import { fileURLToPath } from "node:url";

// ESM has no built-in __dirname; derive it from this module's URL.
const __dirname = path.dirname(fileURLToPath(import.meta.url));
// Package root (scripts/ lives one level below it).
const PKG_ROOT = path.resolve(__dirname, "..");
// Snapshots only ever live under src/, so limit the walk to it.
const SEARCH_ROOT = path.join(PKG_ROOT, "src");

// msw-snapshot filename shape: <test-name>_<request-seq>_<md5-hash>.json
// capture 1 = test name, capture 2 = per-test request sequence, capture 3 = hash.
const FILE_RE = /^(.+)_(\d+)_([a-f0-9]+)\.json$/;

// Recursively yield the full path of every .json file under `dir`.
function* walk(dir) {
  const dirents = readdirSync(dir, { withFileTypes: true });
  for (const dirent of dirents) {
    const fullPath = path.join(dir, dirent.name);
    if (dirent.isDirectory()) {
      yield* walk(fullPath);
      continue;
    }
    if (dirent.isFile() && dirent.name.endsWith(".json")) {
      yield fullPath;
    }
  }
}

// Index snapshot basenames by their containing directory, keeping only files
// that live inside a __msw_snapshots__ folder.
const filesByDir = new Map();
const snapshotMarker = `${path.sep}__msw_snapshots__${path.sep}`;
for (const jsonFile of walk(SEARCH_ROOT)) {
  if (!jsonFile.includes(snapshotMarker)) {
    continue;
  }
  const dir = path.dirname(jsonFile);
  if (!filesByDir.has(dir)) {
    filesByDir.set(dir, []);
  }
  filesByDir.get(dir).push(path.basename(jsonFile));
}

let migrated = 0;
let skipped = 0;
const skipNotes = [];

for (const [dir, files] of filesByDir) {
const groups = new Map();
for (const file of files) {
const match = FILE_RE.exec(file);
if (!match) continue;
const slot = `${match[1]}_${match[2]}`;
const list = groups.get(slot) ?? [];
list.push(file);
groups.set(slot, list);
}

for (const [slot, group] of groups) {
if (group.length < 2) continue;

const entries = group.map((file) => {
const fp = path.join(dir, file);
const data = JSON.parse(readFileSync(fp, "utf8"));
return {
file,
path: fp,
data,
status: data?.response?.status,
mtime: statSync(fp).mtimeMs,
};
});

const good = entries.filter((e) => e.status === 200);
const bad = entries.filter((e) => e.status !== 200);

if (good.length !== 1 || bad.length === 0) {
skipped++;
skipNotes.push(
` ${path.relative(PKG_ROOT, dir)}/${slot}: ${good.length} good + ${bad.length} bad`,
);
continue;
}

// Use the most recently written bad file as the destination — its hash
// matches the current request body.
bad.sort((a, b) => b.mtime - a.mtime);
const target = bad[0];

target.data.response = good[0].data.response;
writeFileSync(target.path, JSON.stringify(target.data, null, 2));
unlinkSync(good[0].path);
for (const extra of bad.slice(1)) unlinkSync(extra.path);
Comment thread
nperez0111 marked this conversation as resolved.

migrated++;
console.log(
`migrated ${path.relative(PKG_ROOT, dir)}/${slot} -> ${target.file}`,
);
}
}

// Final summary: totals, then any slots that need a human to look at them.
console.log(`\nDone. ${migrated} migrated, ${skipped} skipped.`);
if (skipped > 0) {
  console.log("\nSkipped slots (need manual attention):");
  for (const note of skipNotes) console.log(note);
}
// Nothing found at all usually means the failing test run that records the
// new-hash files hasn't happened yet — point the user at the workflow.
if (migrated === 0 && skipped === 0) {
  console.log(
    "\nNo mismatched snapshot pairs found. If you expected some, run\n" +
      "`pnpm --filter @blocknote/xl-ai test` first to let msw-snapshot record\n" +
      "the new-hash files alongside the existing old-hash ones.",
  );
}
46 changes: 3 additions & 43 deletions packages/xl-ai/src/api/formats/html-blocks/htmlBlocks.test.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,4 @@
import { getCurrentTest } from "@vitest/runner";
import { getSortedEntries, snapshot, toHashString } from "msw-snapshot";
import { snapshot } from "msw-snapshot";
import { setupServer } from "msw/node";
import path from "path";
import { afterAll, afterEach, beforeAll, describe, it } from "vitest";
Expand All @@ -8,6 +7,7 @@ import { testAIModels } from "../../../testUtil/testAIModels.js";
import { BlockNoteEditor } from "@blocknote/core";
import { StreamToolExecutor } from "../../../streamTool/StreamToolExecutor.js";
import { ClientSideTransport } from "../../../streamTool/vercelAiSdk/clientside/ClientSideTransport.js";
import { createSnapshotPathFn } from "../tests/snapshotPath.js";
import { generateSharedTestCases } from "../tests/sharedTestCases.js";
import { htmlBlockLLMFormat } from "./htmlBlocks.js";

Expand All @@ -17,54 +17,14 @@ const BASE_FILE_PATH = path.resolve(
path.basename(__filename),
);

const fetchCountMap: Record<string, number> = {};

async function createRequestHash(req: Request) {
const url = new URL(req.url);
return [
// url.host,
// url.pathname,
toHashString([
req.method,
url.origin,
url.pathname,
getSortedEntries(url.searchParams),
getSortedEntries(req.headers),
// getSortedEntries(req.cookies),
new TextDecoder("utf-8").decode(await req.arrayBuffer()),
]),
].join("/");
}

// Main test suite with snapshot middleware
describe("Models", () => {
// Define server with snapshot middleware for the main tests
const server = setupServer(
snapshot({
updateSnapshots: "missing",
// onSnapshotUpdated: "all",
// ignoreSnapshots: true,
async createSnapshotPath(info) {
// use a unique path for each model
const t = getCurrentTest()!;
const mswPath = path.join(
t.suite!.name, // same directory as the test snapshot
"__msw_snapshots__",
t.suite!.suite!.name, // model / streaming params
t.name,
);
// in case there are multiple requests in a test, we need to use a separate snapshot for each request
fetchCountMap[mswPath] = (fetchCountMap[mswPath] || 0) + 1;
const hash = await createRequestHash(info.request);
return mswPath + `_${fetchCountMap[mswPath]}_${hash}.json`;
},
createSnapshotPath: createSnapshotPathFn(BASE_FILE_PATH),
basePath: BASE_FILE_PATH,
// onFetchFromSnapshot(info, snapshot) {
// console.log("onFetchFromSnapshot", info, snapshot);
// },
// onFetchFromServer(info, snapshot) {
// console.log("onFetchFromServer", info, snapshot);
// },
}),
);

Expand Down
46 changes: 3 additions & 43 deletions packages/xl-ai/src/api/formats/json/json.test.ts
Original file line number Diff line number Diff line change
@@ -1,9 +1,9 @@
import { afterAll, afterEach, beforeAll, describe } from "vitest";

import { getCurrentTest } from "@vitest/runner";
import { getSortedEntries, snapshot, toHashString } from "msw-snapshot";
import { snapshot } from "msw-snapshot";
import { setupServer } from "msw/node";
import path from "path";
import { createSnapshotPathFn } from "../tests/snapshotPath.js";
import { generateSharedTestCases } from "../tests/sharedTestCases.js";

import { ClientSideTransport } from "../../../streamTool/vercelAiSdk/clientside/ClientSideTransport.js";
Expand All @@ -16,54 +16,14 @@ const BASE_FILE_PATH = path.resolve(
path.basename(__filename),
);

const fetchCountMap: Record<string, number> = {};

async function createRequestHash(req: Request) {
const url = new URL(req.url);
return [
// url.host,
// url.pathname,
toHashString([
req.method,
url.origin,
url.pathname,
getSortedEntries(url.searchParams),
getSortedEntries(req.headers),
// getSortedEntries(req.cookies),
new TextDecoder("utf-8").decode(await req.arrayBuffer()),
]),
].join("/");
}

// Main test suite with snapshot middleware
describe.skip("Models", () => {
// Define server with snapshot middleware for the main tests
const server = setupServer(
snapshot({
updateSnapshots: "missing",
// updateSnapshots: "all",
// ignoreSnapshots: true,
async createSnapshotPath(info) {
// use a unique path for each model
const t = getCurrentTest()!;
const mswPath = path.join(
t.suite!.name, // same directory as the test snapshot
"__msw_snapshots__",
t.suite!.suite!.name, // model / streaming params
t.name,
);
// in case there are multiple requests in a test, we need to use a separate snapshot for each request
fetchCountMap[mswPath] = (fetchCountMap[mswPath] || 0) + 1;
const hash = await createRequestHash(info.request);
return mswPath + `_${fetchCountMap[mswPath]}_${hash}.json`;
},
createSnapshotPath: createSnapshotPathFn(BASE_FILE_PATH),
basePath: BASE_FILE_PATH,
// onFetchFromSnapshot(info, snapshot) {
// console.log("onFetchFromSnapshot", info, snapshot);
// },
// onFetchFromServer(info, snapshot) {
// console.log("onFetchFromServer", info, snapshot);
// },
}),
);

Expand Down
Loading
Loading