test(import): add legacy import compatibility coverage
This commit is contained in:
@@ -0,0 +1,290 @@
|
||||
import { describe, it, expect, beforeAll, afterAll, beforeEach } from "vitest";
|
||||
import request from "supertest";
|
||||
import fs from "fs";
|
||||
import path from "path";
|
||||
import os from "os";
|
||||
import { getTestPrisma, setupTestDb, cleanupTestDb } from "./testUtils";
|
||||
|
||||
type LegacyDbOptions = {
|
||||
tableStyle: "prisma" | "plural-lower";
|
||||
includeCollections: boolean;
|
||||
includeMigrationsTable: boolean;
|
||||
includeTrashDrawing: boolean;
|
||||
};
|
||||
|
||||
const createTempDir = () => fs.mkdtempSync(path.join(os.tmpdir(), "excalidash-legacy-"));
|
||||
|
||||
const openWritableDb = (filePath: string): any => {
|
||||
try {
|
||||
// eslint-disable-next-line @typescript-eslint/no-var-requires
|
||||
const { DatabaseSync } = require("node:sqlite") as any;
|
||||
return new DatabaseSync(filePath, { enableForeignKeyConstraints: false });
|
||||
} catch (_err) {
|
||||
// eslint-disable-next-line @typescript-eslint/no-var-requires
|
||||
const Database = require("better-sqlite3") as any;
|
||||
return new Database(filePath);
|
||||
}
|
||||
};
|
||||
|
||||
const createLegacySqliteDb = (opts: LegacyDbOptions): string => {
|
||||
const dir = createTempDir();
|
||||
const filePath = path.join(dir, "legacy-export.db");
|
||||
const db = openWritableDb(filePath);
|
||||
|
||||
const tableDrawing = opts.tableStyle === "plural-lower" ? "drawings" : "Drawing";
|
||||
const tableCollection = opts.tableStyle === "plural-lower" ? "collections" : "Collection";
|
||||
|
||||
try {
|
||||
if (opts.includeCollections) {
|
||||
db.exec(`
|
||||
CREATE TABLE "${tableCollection}" (
|
||||
id TEXT PRIMARY KEY NOT NULL,
|
||||
name TEXT NOT NULL,
|
||||
createdAt TEXT,
|
||||
updatedAt TEXT
|
||||
);
|
||||
`);
|
||||
db.prepare(`INSERT INTO "${tableCollection}" (id, name, createdAt, updatedAt) VALUES (?, ?, ?, ?)`).run(
|
||||
"legacy-collection-1",
|
||||
"Legacy Collection",
|
||||
new Date("2024-01-01T00:00:00.000Z").toISOString(),
|
||||
new Date("2024-01-02T00:00:00.000Z").toISOString(),
|
||||
);
|
||||
}
|
||||
|
||||
db.exec(`
|
||||
CREATE TABLE "${tableDrawing}" (
|
||||
id TEXT PRIMARY KEY NOT NULL,
|
||||
name TEXT NOT NULL,
|
||||
elements TEXT NOT NULL,
|
||||
appState TEXT NOT NULL,
|
||||
files TEXT,
|
||||
preview TEXT,
|
||||
version INTEGER,
|
||||
collectionId TEXT,
|
||||
collectionName TEXT,
|
||||
createdAt TEXT,
|
||||
updatedAt TEXT
|
||||
);
|
||||
`);
|
||||
|
||||
const now = new Date("2024-01-03T00:00:00.000Z").toISOString();
|
||||
const insertDrawing = db.prepare(
|
||||
`INSERT INTO "${tableDrawing}"
|
||||
(id, name, elements, appState, files, preview, version, collectionId, collectionName, createdAt, updatedAt)
|
||||
VALUES
|
||||
(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`
|
||||
);
|
||||
|
||||
insertDrawing.run(
|
||||
"legacy-drawing-1",
|
||||
"Legacy Drawing 1",
|
||||
JSON.stringify([]),
|
||||
JSON.stringify({}),
|
||||
JSON.stringify({}),
|
||||
null,
|
||||
1,
|
||||
opts.includeCollections ? "legacy-collection-1" : null,
|
||||
opts.includeCollections ? "Legacy Collection" : null,
|
||||
now,
|
||||
now,
|
||||
);
|
||||
|
||||
insertDrawing.run(
|
||||
"legacy-drawing-2",
|
||||
"Legacy Drawing 2 (unorganized)",
|
||||
JSON.stringify([]),
|
||||
JSON.stringify({}),
|
||||
JSON.stringify({}),
|
||||
null,
|
||||
2,
|
||||
null,
|
||||
null,
|
||||
now,
|
||||
now,
|
||||
);
|
||||
|
||||
if (opts.includeTrashDrawing) {
|
||||
insertDrawing.run(
|
||||
"legacy-drawing-trash",
|
||||
"Legacy Trash Drawing",
|
||||
JSON.stringify([]),
|
||||
JSON.stringify({}),
|
||||
JSON.stringify({}),
|
||||
null,
|
||||
1,
|
||||
"trash",
|
||||
"Trash",
|
||||
now,
|
||||
now,
|
||||
);
|
||||
}
|
||||
|
||||
if (opts.includeMigrationsTable) {
|
||||
db.exec(`
|
||||
CREATE TABLE "_prisma_migrations" (
|
||||
id TEXT PRIMARY KEY NOT NULL,
|
||||
checksum TEXT NOT NULL,
|
||||
finished_at TEXT,
|
||||
migration_name TEXT NOT NULL,
|
||||
logs TEXT,
|
||||
rolled_back_at TEXT,
|
||||
started_at TEXT NOT NULL,
|
||||
applied_steps_count INTEGER NOT NULL DEFAULT 0
|
||||
);
|
||||
`);
|
||||
db.prepare(
|
||||
`INSERT INTO "_prisma_migrations"
|
||||
(id, checksum, finished_at, migration_name, logs, rolled_back_at, started_at, applied_steps_count)
|
||||
VALUES
|
||||
(?, ?, ?, ?, ?, ?, ?, ?)`
|
||||
).run(
|
||||
"m1",
|
||||
"checksum",
|
||||
new Date("2024-01-04T00:00:00.000Z").toISOString(),
|
||||
"20240104000000_initial",
|
||||
null,
|
||||
null,
|
||||
new Date("2024-01-04T00:00:00.000Z").toISOString(),
|
||||
1,
|
||||
);
|
||||
}
|
||||
} finally {
|
||||
db.close();
|
||||
}
|
||||
|
||||
return filePath;
|
||||
};
|
||||
|
||||
// Integration coverage for the legacy SQLite import endpoints
// (/import/sqlite/legacy/verify and /import/sqlite/legacy). The server is
// imported dynamically AFTER the test database is configured, because the
// app reads DATABASE_URL at module-load time.
describe("Import compatibility (legacy exports)", () => {
  // The server writes staged uploads here; it must exist before boot.
  const uploadsDir = path.resolve(__dirname, "../../uploads");
  const userAgent = "vitest-import-compat";
  let prisma: ReturnType<typeof getTestPrisma>;
  // Express app — typed `any` because it is loaded via dynamic import below.
  let app: any;
  // CSRF header name/token fetched once and reused by every mutating request.
  let csrfHeaderName: string;
  let csrfToken: string;

  beforeAll(async () => {
    setupTestDb();
    prisma = getTestPrisma();
    fs.mkdirSync(uploadsDir, { recursive: true });

    // Import the server AFTER DATABASE_URL is set by setupTestDb/getTestPrisma.
    ({ app } = await import("../index"));

    const csrfRes = await request(app).get("/csrf-token").set("User-Agent", userAgent);
    csrfHeaderName = csrfRes.body.header;
    csrfToken = csrfRes.body.token;
    // Fail fast if the CSRF endpoint shape changed.
    expect(typeof csrfHeaderName).toBe("string");
    expect(typeof csrfToken).toBe("string");
  });

  // Each test starts from an empty database.
  beforeEach(async () => {
    await cleanupTestDb(prisma);
  });

  afterAll(async () => {
    await prisma.$disconnect();
  });

  it("verifies a v0.1.x–v0.3.2-style SQLite export (Drawing/Collection tables) and returns migration info when present", async () => {
    const legacyDb = createLegacySqliteDb({
      tableStyle: "prisma",
      includeCollections: true,
      includeMigrationsTable: true,
      includeTrashDrawing: false,
    });

    const res = await request(app)
      .post("/import/sqlite/legacy/verify")
      .set("User-Agent", userAgent)
      .set(csrfHeaderName, csrfToken)
      .attach("db", legacyDb);

    expect(res.status).toBe(200);
    expect(res.body.valid).toBe(true);
    // Fixture contains exactly 2 drawings and 1 collection (no trash drawing).
    expect(res.body.drawings).toBe(2);
    expect(res.body.collections).toBe(1);
    expect(res.body.latestMigration).toBe("20240104000000_initial");
    // The server's own latest migration name varies; only check its type.
    expect(typeof res.body.currentLatestMigration === "string").toBe(true);
  });

  it("merge-imports a legacy SQLite export into the current account without replacing the database", async () => {
    const legacyDb = createLegacySqliteDb({
      tableStyle: "prisma",
      includeCollections: true,
      includeMigrationsTable: false,
      includeTrashDrawing: true,
    });

    const res = await request(app)
      .post("/import/sqlite/legacy")
      .set("User-Agent", userAgent)
      .set(csrfHeaderName, csrfToken)
      .attach("db", legacyDb);

    expect(res.status).toBe(200);
    expect(res.body.success).toBe(true);
    expect(res.body.collections?.created).toBeGreaterThanOrEqual(1);
    expect(res.body.drawings?.created).toBeGreaterThanOrEqual(3);

    const importedDrawings = await prisma.drawing.findMany({
      orderBy: { name: "asc" },
      select: { id: true, name: true, collectionId: true, userId: true },
    });

    // In single-user mode, imports land on the bootstrap acting user.
    expect(importedDrawings.every((d) => d.userId === "bootstrap-admin")).toBe(true);
    // Legacy drawing IDs are preserved on merge (no conflicts in a clean DB).
    expect(importedDrawings.map((d) => d.id)).toEqual(
      expect.arrayContaining(["legacy-drawing-1", "legacy-drawing-2", "legacy-drawing-trash"])
    );

    // The special "trash" collection is created on demand for trashed drawings.
    const trash = await prisma.collection.findUnique({ where: { id: "trash" } });
    expect(trash).toBeTruthy();
  });

  it("supports older exports with plural/lowercase table names (drawings/collections)", async () => {
    const legacyDb = createLegacySqliteDb({
      tableStyle: "plural-lower",
      includeCollections: true,
      includeMigrationsTable: false,
      includeTrashDrawing: false,
    });

    // Verify first, then import the same staged file.
    const verify = await request(app)
      .post("/import/sqlite/legacy/verify")
      .set("User-Agent", userAgent)
      .set(csrfHeaderName, csrfToken)
      .attach("db", legacyDb);

    expect(verify.status).toBe(200);
    expect(verify.body.drawings).toBe(2);
    expect(verify.body.collections).toBe(1);

    const res = await request(app)
      .post("/import/sqlite/legacy")
      .set("User-Agent", userAgent)
      .set(csrfHeaderName, csrfToken)
      .attach("db", legacyDb);

    expect(res.status).toBe(200);
    expect(res.body.success).toBe(true);
  });

  it("fails verification if the legacy DB is missing a Drawing table", async () => {
    // Hand-build a DB with no recognizable drawings table.
    const dir = createTempDir();
    const filePath = path.join(dir, "invalid.db");
    const db = openWritableDb(filePath);
    db.exec(`CREATE TABLE "NotDrawing" (id TEXT PRIMARY KEY NOT NULL);`);
    db.close();

    const res = await request(app)
      .post("/import/sqlite/legacy/verify")
      .set("User-Agent", userAgent)
      .set(csrfHeaderName, csrfToken)
      .attach("db", filePath);

    expect(res.status).toBe(400);
    expect(res.body.error).toBe("Invalid legacy DB");
  });
});
|
||||
@@ -121,6 +121,9 @@ const buildLoginAttemptLimiter = (cfg: LoginRateLimitConfig) => {
|
||||
},
|
||||
standardHeaders: true,
|
||||
legacyHeaders: false,
|
||||
validate: {
|
||||
trustProxy: false,
|
||||
},
|
||||
store,
|
||||
keyGenerator: (req) => {
|
||||
const identifier = resolveAuthIdentifier(req as Request);
|
||||
@@ -165,6 +168,9 @@ const accountActionRateLimiter = rateLimit({
|
||||
},
|
||||
standardHeaders: true,
|
||||
legacyHeaders: false,
|
||||
validate: {
|
||||
trustProxy: false,
|
||||
},
|
||||
});
|
||||
|
||||
// Validation schemas
|
||||
|
||||
+51
-29
@@ -373,6 +373,12 @@ const generalRateLimiter = rateLimit({
|
||||
},
|
||||
standardHeaders: true,
|
||||
legacyHeaders: false,
|
||||
// We intentionally allow `app.set("trust proxy", true)` for deployments with multiple proxy layers.
|
||||
// express-rate-limit warns (and can throw) in that configuration; we accept the risk in favor of
|
||||
// correct client IP handling and rely on deployment-level network controls.
|
||||
validate: {
|
||||
trustProxy: false,
|
||||
},
|
||||
});
|
||||
|
||||
app.use(generalRateLimiter);
|
||||
@@ -1850,6 +1856,23 @@ const parseOptionalJson = <T>(raw: unknown, fallback: T): T => {
|
||||
return fallback;
|
||||
};
|
||||
|
||||
const openReadonlySqliteDb = (filePath: string): any => {
|
||||
try {
|
||||
// Prefer Node's built-in SQLite when available (no native addon rebuild needed).
|
||||
// eslint-disable-next-line @typescript-eslint/no-var-requires
|
||||
const { DatabaseSync } = require("node:sqlite") as any;
|
||||
return new DatabaseSync(filePath, {
|
||||
readOnly: true,
|
||||
enableForeignKeyConstraints: false,
|
||||
});
|
||||
} catch (_err) {
|
||||
// Fall back to better-sqlite3 on older Node versions.
|
||||
// eslint-disable-next-line @typescript-eslint/no-var-requires
|
||||
const Database = require("better-sqlite3") as any;
|
||||
return new Database(filePath, { readonly: true, fileMustExist: true });
|
||||
}
|
||||
};
|
||||
|
||||
const getCurrentLatestPrismaMigrationName = async (): Promise<string | null> => {
|
||||
try {
|
||||
const migrationsDir = path.resolve(backendRoot, "prisma/migrations");
|
||||
@@ -1887,20 +1910,9 @@ app.post("/import/sqlite/legacy/verify", requireAuth, upload.single("db"), async
|
||||
return res.status(400).json({ error: "Invalid database format" });
|
||||
}
|
||||
|
||||
// Use better-sqlite3 to inspect the legacy DB file
|
||||
let Database: any;
|
||||
try {
|
||||
// eslint-disable-next-line @typescript-eslint/no-var-requires
|
||||
Database = require("better-sqlite3") as any;
|
||||
} catch (error) {
|
||||
return res.status(500).json({
|
||||
error: "Legacy DB support unavailable",
|
||||
message:
|
||||
"Failed to load better-sqlite3. Run `cd backend && npm rebuild better-sqlite3` (or reinstall dependencies) and try again.",
|
||||
});
|
||||
}
|
||||
const db = new Database(stagedPath, { readonly: true, fileMustExist: true });
|
||||
let db: any | null = null;
|
||||
try {
|
||||
db = openReadonlySqliteDb(stagedPath);
|
||||
const tables: string[] = db
|
||||
.prepare("SELECT name FROM sqlite_master WHERE type='table'")
|
||||
.all()
|
||||
@@ -1941,9 +1953,15 @@ app.post("/import/sqlite/legacy/verify", requireAuth, upload.single("db"), async
|
||||
latestMigration,
|
||||
currentLatestMigration: await getCurrentLatestPrismaMigrationName(),
|
||||
});
|
||||
} catch (_error) {
|
||||
return res.status(500).json({
|
||||
error: "Legacy DB support unavailable",
|
||||
message:
|
||||
"Failed to open the SQLite database for inspection. If you're on Node < 22, you may need to rebuild native dependencies (e.g. `cd backend && npm rebuild better-sqlite3`).",
|
||||
});
|
||||
} finally {
|
||||
try {
|
||||
db.close();
|
||||
db?.close?.();
|
||||
} catch { }
|
||||
}
|
||||
} finally {
|
||||
@@ -1967,19 +1985,9 @@ app.post("/import/sqlite/legacy", requireAuth, upload.single("db"), asyncHandler
|
||||
return res.status(400).json({ error: "Invalid database format" });
|
||||
}
|
||||
|
||||
let Database: any;
|
||||
try {
|
||||
// eslint-disable-next-line @typescript-eslint/no-var-requires
|
||||
Database = require("better-sqlite3") as any;
|
||||
} catch (error) {
|
||||
return res.status(500).json({
|
||||
error: "Legacy DB support unavailable",
|
||||
message:
|
||||
"Failed to load better-sqlite3. Run `cd backend && npm rebuild better-sqlite3` (or reinstall dependencies) and try again.",
|
||||
});
|
||||
}
|
||||
const legacyDb = new Database(stagedPath, { readonly: true, fileMustExist: true });
|
||||
let legacyDb: any | null = null;
|
||||
try {
|
||||
legacyDb = openReadonlySqliteDb(stagedPath);
|
||||
const tables: string[] = legacyDb
|
||||
.prepare("SELECT name FROM sqlite_master WHERE type='table'")
|
||||
.all()
|
||||
@@ -2159,9 +2167,15 @@ app.post("/import/sqlite/legacy", requireAuth, upload.single("db"), asyncHandler
|
||||
collections: { created: collectionsCreated, updated: collectionsUpdated, idConflicts: collectionIdConflicts },
|
||||
drawings: { created: drawingsCreated, updated: drawingsUpdated, idConflicts: drawingIdConflicts },
|
||||
});
|
||||
} catch (_error) {
|
||||
return res.status(500).json({
|
||||
error: "Legacy DB support unavailable",
|
||||
message:
|
||||
"Failed to open the SQLite database for import. If you're on Node < 22, you may need to rebuild native dependencies (e.g. `cd backend && npm rebuild better-sqlite3`).",
|
||||
});
|
||||
} finally {
|
||||
try {
|
||||
legacyDb.close();
|
||||
legacyDb?.close?.();
|
||||
} catch { }
|
||||
}
|
||||
} finally {
|
||||
@@ -2172,9 +2186,17 @@ app.post("/import/sqlite/legacy", requireAuth, upload.single("db"), asyncHandler
|
||||
// Error handler middleware (must be last)
|
||||
app.use(errorHandler);
|
||||
|
||||
httpServer.listen(PORT, async () => {
|
||||
export { app, httpServer };
|
||||
|
||||
const isMain =
|
||||
// eslint-disable-next-line @typescript-eslint/no-unnecessary-condition
|
||||
typeof require !== "undefined" && require.main === module;
|
||||
|
||||
if (isMain) {
|
||||
httpServer.listen(PORT, async () => {
|
||||
await initializeUploadDir();
|
||||
console.log(`Server running on port ${PORT}`);
|
||||
console.log(`Environment: ${config.nodeEnv}`);
|
||||
console.log(`Frontend URL: ${config.frontendUrl}`);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
@@ -1,11 +1,26 @@
|
||||
const { parentPort, workerData } = require('worker_threads');
|
||||
const Database = require('better-sqlite3');
|
||||
|
||||
if (!parentPort) throw new Error("Must be run in a worker thread");
|
||||
|
||||
/**
 * Open `filePath` read-only, preferring Node's built-in sqlite driver and
 * falling back to better-sqlite3 when `node:sqlite` is unavailable.
 * Returns `{ kind, db }` so the caller can tell which driver was used.
 */
const openReadonlyDb = (filePath) => {
  try {
    const { DatabaseSync } = require("node:sqlite");
    return {
      kind: "node:sqlite",
      db: new DatabaseSync(filePath, {
        readOnly: true,
        enableForeignKeyConstraints: false,
      }),
    };
  } catch (_err) {
    // Fall back to better-sqlite3 on Node versions that don't have node:sqlite.
    const BetterSqlite3 = require("better-sqlite3");
    return {
      kind: "better-sqlite3",
      db: new BetterSqlite3(filePath, { readonly: true, fileMustExist: true }),
    };
  }
};
|
||||
|
||||
try {
|
||||
const { filePath } = workerData;
|
||||
const db = new Database(filePath, { readonly: true, fileMustExist: true });
|
||||
const { db } = openReadonlyDb(filePath);
|
||||
|
||||
// This is the CPU-heavy operation
|
||||
const result = db.prepare("PRAGMA integrity_check;").get();
|
||||
|
||||
@@ -0,0 +1,152 @@
|
||||
import { describe, it, expect, vi, beforeEach } from "vitest";
|
||||
|
||||
// Spy functions backing the mocked API client. They are declared before the
// vi.mock calls below: vi.mock is hoisted to the top of the module, but its
// factory runs lazily on first import, so closing over these is safe.
const apiGet = vi.fn();
const apiPost = vi.fn();

// Replace the real HTTP client with the spies above.
vi.mock("../api", () => ({
  api: {
    get: (...args: any[]) => apiGet(...args),
    post: (...args: any[]) => apiPost(...args),
  },
}));

// Stub Excalidraw's SVG exporter so preview generation needs no DOM/canvas.
const exportToSvg = vi.fn(async () => ({ outerHTML: "<svg />" }));
vi.mock("@excalidraw/excalidraw", () => ({
  exportToSvg: (...args: any[]) => exportToSvg(...args),
}));
|
||||
|
||||
import { importLegacyFiles } from "./importUtils";
|
||||
|
||||
// Unit tests for importLegacyFiles, driven entirely through the mocked API
// client and SVG exporter declared at module scope.
describe("importLegacyFiles", () => {
  // Minimal stand-in for a browser File whose text() yields JSON.
  const makeTestFile = (json: unknown, name: string) =>
    ({
      name,
      text: async () => JSON.stringify(json),
    }) as unknown as File;

  // Same, but with a raw (pre-serialized) text payload.
  const makeTextFile = (text: string, name: string) =>
    ({
      name,
      text: async () => text,
    }) as unknown as File;

  beforeEach(() => {
    apiGet.mockReset();
    apiPost.mockReset();
    exportToSvg.mockClear();
  });

  it("imports a legacy ExcaliDash export JSON ({ drawings: [...] }) and maps collectionName → collectionId", async () => {
    // GET /collections returns one pre-existing collection.
    apiGet.mockResolvedValueOnce({
      data: [{ id: "col-existing", name: "Existing Collection" }],
    });

    apiPost.mockImplementation(async (url: string) => {
      if (url === "/collections") return { data: { id: "col-new", name: "New Collection" } };
      if (url === "/drawings") return { data: { success: true } };
      throw new Error(`Unexpected POST ${url}`);
    });

    // Three drawings: one in an existing collection, one in a collection
    // that must be created, and one already addressed by collectionId.
    const legacyExport = {
      version: "1.0",
      exportedAt: new Date().toISOString(),
      drawings: [
        {
          name: "One",
          elements: [],
          appState: {},
          files: {},
          collectionName: "Existing Collection",
        },
        {
          name: "Two",
          elements: [],
          appState: {},
          files: {},
          collectionName: "New Collection",
        },
        {
          name: "Trash",
          elements: [],
          appState: {},
          files: {},
          collectionId: "trash",
        },
      ],
    };

    const file = makeTestFile(legacyExport, "legacy-export.json");

    const result = await importLegacyFiles([file], null);
    expect(result.failed).toBe(0);
    expect(result.success).toBe(3);

    expect(apiGet).toHaveBeenCalledWith("/collections");

    // One new collection created ("New Collection") and three drawings posted.
    expect(apiPost.mock.calls.filter((c) => c[0] === "/collections")).toHaveLength(1);
    expect(apiPost.mock.calls.filter((c) => c[0] === "/drawings")).toHaveLength(3);

    const drawCalls = apiPost.mock.calls.filter((c) => c[0] === "/drawings");
    expect(drawCalls[0][1].collectionId).toBe("col-existing");
    expect(drawCalls[1][1].collectionId).toBe("col-new");
    expect(drawCalls[2][1].collectionId).toBe("trash");

    // One SVG preview rendered per imported drawing.
    expect(exportToSvg).toHaveBeenCalledTimes(3);
  });

  it("honors targetCollectionId override for legacy export JSON", async () => {
    apiPost.mockImplementation(async (url: string) => {
      if (url === "/drawings") return { data: { success: true } };
      throw new Error(`Unexpected POST ${url}`);
    });

    const legacyExport = {
      drawings: [
        { name: "One", elements: [], appState: {}, files: {}, collectionName: "A" },
        { name: "Two", elements: [], appState: {}, files: {}, collectionName: "B" },
      ],
    };

    const file = makeTestFile(legacyExport, "legacy-export.json");

    const result = await importLegacyFiles([file], "target-col");
    expect(result.failed).toBe(0);
    expect(result.success).toBe(2);

    // With an explicit target, collection lookup/creation is skipped entirely.
    expect(apiGet).not.toHaveBeenCalled();
    expect(apiPost.mock.calls.filter((c) => c[0] === "/collections")).toHaveLength(0);

    const drawCalls = apiPost.mock.calls.filter((c) => c[0] === "/drawings");
    expect(drawCalls).toHaveLength(2);
    expect(drawCalls[0][1].collectionId).toBe("target-col");
    expect(drawCalls[1][1].collectionId).toBe("target-col");
  });

  it("imports a single .excalidraw file as a drawing", async () => {
    apiPost.mockImplementation(async (url: string) => {
      if (url === "/drawings") return { data: { success: true } };
      throw new Error(`Unexpected POST ${url}`);
    });

    const excalidraw = {
      type: "excalidraw",
      version: 2,
      source: "test",
      elements: [],
      appState: {},
      files: {},
    };

    const file = makeTextFile(JSON.stringify(excalidraw), "hello.excalidraw");

    const result = await importLegacyFiles([file], null);
    expect(result.failed).toBe(0);
    expect(result.success).toBe(1);

    const drawCalls = apiPost.mock.calls.filter((c) => c[0] === "/drawings");
    expect(drawCalls).toHaveLength(1);
    // The drawing name is derived from the filename minus its extension.
    expect(drawCalls[0][1].name).toBe("hello");
    expect(exportToSvg).toHaveBeenCalledTimes(1);
  });
});
|
||||
Reference in New Issue
Block a user