fix: restore svg image DataURL dimensions (#8730)

pull/8745/head
David Luzar 3 months ago committed by GitHub
parent f9815b8b4f
commit 79b181bcdc

@ -1,6 +1,7 @@
import throttle from "lodash.throttle";
import { PureComponent } from "react";
import type {
BinaryFileData,
ExcalidrawImperativeAPI,
SocketId,
} from "../../packages/excalidraw/types";
@ -9,6 +10,7 @@ import { APP_NAME, ENV, EVENT } from "../../packages/excalidraw/constants";
import type { ImportedDataState } from "../../packages/excalidraw/data/types";
import type {
ExcalidrawElement,
FileId,
InitializedExcalidrawImageElement,
OrderedExcalidrawElement,
} from "../../packages/excalidraw/element/types";
@ -157,7 +159,7 @@ class Collab extends PureComponent<CollabProps, CollabState> {
throw new AbortError();
}
return saveFilesToFirebase({
const { savedFiles, erroredFiles } = await saveFilesToFirebase({
prefix: `${FIREBASE_STORAGE_PREFIXES.collabFiles}/${roomId}`,
files: await encodeFilesForUpload({
files: addedFiles,
@ -165,6 +167,29 @@ class Collab extends PureComponent<CollabProps, CollabState> {
maxBytes: FILE_UPLOAD_MAX_BYTES,
}),
});
return {
savedFiles: savedFiles.reduce(
(acc: Map<FileId, BinaryFileData>, id) => {
const fileData = addedFiles.get(id);
if (fileData) {
acc.set(id, fileData);
}
return acc;
},
new Map(),
),
erroredFiles: erroredFiles.reduce(
(acc: Map<FileId, BinaryFileData>, id) => {
const fileData = addedFiles.get(id);
if (fileData) {
acc.set(id, fileData);
}
return acc;
},
new Map(),
),
};
},
});
this.excalidrawAPI = props.excalidrawAPI;
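
saveFilesToFirebase now reports plain FileId lists (see the firebase change further down), so Collab re-keys those ids back to the BinaryFileData it already holds in addedFiles, which is what FileManager needs to record per-file versions. A minimal sketch of that re-keying step as a standalone helper (the helper name is hypothetical; imports match the ones added above):

import type { BinaryFileData } from "../../packages/excalidraw/types";
import type { FileId } from "../../packages/excalidraw/element/types";

// hypothetical helper: re-associate file ids reported by storage with the
// BinaryFileData we already hold locally; unknown ids are dropped
const mapIdsToFileData = (
  ids: readonly FileId[],
  addedFiles: Map<FileId, BinaryFileData>,
): Map<FileId, BinaryFileData> => {
  const result = new Map<FileId, BinaryFileData>();
  for (const id of ids) {
    const fileData = addedFiles.get(id);
    if (fileData) {
      result.set(id, fileData);
    }
  }
  return result;
};

// usage sketch, with savedFiles / erroredFiles as returned by saveFilesToFirebase:
// const saved = mapIdsToFileData(savedFiles, addedFiles);
// const errored = mapIdsToFileData(erroredFiles, addedFiles);
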
@ -394,7 +419,7 @@ class Collab extends PureComponent<CollabProps, CollabState> {
.filter((element) => {
return (
isInitializedImageElement(element) &&
!this.fileManager.isFileHandled(element.fileId) &&
!this.fileManager.isFileTracked(element.fileId) &&
!element.isDeleted &&
(opts.forceFetchFiles
? element.status !== "pending" ||

@ -16,14 +16,26 @@ import type {
BinaryFiles,
} from "../../packages/excalidraw/types";
type FileVersion = Required<BinaryFileData>["version"];
export class FileManager {
/** files being fetched */
private fetchingFiles = new Map<ExcalidrawImageElement["fileId"], true>();
private erroredFiles_fetch = new Map<
ExcalidrawImageElement["fileId"],
true
>();
/** files being saved */
private savingFiles = new Map<ExcalidrawImageElement["fileId"], true>();
private savingFiles = new Map<
ExcalidrawImageElement["fileId"],
FileVersion
>();
/* files already saved to persistent storage */
private savedFiles = new Map<ExcalidrawImageElement["fileId"], true>();
private erroredFiles = new Map<ExcalidrawImageElement["fileId"], true>();
private savedFiles = new Map<ExcalidrawImageElement["fileId"], FileVersion>();
private erroredFiles_save = new Map<
ExcalidrawImageElement["fileId"],
FileVersion
>();
private _getFiles;
private _saveFiles;
@ -37,8 +49,8 @@ export class FileManager {
erroredFiles: Map<FileId, true>;
}>;
saveFiles: (data: { addedFiles: Map<FileId, BinaryFileData> }) => Promise<{
savedFiles: Map<FileId, true>;
erroredFiles: Map<FileId, true>;
savedFiles: Map<FileId, BinaryFileData>;
erroredFiles: Map<FileId, BinaryFileData>;
}>;
}) {
this._getFiles = getFiles;
@ -46,19 +58,28 @@ export class FileManager {
}
/**
* returns whether file is already saved or being processed
* returns whether file is saved/errored, or being processed
*/
isFileHandled = (id: FileId) => {
isFileTracked = (id: FileId) => {
return (
this.savedFiles.has(id) ||
this.fetchingFiles.has(id) ||
this.savingFiles.has(id) ||
this.erroredFiles.has(id)
this.fetchingFiles.has(id) ||
this.erroredFiles_fetch.has(id) ||
this.erroredFiles_save.has(id)
);
};
isFileSavedOrBeingSaved = (file: BinaryFileData) => {
const fileVersion = this.getFileVersion(file);
return (
this.savedFiles.get(file.id) === fileVersion ||
this.savingFiles.get(file.id) === fileVersion
);
};
isFileSaved = (id: FileId) => {
return this.savedFiles.has(id);
getFileVersion = (file: BinaryFileData) => {
return file.version ?? 1;
};
saveFiles = async ({
@ -71,13 +92,16 @@ export class FileManager {
const addedFiles: Map<FileId, BinaryFileData> = new Map();
for (const element of elements) {
const fileData =
isInitializedImageElement(element) && files[element.fileId];
if (
isInitializedImageElement(element) &&
files[element.fileId] &&
!this.isFileHandled(element.fileId)
fileData &&
// NOTE if errored during save, won't retry due to this check
!this.isFileSavedOrBeingSaved(fileData)
) {
addedFiles.set(element.fileId, files[element.fileId]);
this.savingFiles.set(element.fileId, true);
this.savingFiles.set(element.fileId, this.getFileVersion(fileData));
}
}
@ -86,8 +110,12 @@ export class FileManager {
addedFiles,
});
for (const [fileId] of savedFiles) {
this.savedFiles.set(fileId, true);
for (const [fileId, fileData] of savedFiles) {
this.savedFiles.set(fileId, this.getFileVersion(fileData));
}
for (const [fileId, fileData] of erroredFiles) {
this.erroredFiles_save.set(fileId, this.getFileVersion(fileData));
}
return {
@ -121,10 +149,10 @@ export class FileManager {
const { loadedFiles, erroredFiles } = await this._getFiles(ids);
for (const file of loadedFiles) {
this.savedFiles.set(file.id, true);
this.savedFiles.set(file.id, this.getFileVersion(file));
}
for (const [fileId] of erroredFiles) {
this.erroredFiles.set(fileId, true);
this.erroredFiles_fetch.set(fileId, true);
}
return { loadedFiles, erroredFiles };
@ -160,7 +188,7 @@ export class FileManager {
): element is InitializedExcalidrawImageElement => {
return (
isInitializedImageElement(element) &&
this.isFileSaved(element.fileId) &&
this.savedFiles.has(element.fileId) &&
element.status === "pending"
);
};
@ -169,7 +197,8 @@ export class FileManager {
this.fetchingFiles.clear();
this.savingFiles.clear();
this.savedFiles.clear();
this.erroredFiles.clear();
this.erroredFiles_fetch.clear();
this.erroredFiles_save.clear();
}
}
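
The net effect of the FileManager changes: saved/saving state is keyed on a per-file version instead of a boolean, so an unchanged file is still deduplicated while a file whose dataURL was rewritten (and whose version was bumped) gets persisted again. A minimal standalone sketch of that idea, not the actual FileManager:

type FileId = string; // branded in Excalidraw; a plain string suffices here
type FileLike = { id: FileId; version?: number };

// minimal sketch of version-keyed save deduplication
class VersionedSaveTracker {
  private saved = new Map<FileId, number>();

  private versionOf(file: FileLike) {
    return file.version ?? 1;
  }

  /** true if exactly this version has already been persisted */
  shouldSkip(file: FileLike) {
    return this.saved.get(file.id) === this.versionOf(file);
  }

  markSaved(file: FileLike) {
    this.saved.set(file.id, this.versionOf(file));
  }
}

// bumping `version` after restoring an SVG dataURL makes shouldSkip()
// return false again, so the rewritten file is re-saved
const tracker = new VersionedSaveTracker();
const file: FileLike = { id: "a", version: 1 };
tracker.markSaved(file); // version 1 persisted
file.version = 2; // dataURL restored → version bumped
console.assert(!tracker.shouldSkip(file)); // will be saved again
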

@ -183,8 +183,8 @@ export class LocalData {
);
},
async saveFiles({ addedFiles }) {
const savedFiles = new Map<FileId, true>();
const erroredFiles = new Map<FileId, true>();
const savedFiles = new Map<FileId, BinaryFileData>();
const erroredFiles = new Map<FileId, BinaryFileData>();
// before we use `storage` event synchronization, let's update the flag
// optimistically. Hopefully nothing fails, and an IDB read executed
@ -195,10 +195,10 @@ export class LocalData {
[...addedFiles].map(async ([id, fileData]) => {
try {
await set(id, fileData, filesStore);
savedFiles.set(id, true);
savedFiles.set(id, fileData);
} catch (error: any) {
console.error(error);
erroredFiles.set(id, true);
erroredFiles.set(id, fileData);
}
}),
);
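
This saveFiles adapter is what gets handed to the FileManager constructor shown earlier; it now returns the file data itself instead of `true` so the manager can record versions. A toy in-memory adapter under the same contract — a sketch only, assuming the constructor shape `new FileManager({ getFiles, saveFiles })` and import paths as in the Collab imports above:

import type { BinaryFileData } from "../../packages/excalidraw/types";
import type { FileId } from "../../packages/excalidraw/element/types";
import { FileManager } from "./FileManager"; // assumption: same-directory module

const memoryStore = new Map<FileId, BinaryFileData>();

const fileManager = new FileManager({
  getFiles: async (ids) => {
    const loadedFiles: BinaryFileData[] = [];
    const erroredFiles = new Map<FileId, true>();
    for (const id of ids) {
      const fileData = memoryStore.get(id);
      if (fileData) {
        loadedFiles.push(fileData);
      } else {
        erroredFiles.set(id, true);
      }
    }
    return { loadedFiles, erroredFiles };
  },
  saveFiles: async ({ addedFiles }) => {
    const savedFiles = new Map<FileId, BinaryFileData>();
    const erroredFiles = new Map<FileId, BinaryFileData>();
    for (const [id, fileData] of addedFiles) {
      memoryStore.set(id, fileData); // nothing can fail in memory
      savedFiles.set(id, fileData);
    }
    return { savedFiles, erroredFiles };
  },
});
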

@ -177,8 +177,8 @@ export const saveFilesToFirebase = async ({
}) => {
const firebase = await loadFirebaseStorage();
const erroredFiles = new Map<FileId, true>();
const savedFiles = new Map<FileId, true>();
const erroredFiles: FileId[] = [];
const savedFiles: FileId[] = [];
await Promise.all(
files.map(async ({ id, buffer }) => {
@ -194,9 +194,9 @@ export const saveFilesToFirebase = async ({
cacheControl: `public, max-age=${FILE_CACHE_MAX_AGE_SEC}`,
},
);
savedFiles.set(id, true);
savedFiles.push(id);
} catch (error: any) {
erroredFiles.set(id, true);
erroredFiles.push(id);
}
}),
);

@ -304,8 +304,10 @@ import { Toast } from "./Toast";
import { actionToggleViewMode } from "../actions/actionToggleViewMode";
import {
dataURLToFile,
dataURLToString,
generateIdFromFile,
getDataURL,
getDataURL_sync,
getFileFromEvent,
ImageURLToFile,
isImageFileHandle,
@ -2122,9 +2124,7 @@ class App extends React.Component<AppProps, AppState> {
}
if (actionResult.files) {
this.files = actionResult.replaceFiles
? actionResult.files
: { ...this.files, ...actionResult.files };
this.addMissingFiles(actionResult.files, actionResult.replaceFiles);
this.addNewImagesToImageCache();
}
@ -3237,7 +3237,7 @@ class App extends React.Component<AppProps, AppState> {
});
if (opts.files) {
this.files = { ...this.files, ...opts.files };
this.addMissingFiles(opts.files);
}
this.store.shouldCaptureIncrement();
@ -3746,23 +3746,56 @@ class App extends React.Component<AppProps, AppState> {
}
};
/** adds supplied files to existing files in the appState */
/**
* adds supplied files to existing files in the appState.
* NOTE if file already exists in editor state, the file data is not updated
* */
public addFiles: ExcalidrawImperativeAPI["addFiles"] = withBatchedUpdates(
(files) => {
const filesMap = files.reduce((acc, fileData) => {
acc.set(fileData.id, fileData);
return acc;
}, new Map<FileId, BinaryFileData>());
this.files = { ...this.files, ...Object.fromEntries(filesMap) };
const { addedFiles } = this.addMissingFiles(files);
this.clearImageShapeCache(Object.fromEntries(filesMap));
this.clearImageShapeCache(addedFiles);
this.scene.triggerUpdate();
this.addNewImagesToImageCache();
},
);
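
addFiles now routes through addMissingFiles, so repeated calls with an id that already exists leave the stored data untouched, while genuinely new files still get their image-shape cache entries invalidated. A small usage sketch against the imperative API (the wrapper function and import path are illustrative assumptions):

import type {
  BinaryFileData,
  ExcalidrawImperativeAPI,
} from "../types"; // path assumption: packages/excalidraw/types

const addFilesOnce = (
  api: ExcalidrawImperativeAPI,
  files: BinaryFileData[],
) => {
  // ids already present in the editor are skipped; calling this twice with
  // the same files is effectively a no-op the second time
  api.addFiles(files);
};
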
private addMissingFiles = (
files: BinaryFiles | BinaryFileData[],
replace = false,
) => {
const nextFiles = replace ? {} : { ...this.files };
const addedFiles: BinaryFiles = {};
const _files = Array.isArray(files) ? files : Object.values(files);
for (const fileData of _files) {
if (nextFiles[fileData.id]) {
continue;
}
addedFiles[fileData.id] = fileData;
nextFiles[fileData.id] = fileData;
if (fileData.mimeType === MIME_TYPES.svg) {
const restoredDataURL = getDataURL_sync(
normalizeSVG(dataURLToString(fileData.dataURL)),
MIME_TYPES.svg,
);
if (fileData.dataURL !== restoredDataURL) {
// bump version so persistence layer can update the store
fileData.version = (fileData.version ?? 1) + 1;
fileData.dataURL = restoredDataURL;
}
}
}
this.files = nextFiles;
return { addedFiles };
};
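
addMissingFiles is the core of the fix: when an SVG enters the editor its dataURL is decoded, re-run through normalizeSVG (which restores the missing width/height dimensions the PR title refers to), re-encoded synchronously, and `version` is bumped only when the payload actually changed, so persistence layers know to overwrite their stored copy. The same round-trip, condensed into a standalone helper (import paths are assumptions):

import { MIME_TYPES } from "../constants";
import { dataURLToString, getDataURL_sync } from "../data/blob";
import { normalizeSVG } from "../element/image"; // path assumption
import type { BinaryFileData } from "../types";

// returns true if the SVG dataURL had to be rewritten (and version bumped)
const restoreSvgDataURL = (fileData: BinaryFileData): boolean => {
  if (fileData.mimeType !== MIME_TYPES.svg) {
    return false;
  }
  const restoredDataURL = getDataURL_sync(
    normalizeSVG(dataURLToString(fileData.dataURL)),
    MIME_TYPES.svg,
  );
  if (fileData.dataURL === restoredDataURL) {
    return false;
  }
  // bump version so persistence layers re-save the rewritten file
  fileData.version = (fileData.version ?? 1) + 1;
  fileData.dataURL = restoredDataURL;
  return true;
};
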
public updateScene = withBatchedUpdates(
<K extends keyof AppState>(sceneData: {
elements?: SceneData["elements"];
@ -9285,7 +9318,7 @@ class App extends React.Component<AppProps, AppState> {
if (mimeType === MIME_TYPES.svg) {
try {
imageFile = SVGStringToFile(
await normalizeSVG(await imageFile.text()),
normalizeSVG(await imageFile.text()),
imageFile.name,
);
} catch (error: any) {
@ -9353,16 +9386,15 @@ class App extends React.Component<AppProps, AppState> {
return new Promise<NonDeleted<InitializedExcalidrawImageElement>>(
async (resolve, reject) => {
try {
this.files = {
...this.files,
[fileId]: {
this.addMissingFiles([
{
mimeType,
id: fileId,
dataURL,
created: Date.now(),
lastRetrieved: Date.now(),
},
};
]);
const cachedImageData = this.imageCache.get(fileId);
if (!cachedImageData) {
this.addNewImagesToImageCache();

@ -8,13 +8,14 @@ import { calculateScrollCenter } from "../scene";
import type { AppState, DataURL, LibraryItem } from "../types";
import type { ValueOf } from "../utility-types";
import { bytesToHexString, isPromiseLike } from "../utils";
import { base64ToString, stringToBase64, toByteString } from "./encode";
import type { FileSystemHandle } from "./filesystem";
import { nativeFileSystemSupported } from "./filesystem";
import { isValidExcalidrawData, isValidLibrary } from "./json";
import { restore, restoreLibraryItems } from "./restore";
import type { ImportedLibraryData } from "./types";
const parseFileContents = async (blob: Blob | File) => {
const parseFileContents = async (blob: Blob | File): Promise<string> => {
let contents: string;
if (blob.type === MIME_TYPES.png) {
@ -46,9 +47,7 @@ const parseFileContents = async (blob: Blob | File) => {
}
if (blob.type === MIME_TYPES.svg) {
try {
return await (
await import("./image")
).decodeSvgMetadata({
return (await import("./image")).decodeSvgMetadata({
svg: contents,
});
} catch (error: any) {
@ -249,6 +248,7 @@ export const generateIdFromFile = async (file: File): Promise<FileId> => {
}
};
/** async. For sync variant, use getDataURL_sync */
export const getDataURL = async (file: Blob | File): Promise<DataURL> => {
return new Promise((resolve, reject) => {
const reader = new FileReader();
@ -261,6 +261,16 @@ export const getDataURL = async (file: Blob | File): Promise<DataURL> => {
});
};
export const getDataURL_sync = (
data: string | Uint8Array | ArrayBuffer,
mimeType: ValueOf<typeof MIME_TYPES>,
): DataURL => {
return `data:${mimeType};base64,${stringToBase64(
toByteString(data),
true,
)}` as DataURL;
};
export const dataURLToFile = (dataURL: DataURL, filename = "") => {
const dataIndexStart = dataURL.indexOf(",");
const byteString = atob(dataURL.slice(dataIndexStart + 1));
@ -274,6 +284,10 @@ export const dataURLToFile = (dataURL: DataURL, filename = "") => {
return new File([ab], filename, { type: mimeType });
};
export const dataURLToString = (dataURL: DataURL) => {
return base64ToString(dataURL.slice(dataURL.indexOf(",") + 1));
};
export const resizeImageFile = async (
file: File,
opts: {
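
getDataURL_sync and dataURLToString are inverses for string payloads, which is what lets addMissingFiles decode, rewrite, and re-encode an SVG dataURL without awaiting a FileReader. A quick round-trip check (import paths are assumptions):

import { MIME_TYPES } from "../constants";
import { dataURLToString, getDataURL_sync } from "./blob";

const svg = `<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 100 50"></svg>`;

// encode synchronously, then decode back to the original markup
const dataURL = getDataURL_sync(svg, MIME_TYPES.svg);
console.assert(dataURL.startsWith(`data:${MIME_TYPES.svg};base64,`));
console.assert(dataURLToString(dataURL) === svg);
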

@ -5,24 +5,23 @@ import { encryptData, decryptData } from "./encryption";
// byte (binary) strings
// -----------------------------------------------------------------------------
// fast, Buffer-compatible implem
export const toByteString = (
data: string | Uint8Array | ArrayBuffer,
): Promise<string> => {
return new Promise((resolve, reject) => {
const blob =
typeof data === "string"
? new Blob([new TextEncoder().encode(data)])
: new Blob([data instanceof Uint8Array ? data : new Uint8Array(data)]);
const reader = new FileReader();
reader.onload = (event) => {
if (!event.target || typeof event.target.result !== "string") {
return reject(new Error("couldn't convert to byte string"));
}
resolve(event.target.result);
};
reader.readAsBinaryString(blob);
});
// Buffer-compatible implem.
//
// Note that in V8, spreading the uint8array (by chunks) into
// `String.fromCharCode(...uint8array)` tends to be faster for large
// strings/buffers, in case perf is needed in the future.
export const toByteString = (data: string | Uint8Array | ArrayBuffer) => {
const bytes =
typeof data === "string"
? new TextEncoder().encode(data)
: data instanceof Uint8Array
? data
: new Uint8Array(data);
let bstring = "";
for (const byte of bytes) {
bstring += String.fromCharCode(byte);
}
return bstring;
};
const byteStringToArrayBuffer = (byteString: string) => {
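
toByteString drops the FileReader in favor of a plain loop, which is what allows everything below it to go synchronous. The note in the code mentions that spreading the Uint8Array into String.fromCharCode in chunks is faster in V8 for large buffers; a sketch of that alternative, not used by this commit:

// chunked variant of toByteString: same output, fewer string concatenations.
// The chunk size is an arbitrary choice; String.fromCharCode has an
// argument-count limit, so large buffers cannot be spread in one call.
const toByteStringChunked = (data: string | Uint8Array | ArrayBuffer) => {
  const bytes =
    typeof data === "string"
      ? new TextEncoder().encode(data)
      : data instanceof Uint8Array
      ? data
      : new Uint8Array(data);
  const CHUNK = 0x8000;
  let bstring = "";
  for (let i = 0; i < bytes.length; i += CHUNK) {
    bstring += String.fromCharCode(...bytes.subarray(i, i + CHUNK));
  }
  return bstring;
};
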
@ -46,12 +45,12 @@ const byteStringToString = (byteString: string) => {
* @param isByteString set to true if already byte string to prevent bloat
* due to reencoding
*/
export const stringToBase64 = async (str: string, isByteString = false) => {
return isByteString ? window.btoa(str) : window.btoa(await toByteString(str));
export const stringToBase64 = (str: string, isByteString = false) => {
return isByteString ? window.btoa(str) : window.btoa(toByteString(str));
};
// async to align with stringToBase64
export const base64ToString = async (base64: string, isByteString = false) => {
export const base64ToString = (base64: string, isByteString = false) => {
return isByteString
? window.atob(base64)
: byteStringToString(window.atob(base64));
@ -82,18 +81,18 @@ type EncodedData = {
/**
* Encodes (and potentially compresses via zlib) text to byte string
*/
export const encode = async ({
export const encode = ({
text,
compress,
}: {
text: string;
/** defaults to `true`. If compression fails, falls back to bstring alone. */
compress?: boolean;
}): Promise<EncodedData> => {
}): EncodedData => {
let deflated!: string;
if (compress !== false) {
try {
deflated = await toByteString(deflate(text));
deflated = toByteString(deflate(text));
} catch (error: any) {
console.error("encode: cannot deflate", error);
}
@ -102,11 +101,11 @@ export const encode = async ({
version: "1",
encoding: "bstring",
compressed: !!deflated,
encoded: deflated || (await toByteString(text)),
encoded: deflated || toByteString(text),
};
};
export const decode = async (data: EncodedData): Promise<string> => {
export const decode = (data: EncodedData): string => {
let decoded: string;
switch (data.encoding) {
@ -114,7 +113,7 @@ export const decode = async (data: EncodedData): Promise<string> => {
// if compressed, do not double decode the bstring
decoded = data.compressed
? data.encoded
: await byteStringToString(data.encoded);
: byteStringToString(data.encoded);
break;
default:
throw new Error(`decode: unknown encoding "${data.encoding}"`);
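
With toByteString, stringToBase64, and base64ToString now synchronous, encode/decode become a plain round trip. A quick check (import path is an assumption):

import { decode, encode } from "./encode";

const text = JSON.stringify({ type: "excalidraw", elements: [] });

// deflates when possible, otherwise falls back to a raw byte string
const encoded = encode({ text, compress: true });
console.assert(decode(encoded) === text);
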

@ -32,7 +32,7 @@ export const encodePngMetadata = async ({
const metadataChunk = tEXt.encode(
MIME_TYPES.excalidraw,
JSON.stringify(
await encode({
encode({
text: metadata,
compress: true,
}),
@ -59,7 +59,7 @@ export const decodePngMetadata = async (blob: Blob) => {
}
throw new Error("FAILED");
}
return await decode(encodedData);
return decode(encodedData);
} catch (error: any) {
console.error(error);
throw new Error("FAILED");
@ -72,9 +72,9 @@ export const decodePngMetadata = async (blob: Blob) => {
// SVG
// -----------------------------------------------------------------------------
export const encodeSvgMetadata = async ({ text }: { text: string }) => {
const base64 = await stringToBase64(
JSON.stringify(await encode({ text })),
export const encodeSvgMetadata = ({ text }: { text: string }) => {
const base64 = stringToBase64(
JSON.stringify(encode({ text })),
true /* is already byte string */,
);
@ -87,7 +87,7 @@ export const encodeSvgMetadata = async ({ text }: { text: string }) => {
return metadata;
};
export const decodeSvgMetadata = async ({ svg }: { svg: string }) => {
export const decodeSvgMetadata = ({ svg }: { svg: string }) => {
if (svg.includes(`payload-type:${MIME_TYPES.excalidraw}`)) {
const match = svg.match(
/<!-- payload-start -->\s*(.+?)\s*<!-- payload-end -->/,
@ -100,7 +100,7 @@ export const decodeSvgMetadata = async ({ svg }: { svg: string }) => {
const isByteString = version !== "1";
try {
const json = await base64ToString(match[1], isByteString);
const json = base64ToString(match[1], isByteString);
const encodedData = JSON.parse(json);
if (!("encoded" in encodedData)) {
// legacy, un-encoded scene JSON
@ -112,7 +112,7 @@ export const decodeSvgMetadata = async ({ svg }: { svg: string }) => {
}
throw new Error("FAILED");
}
return await decode(encodedData);
return decode(encodedData);
} catch (error: any) {
console.error(error);
throw new Error("FAILED");

@ -94,7 +94,7 @@ export const isHTMLSVGElement = (node: Node | null): node is SVGElement => {
return node?.nodeName.toLowerCase() === "svg";
};
export const normalizeSVG = async (SVGString: string) => {
export const normalizeSVG = (SVGString: string) => {
const doc = new DOMParser().parseFromString(SVGString, MIME_TYPES.svg);
const svg = doc.querySelector("svg");
const errorNode = doc.querySelector("parsererror");
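
normalizeSVG (now synchronous so it can run inside addMissingFiles) is where the missing dimensions get restored. Its body isn't part of this diff; as a rough illustration of the idea only, not Excalidraw's actual implementation, deriving width/height from the viewBox when they are absent looks roughly like this:

// sketch: give an <svg> concrete width/height if it only has a viewBox,
// so later rasterization to a DataURL keeps its dimensions
const ensureSvgDimensions = (svgString: string): string => {
  const doc = new DOMParser().parseFromString(svgString, "image/svg+xml");
  const svg = doc.querySelector("svg");
  if (!svg) {
    throw new Error("invalid SVG");
  }
  if (!svg.hasAttribute("width") || !svg.hasAttribute("height")) {
    const viewBox = svg.getAttribute("viewBox")?.split(/[\s,]+/);
    if (viewBox?.length === 4) {
      svg.setAttribute("width", viewBox[2]);
      svg.setAttribute("height", viewBox[3]);
    }
  }
  return svg.outerHTML;
};
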

@ -319,9 +319,7 @@ export const exportToSvg = async (
// the tempScene hack which duplicates and regenerates ids
if (exportEmbedScene) {
try {
metadata = await (
await import("../data/image")
).encodeSvgMetadata({
metadata = (await import("../data/image")).encodeSvgMetadata({
// when embedding scene, we want to embed the originally supplied
// elements which don't contain the temp frame labels.
// But it also requires that the exportToSvg is being supplied with

@ -62,10 +62,10 @@ describe("export", () => {
});
it("test encoding/decoding scene for SVG export", async () => {
const encoded = await encodeSvgMetadata({
const encoded = encodeSvgMetadata({
text: serializeAsJSON(testElements, h.state, {}, "local"),
});
const decoded = JSON.parse(await decodeSvgMetadata({ svg: encoded }));
const decoded = JSON.parse(decodeSvgMetadata({ svg: encoded }));
expect(decoded.elements).toEqual([
expect.objectContaining({ type: "text", text: "😀" }),
]);

@ -106,6 +106,11 @@ export type BinaryFileData = {
* Epoch timestamp in milliseconds.
*/
lastRetrieved?: number;
/**
* indicates the version of the file. This can be used to determine whether
* the file dataURL has changed e.g. as part of restore due to schema update.
*/
version?: number;
};
export type BinaryFileMetadata = Omit<BinaryFileData, "dataURL">;
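
The optional version field ties the pieces together: FileManager keys its saving/saved maps on it and addMissingFiles bumps it when it rewrites an SVG dataURL. A sketch of how a persistence layer might use it to decide which files to (re)write (hypothetical helper; import path is an assumption):

import type { BinaryFileData, BinaryFiles } from "./types";

// returns the files whose version differs from the stored copy,
// i.e. the ones that should be (re)written to storage
const filesNeedingPersist = (
  current: BinaryFiles,
  stored: BinaryFiles,
): BinaryFileData[] => {
  return Object.values(current).filter((file) => {
    const storedFile = stored[file.id];
    return !storedFile || (storedFile.version ?? 1) !== (file.version ?? 1);
  });
};
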

@ -27,7 +27,7 @@ describe("embedding scene data", () => {
const svg = svgNode.outerHTML;
const parsedString = await decodeSvgMetadata({ svg });
const parsedString = decodeSvgMetadata({ svg });
const importedData: ImportedDataState = JSON.parse(parsedString);
expect(sourceElements.map((x) => x.id)).toEqual(
