Mirror of https://github.com/excalidraw/excalidraw (synced 2025-07-25 13:58:22 +08:00)
Compare commits
21 Commits
ryan-di/sv ... dependabot
SHA1
92c7f98eb6
98c0a67333
57cf577376
6e0ee89ee4
35f778a734
ee091d0dbd
ef9ea14a75
df168a6883
798f5f4dfb
d9ad7c039b
7c0239e693
da33481fa3
70e0e8dc29
2734e646ca
dfaaff4432
03028eaa8c
79b181bcdc
f9815b8b4f
96ed8a4331
33b01d4e80
7d52176fea
@@ -8,7 +8,7 @@ VITE_APP_LIBRARY_BACKEND=https://us-central1-excalidraw-room-persistence.cloudfu
VITE_APP_WS_SERVER_URL=http://localhost:3002

VITE_APP_PLUS_LP=https://plus.excalidraw.com

VITE_APP_PLUS_APP=https://app.excalidraw.com
VITE_APP_PLUS_APP=http://localhost:3000

VITE_APP_AI_BACKEND=http://localhost:3015

@@ -37,3 +37,14 @@ VITE_APP_COLLAPSE_OVERLAY=true

# Set this flag to false to disable eslint
VITE_APP_ENABLE_ESLINT=true

# Enable PWA in dev server
VITE_APP_ENABLE_PWA=false

VITE_APP_PLUS_EXPORT_PUBLIC_KEY='MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAm2g5T+Rub6Kbf1Mf57t0
7r2zeHuVg4dla3r5ryXMswtzz6x767octl6oLThn33mQsPSy3GKglFZoCTXJR4ij
ba8SxB04sL/N8eRrKja7TFWjCVtRwTTfyy771NYYNFVJclkxHyE5qw4m27crHF1y
UNWEjuqNMi/lwAErS9fFa2oJlWyT8U7zzv/5kQREkxZI6y9v0AF3qcbsy2731FnD
s9ChJvOUW9toIab2gsIdrKW8ZNpu084ZFVKb6LNjvIXI1Se4oMTHeszXzNptzlot
kdxxjOoaQMAyfljFSot1F1FlU6MQlag7UnFGvFjRHN1JI5q4K+n3a67DX+TMyRqS
HQIDAQAB'
@@ -15,3 +15,18 @@ VITE_APP_WS_SERVER_URL=https://oss-collab.excalidraw.com
VITE_APP_FIREBASE_CONFIG='{"apiKey":"AIzaSyAd15pYlMci_xIp9ko6wkEsDzAAA0Dn0RU","authDomain":"excalidraw-room-persistence.firebaseapp.com","databaseURL":"https://excalidraw-room-persistence.firebaseio.com","projectId":"excalidraw-room-persistence","storageBucket":"excalidraw-room-persistence.appspot.com","messagingSenderId":"654800341332","appId":"1:654800341332:web:4a692de832b55bd57ce0c1"}'

VITE_APP_ENABLE_TRACKING=false

VITE_APP_PLUS_EXPORT_PUBLIC_KEY='MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEApQ0jM9Qz8TdFLzcuAZZX
/WvuKSOJxiw6AR/ZcE3eFQWM/mbFdhQgyK8eHGkKQifKzH1xUZjCxyXcxW6ZO02t
kPOPxhz+nxUrIoWCD/V4NGmUA1lxwHuO21HN1gzKrN3xHg5EGjyouR9vibT9VDGF
gq6+4Ic/kJX+AD2MM7Yre2+FsOdysrmuW2Fu3ahuC1uQE7pOe1j0k7auNP0y1q53
PrB8Ts2LUpepWC1l7zIXFm4ViDULuyWXTEpUcHSsEH8vpd1tckjypxCwkipfZsXx
iPszy0o0Dx2iArPfWMXlFAI9mvyFCyFC3+nSvfyAUb2C4uZgCwAuyFh/ydPF4DEE
PQIDAQAB'

# Set the below flags explicitly to false in production mode since vite loads and merges .env.local vars when running the build command
VITE_APP_DEBUG_ENABLE_TEXT_CONTAINER_BOUNDING_BOX=false
VITE_APP_COLLAPSE_OVERLAY=false
# Enable eslint in dev server
VITE_APP_ENABLE_ESLINT=false
2 .github/workflows/locales-coverage.yml vendored
@@ -10,7 +10,7 @@ jobs:
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v2
      - uses: actions/checkout@v4
        with:
          token: ${{ secrets.PUSH_TRANSLATIONS_COVERAGE_PAT }}
2 .github/workflows/semantic-pr-title.yml vendored
@@ -11,6 +11,6 @@ jobs:
  semantic:
    runs-on: ubuntu-latest
    steps:
      - uses: amannn/action-semantic-pull-request@v3.0.0
      - uses: amannn/action-semantic-pull-request@v5
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
@@ -3278,9 +3278,9 @@ cross-fetch@^3.1.5:
    node-fetch "2.6.7"

cross-spawn@^7.0.3:
  version "7.0.3"
  resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.3.tgz#f73a85b9d5d41d045551c177e2882d4ac85728a6"
  integrity sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==
  version "7.0.6"
  resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.6.tgz#8a58fe78f00dcd70c370451759dfbfaf03e8ee9f"
  integrity sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==
  dependencies:
    path-key "^3.1.0"
    shebang-command "^2.0.0"
@@ -126,6 +126,7 @@ import DebugCanvas, {
  loadSavedDebugState,
} from "./components/DebugCanvas";
import { AIComponents } from "./components/AI";
import { ExcalidrawPlusIframeExport } from "./ExcalidrawPlusIframeExport";

polyfill();

@@ -1125,6 +1126,12 @@ const ExcalidrawWrapper = () => {
};

const ExcalidrawApp = () => {
  const isCloudExportWindow =
    window.location.pathname === "/excalidraw-plus-export";
  if (isCloudExportWindow) {
    return <ExcalidrawPlusIframeExport />;
  }

  return (
    <TopErrorBoundary>
      <Provider unstable_createStore={() => appJotaiStore}>
222
excalidraw-app/ExcalidrawPlusIframeExport.tsx
Normal file
222
excalidraw-app/ExcalidrawPlusIframeExport.tsx
Normal file
@ -0,0 +1,222 @@
|
||||
import { useLayoutEffect, useRef } from "react";
|
||||
import { STORAGE_KEYS } from "./app_constants";
|
||||
import { LocalData } from "./data/LocalData";
|
||||
import type {
|
||||
FileId,
|
||||
OrderedExcalidrawElement,
|
||||
} from "../packages/excalidraw/element/types";
|
||||
import type { AppState, BinaryFileData } from "../packages/excalidraw/types";
|
||||
import { ExcalidrawError } from "../packages/excalidraw/errors";
|
||||
import { base64urlToString } from "../packages/excalidraw/data/encode";
|
||||
|
||||
const EVENT_REQUEST_SCENE = "REQUEST_SCENE";
|
||||
|
||||
const EXCALIDRAW_PLUS_ORIGIN = import.meta.env.VITE_APP_PLUS_APP;
|
||||
|
||||
// -----------------------------------------------------------------------------
|
||||
// outgoing message
|
||||
// -----------------------------------------------------------------------------
|
||||
type MESSAGE_REQUEST_SCENE = {
|
||||
type: "REQUEST_SCENE";
|
||||
jwt: string;
|
||||
};
|
||||
|
||||
type MESSAGE_FROM_PLUS = MESSAGE_REQUEST_SCENE;
|
||||
|
||||
// incoming messages
|
||||
// -----------------------------------------------------------------------------
|
||||
type MESSAGE_READY = { type: "READY" };
|
||||
type MESSAGE_ERROR = { type: "ERROR"; message: string };
|
||||
type MESSAGE_SCENE_DATA = {
|
||||
type: "SCENE_DATA";
|
||||
elements: OrderedExcalidrawElement[];
|
||||
appState: Pick<AppState, "viewBackgroundColor">;
|
||||
files: { loadedFiles: BinaryFileData[]; erroredFiles: Map<FileId, true> };
|
||||
};
|
||||
|
||||
type MESSAGE_FROM_EDITOR = MESSAGE_ERROR | MESSAGE_SCENE_DATA | MESSAGE_READY;
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
const parseSceneData = async ({
|
||||
rawElementsString,
|
||||
rawAppStateString,
|
||||
}: {
|
||||
rawElementsString: string | null;
|
||||
rawAppStateString: string | null;
|
||||
}): Promise<MESSAGE_SCENE_DATA> => {
|
||||
if (!rawElementsString || !rawAppStateString) {
|
||||
throw new ExcalidrawError("Elements or appstate is missing.");
|
||||
}
|
||||
|
||||
try {
|
||||
const elements = JSON.parse(
|
||||
rawElementsString,
|
||||
) as OrderedExcalidrawElement[];
|
||||
|
||||
if (!elements.length) {
|
||||
throw new ExcalidrawError("Scene is empty, nothing to export.");
|
||||
}
|
||||
|
||||
const appState = JSON.parse(rawAppStateString) as Pick<
|
||||
AppState,
|
||||
"viewBackgroundColor"
|
||||
>;
|
||||
|
||||
const fileIds = elements.reduce((acc, el) => {
|
||||
if ("fileId" in el && el.fileId) {
|
||||
acc.push(el.fileId);
|
||||
}
|
||||
return acc;
|
||||
}, [] as FileId[]);
|
||||
|
||||
const files = await LocalData.fileStorage.getFiles(fileIds);
|
||||
|
||||
return {
|
||||
type: "SCENE_DATA",
|
||||
elements,
|
||||
appState,
|
||||
files,
|
||||
};
|
||||
} catch (error: any) {
|
||||
throw error instanceof ExcalidrawError
|
||||
? error
|
||||
: new ExcalidrawError("Failed to parse scene data.");
|
||||
}
|
||||
};
|
||||
|
||||
const verifyJWT = async ({
|
||||
token,
|
||||
publicKey,
|
||||
}: {
|
||||
token: string;
|
||||
publicKey: string;
|
||||
}) => {
|
||||
try {
|
||||
if (!publicKey) {
|
||||
throw new ExcalidrawError("Public key is undefined");
|
||||
}
|
||||
|
||||
const [header, payload, signature] = token.split(".");
|
||||
|
||||
if (!header || !payload || !signature) {
|
||||
throw new ExcalidrawError("Invalid JWT format");
|
||||
}
|
||||
|
||||
// JWT is using Base64URL encoding
|
||||
const decodedPayload = base64urlToString(payload);
|
||||
const decodedSignature = base64urlToString(signature);
|
||||
|
||||
const data = `${header}.${payload}`;
|
||||
const signatureArrayBuffer = Uint8Array.from(decodedSignature, (c) =>
|
||||
c.charCodeAt(0),
|
||||
);
|
||||
|
||||
const keyData = publicKey.replace(/-----\w+ PUBLIC KEY-----/g, "");
|
||||
const keyArrayBuffer = Uint8Array.from(atob(keyData), (c) =>
|
||||
c.charCodeAt(0),
|
||||
);
|
||||
|
||||
const key = await crypto.subtle.importKey(
|
||||
"spki",
|
||||
keyArrayBuffer,
|
||||
{ name: "RSASSA-PKCS1-v1_5", hash: "SHA-256" },
|
||||
true,
|
||||
["verify"],
|
||||
);
|
||||
|
||||
const isValid = await crypto.subtle.verify(
|
||||
"RSASSA-PKCS1-v1_5",
|
||||
key,
|
||||
signatureArrayBuffer,
|
||||
new TextEncoder().encode(data),
|
||||
);
|
||||
|
||||
if (!isValid) {
|
||||
throw new Error("Invalid JWT");
|
||||
}
|
||||
|
||||
const parsedPayload = JSON.parse(decodedPayload);
|
||||
|
||||
// Check for expiration
|
||||
const currentTime = Math.floor(Date.now() / 1000);
|
||||
if (parsedPayload.exp && parsedPayload.exp < currentTime) {
|
||||
throw new Error("JWT has expired");
|
||||
}
|
||||
} catch (error) {
|
||||
console.error("Failed to verify JWT:", error);
|
||||
throw new Error(error instanceof Error ? error.message : "Invalid JWT");
|
||||
}
|
||||
};
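For context, a token that passes the verification above is a standard RS256 JWT signed over `header.payload` with the private counterpart of VITE_APP_PLUS_EXPORT_PUBLIC_KEY. The sketch below shows one way such a token could be minted with Node's built-in crypto; it is illustrative only, and the actual signing code and claims used by Excalidraw+ are not part of this diff.

import { createSign } from "crypto";

// Illustrative only: mints a minimal RS256 JWT that verifyJWT() above can
// check with the matching SPKI public key. Claim names are placeholders.
const signExportJWT = (
  privateKeyPem: string,
  claims: Record<string, unknown>,
) => {
  const b64url = (data: string | Buffer) =>
    Buffer.from(data).toString("base64url");

  const header = b64url(JSON.stringify({ alg: "RS256", typ: "JWT" }));
  const payload = b64url(
    JSON.stringify({ ...claims, exp: Math.floor(Date.now() / 1000) + 60 }),
  );

  const signer = createSign("RSA-SHA256"); // RSASSA-PKCS1-v1_5 + SHA-256
  signer.update(`${header}.${payload}`);
  const signature = signer.sign(privateKeyPem);

  return `${header}.${payload}.${b64url(signature)}`;
};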
|
||||
|
||||
export const ExcalidrawPlusIframeExport = () => {
|
||||
const readyRef = useRef(false);
|
||||
|
||||
useLayoutEffect(() => {
|
||||
const handleMessage = async (event: MessageEvent<MESSAGE_FROM_PLUS>) => {
|
||||
if (event.origin !== EXCALIDRAW_PLUS_ORIGIN) {
|
||||
throw new ExcalidrawError("Invalid origin");
|
||||
}
|
||||
|
||||
if (event.data.type === EVENT_REQUEST_SCENE) {
|
||||
if (!event.data.jwt) {
|
||||
throw new ExcalidrawError("JWT is missing");
|
||||
}
|
||||
|
||||
try {
|
||||
try {
|
||||
await verifyJWT({
|
||||
token: event.data.jwt,
|
||||
publicKey: import.meta.env.VITE_APP_PLUS_EXPORT_PUBLIC_KEY,
|
||||
});
|
||||
} catch (error: any) {
|
||||
console.error(`Failed to verify JWT: ${error.message}`);
|
||||
throw new ExcalidrawError("Failed to verify JWT");
|
||||
}
|
||||
|
||||
const parsedSceneData: MESSAGE_SCENE_DATA = await parseSceneData({
|
||||
rawAppStateString: localStorage.getItem(
|
||||
STORAGE_KEYS.LOCAL_STORAGE_APP_STATE,
|
||||
),
|
||||
rawElementsString: localStorage.getItem(
|
||||
STORAGE_KEYS.LOCAL_STORAGE_ELEMENTS,
|
||||
),
|
||||
});
|
||||
|
||||
event.source!.postMessage(parsedSceneData, {
|
||||
targetOrigin: EXCALIDRAW_PLUS_ORIGIN,
|
||||
});
|
||||
} catch (error) {
|
||||
const responseData: MESSAGE_ERROR = {
|
||||
type: "ERROR",
|
||||
message:
|
||||
error instanceof ExcalidrawError
|
||||
? error.message
|
||||
: "Failed to export scene data",
|
||||
};
|
||||
event.source!.postMessage(responseData, {
|
||||
targetOrigin: EXCALIDRAW_PLUS_ORIGIN,
|
||||
});
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
window.addEventListener("message", handleMessage);
|
||||
|
||||
// so we don't send twice in StrictMode
|
||||
if (!readyRef.current) {
|
||||
readyRef.current = true;
|
||||
const message: MESSAGE_FROM_EDITOR = { type: "READY" };
|
||||
window.parent.postMessage(message, EXCALIDRAW_PLUS_ORIGIN);
|
||||
}
|
||||
|
||||
return () => {
|
||||
window.removeEventListener("message", handleMessage);
|
||||
};
|
||||
}, []);
|
||||
|
||||
// Since this component is expected to run in a hidden iframe on Excaildraw+,
|
||||
// it doesn't need to render anything. All the data we need is available in
|
||||
// LocalStorage and IndexedDB. It only needs to handle the messaging between
|
||||
// the parent window and the iframe with the relevant data.
|
||||
return null;
|
||||
};
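From the Excalidraw+ side, the handshake implied by the messages above would roughly be: embed this route in a hidden iframe, wait for READY, post REQUEST_SCENE with a JWT, then receive SCENE_DATA or ERROR. The sketch below is an assumed parent-side counterpart; only the message shapes come from this diff, while the embed origin, JWT source, and helper name are made up for illustration.

// Hypothetical parent-side counterpart (not part of this diff).
const EDITOR_ORIGIN = "https://excalidraw.com"; // assumed embed origin

const requestSceneExport = (jwt: string) =>
  new Promise<unknown>((resolve, reject) => {
    const iframe = document.createElement("iframe");
    iframe.style.display = "none";
    iframe.src = `${EDITOR_ORIGIN}/excalidraw-plus-export`;

    function onMessage(event: MessageEvent) {
      if (event.origin !== EDITOR_ORIGIN) {
        return;
      }
      if (event.data.type === "READY") {
        // the editor iframe is listening: ask for the scene
        iframe.contentWindow!.postMessage(
          { type: "REQUEST_SCENE", jwt },
          EDITOR_ORIGIN,
        );
      } else if (event.data.type === "SCENE_DATA") {
        resolve(event.data);
        cleanup();
      } else if (event.data.type === "ERROR") {
        reject(new Error(event.data.message));
        cleanup();
      }
    }

    function cleanup() {
      window.removeEventListener("message", onMessage);
      iframe.remove();
    }

    window.addEventListener("message", onMessage);
    document.body.appendChild(iframe);
  });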
|
@ -1,6 +1,7 @@
|
||||
import throttle from "lodash.throttle";
|
||||
import { PureComponent } from "react";
|
||||
import type {
|
||||
BinaryFileData,
|
||||
ExcalidrawImperativeAPI,
|
||||
SocketId,
|
||||
} from "../../packages/excalidraw/types";
|
||||
@ -9,6 +10,7 @@ import { APP_NAME, ENV, EVENT } from "../../packages/excalidraw/constants";
|
||||
import type { ImportedDataState } from "../../packages/excalidraw/data/types";
|
||||
import type {
|
||||
ExcalidrawElement,
|
||||
FileId,
|
||||
InitializedExcalidrawImageElement,
|
||||
OrderedExcalidrawElement,
|
||||
} from "../../packages/excalidraw/element/types";
|
||||
@ -157,7 +159,7 @@ class Collab extends PureComponent<CollabProps, CollabState> {
|
||||
throw new AbortError();
|
||||
}
|
||||
|
||||
return saveFilesToFirebase({
|
||||
const { savedFiles, erroredFiles } = await saveFilesToFirebase({
|
||||
prefix: `${FIREBASE_STORAGE_PREFIXES.collabFiles}/${roomId}`,
|
||||
files: await encodeFilesForUpload({
|
||||
files: addedFiles,
|
||||
@ -165,6 +167,29 @@ class Collab extends PureComponent<CollabProps, CollabState> {
|
||||
maxBytes: FILE_UPLOAD_MAX_BYTES,
|
||||
}),
|
||||
});
|
||||
|
||||
return {
|
||||
savedFiles: savedFiles.reduce(
|
||||
(acc: Map<FileId, BinaryFileData>, id) => {
|
||||
const fileData = addedFiles.get(id);
|
||||
if (fileData) {
|
||||
acc.set(id, fileData);
|
||||
}
|
||||
return acc;
|
||||
},
|
||||
new Map(),
|
||||
),
|
||||
erroredFiles: erroredFiles.reduce(
|
||||
(acc: Map<FileId, BinaryFileData>, id) => {
|
||||
const fileData = addedFiles.get(id);
|
||||
if (fileData) {
|
||||
acc.set(id, fileData);
|
||||
}
|
||||
return acc;
|
||||
},
|
||||
new Map(),
|
||||
),
|
||||
};
|
||||
},
|
||||
});
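The two reduce blocks above perform the same FileId[] to Map<FileId, BinaryFileData> lookup against addedFiles; a small helper like the sketch below (hypothetical, not in this diff) would express that mapping once.

// Hypothetical helper; FileId and BinaryFileData are the types already
// imported at the top of this file.
const idsToFileMap = (
  ids: readonly FileId[],
  addedFiles: Map<FileId, BinaryFileData>,
): Map<FileId, BinaryFileData> => {
  const result = new Map<FileId, BinaryFileData>();
  for (const id of ids) {
    const fileData = addedFiles.get(id);
    if (fileData) {
      result.set(id, fileData);
    }
  }
  return result;
};

// usage (sketch):
//   return {
//     savedFiles: idsToFileMap(savedFiles, addedFiles),
//     erroredFiles: idsToFileMap(erroredFiles, addedFiles),
//   };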
|
||||
this.excalidrawAPI = props.excalidrawAPI;
|
||||
@ -394,7 +419,7 @@ class Collab extends PureComponent<CollabProps, CollabState> {
|
||||
.filter((element) => {
|
||||
return (
|
||||
isInitializedImageElement(element) &&
|
||||
!this.fileManager.isFileHandled(element.fileId) &&
|
||||
!this.fileManager.isFileTracked(element.fileId) &&
|
||||
!element.isDeleted &&
|
||||
(opts.forceFetchFiles
|
||||
? element.status !== "pending" ||
|
||||
|
@ -16,14 +16,26 @@ import type {
|
||||
BinaryFiles,
|
||||
} from "../../packages/excalidraw/types";
|
||||
|
||||
type FileVersion = Required<BinaryFileData>["version"];
|
||||
|
||||
export class FileManager {
|
||||
/** files being fetched */
|
||||
private fetchingFiles = new Map<ExcalidrawImageElement["fileId"], true>();
|
||||
private erroredFiles_fetch = new Map<
|
||||
ExcalidrawImageElement["fileId"],
|
||||
true
|
||||
>();
|
||||
/** files being saved */
|
||||
private savingFiles = new Map<ExcalidrawImageElement["fileId"], true>();
|
||||
private savingFiles = new Map<
|
||||
ExcalidrawImageElement["fileId"],
|
||||
FileVersion
|
||||
>();
|
||||
/* files already saved to persistent storage */
|
||||
private savedFiles = new Map<ExcalidrawImageElement["fileId"], true>();
|
||||
private erroredFiles = new Map<ExcalidrawImageElement["fileId"], true>();
|
||||
private savedFiles = new Map<ExcalidrawImageElement["fileId"], FileVersion>();
|
||||
private erroredFiles_save = new Map<
|
||||
ExcalidrawImageElement["fileId"],
|
||||
FileVersion
|
||||
>();
|
||||
|
||||
private _getFiles;
|
||||
private _saveFiles;
|
||||
@ -37,8 +49,8 @@ export class FileManager {
|
||||
erroredFiles: Map<FileId, true>;
|
||||
}>;
|
||||
saveFiles: (data: { addedFiles: Map<FileId, BinaryFileData> }) => Promise<{
|
||||
savedFiles: Map<FileId, true>;
|
||||
erroredFiles: Map<FileId, true>;
|
||||
savedFiles: Map<FileId, BinaryFileData>;
|
||||
erroredFiles: Map<FileId, BinaryFileData>;
|
||||
}>;
|
||||
}) {
|
||||
this._getFiles = getFiles;
|
||||
@ -46,19 +58,28 @@ export class FileManager {
|
||||
}
|
||||
|
||||
/**
|
||||
* returns whether file is already saved or being processed
|
||||
* returns whether file is saved/errored, or being processed
|
||||
*/
|
||||
isFileHandled = (id: FileId) => {
|
||||
isFileTracked = (id: FileId) => {
|
||||
return (
|
||||
this.savedFiles.has(id) ||
|
||||
this.fetchingFiles.has(id) ||
|
||||
this.savingFiles.has(id) ||
|
||||
this.erroredFiles.has(id)
|
||||
this.fetchingFiles.has(id) ||
|
||||
this.erroredFiles_fetch.has(id) ||
|
||||
this.erroredFiles_save.has(id)
|
||||
);
|
||||
};
|
||||
|
||||
isFileSaved = (id: FileId) => {
|
||||
return this.savedFiles.has(id);
|
||||
isFileSavedOrBeingSaved = (file: BinaryFileData) => {
|
||||
const fileVersion = this.getFileVersion(file);
|
||||
return (
|
||||
this.savedFiles.get(file.id) === fileVersion ||
|
||||
this.savingFiles.get(file.id) === fileVersion
|
||||
);
|
||||
};
|
||||
|
||||
getFileVersion = (file: BinaryFileData) => {
|
||||
return file.version ?? 1;
|
||||
};
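Keying the saved/saving maps by FileVersion rather than `true` is what lets a file be persisted again when its contents change after the first save, for example when an SVG dataURL is normalized and its `version` is bumped. A self-contained sketch of the check (names simplified, data made up):

// Sketch of the version check in isolation.
type Id = string;
const saved = new Map<Id, number /* FileVersion */>();

const needsSave = (id: Id, version: number) => saved.get(id) !== version;

saved.set("abc", 1);
needsSave("abc", 1); // false: already saved at this version
needsSave("abc", 2); // true: content changed, so it gets saved again
// with the previous boolean maps, the file would never have been re-saved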
|
||||
|
||||
saveFiles = async ({
|
||||
@ -71,13 +92,16 @@ export class FileManager {
|
||||
const addedFiles: Map<FileId, BinaryFileData> = new Map();
|
||||
|
||||
for (const element of elements) {
|
||||
const fileData =
|
||||
isInitializedImageElement(element) && files[element.fileId];
|
||||
|
||||
if (
|
||||
isInitializedImageElement(element) &&
|
||||
files[element.fileId] &&
|
||||
!this.isFileHandled(element.fileId)
|
||||
fileData &&
|
||||
// NOTE if errored during save, won't retry due to this check
|
||||
!this.isFileSavedOrBeingSaved(fileData)
|
||||
) {
|
||||
addedFiles.set(element.fileId, files[element.fileId]);
|
||||
this.savingFiles.set(element.fileId, true);
|
||||
this.savingFiles.set(element.fileId, this.getFileVersion(fileData));
|
||||
}
|
||||
}
|
||||
|
||||
@ -86,8 +110,12 @@ export class FileManager {
|
||||
addedFiles,
|
||||
});
|
||||
|
||||
for (const [fileId] of savedFiles) {
|
||||
this.savedFiles.set(fileId, true);
|
||||
for (const [fileId, fileData] of savedFiles) {
|
||||
this.savedFiles.set(fileId, this.getFileVersion(fileData));
|
||||
}
|
||||
|
||||
for (const [fileId, fileData] of erroredFiles) {
|
||||
this.erroredFiles_save.set(fileId, this.getFileVersion(fileData));
|
||||
}
|
||||
|
||||
return {
|
||||
@ -121,10 +149,10 @@ export class FileManager {
|
||||
const { loadedFiles, erroredFiles } = await this._getFiles(ids);
|
||||
|
||||
for (const file of loadedFiles) {
|
||||
this.savedFiles.set(file.id, true);
|
||||
this.savedFiles.set(file.id, this.getFileVersion(file));
|
||||
}
|
||||
for (const [fileId] of erroredFiles) {
|
||||
this.erroredFiles.set(fileId, true);
|
||||
this.erroredFiles_fetch.set(fileId, true);
|
||||
}
|
||||
|
||||
return { loadedFiles, erroredFiles };
|
||||
@ -160,7 +188,7 @@ export class FileManager {
|
||||
): element is InitializedExcalidrawImageElement => {
|
||||
return (
|
||||
isInitializedImageElement(element) &&
|
||||
this.isFileSaved(element.fileId) &&
|
||||
this.savedFiles.has(element.fileId) &&
|
||||
element.status === "pending"
|
||||
);
|
||||
};
|
||||
@ -169,7 +197,8 @@ export class FileManager {
|
||||
this.fetchingFiles.clear();
|
||||
this.savingFiles.clear();
|
||||
this.savedFiles.clear();
|
||||
this.erroredFiles.clear();
|
||||
this.erroredFiles_fetch.clear();
|
||||
this.erroredFiles_save.clear();
|
||||
}
|
||||
}
|
||||
|
||||
|
@@ -183,8 +183,8 @@ export class LocalData {
      );
    },
    async saveFiles({ addedFiles }) {
      const savedFiles = new Map<FileId, true>();
      const erroredFiles = new Map<FileId, true>();
      const savedFiles = new Map<FileId, BinaryFileData>();
      const erroredFiles = new Map<FileId, BinaryFileData>();

      // before we use `storage` event synchronization, let's update the flag
      // optimistically. Hopefully nothing fails, and an IDB read executed
@@ -195,10 +195,10 @@
        [...addedFiles].map(async ([id, fileData]) => {
          try {
            await set(id, fileData, filesStore);
            savedFiles.set(id, true);
            savedFiles.set(id, fileData);
          } catch (error: any) {
            console.error(error);
            erroredFiles.set(id, true);
            erroredFiles.set(id, fileData);
          }
        }),
      );
@@ -177,8 +177,8 @@ export const saveFilesToFirebase = async ({
}) => {
  const firebase = await loadFirebaseStorage();

  const erroredFiles = new Map<FileId, true>();
  const savedFiles = new Map<FileId, true>();
  const erroredFiles: FileId[] = [];
  const savedFiles: FileId[] = [];

  await Promise.all(
    files.map(async ({ id, buffer }) => {
@@ -194,9 +194,9 @@ export const saveFilesToFirebase = async ({
            cacheControl: `public, max-age=${FILE_CACHE_MAX_AGE_SEC}`,
          },
        );
        savedFiles.set(id, true);
        savedFiles.push(id);
      } catch (error: any) {
        erroredFiles.set(id, true);
        erroredFiles.push(id);
      }
    }),
  );
@ -54,12 +54,6 @@
|
||||
content="https://excalidraw.com/og-image-3.png"
|
||||
/>
|
||||
|
||||
<!-- General tags -->
|
||||
<meta
|
||||
name="description"
|
||||
content="Excalidraw is a virtual collaborative whiteboard tool that lets you easily sketch diagrams that have a hand-drawn feel to them."
|
||||
/>
|
||||
|
||||
<!------------------------------------------------------------------------->
|
||||
<!-- to minimize white flash on load when user has dark mode enabled -->
|
||||
<script>
|
||||
@ -124,12 +118,6 @@
|
||||
<!-- Following placeholder is replaced during the build step -->
|
||||
<!-- PLACEHOLDER:EXCALIDRAW_APP_FONTS -->
|
||||
|
||||
<% } else { %>
|
||||
<script>
|
||||
window.EXCALIDRAW_ASSET_PATH = window.origin;
|
||||
</script>
|
||||
<% } %>
|
||||
|
||||
<!-- Register Assistant as the UI font, before the scene inits -->
|
||||
<link
|
||||
rel="stylesheet"
|
||||
@ -137,6 +125,12 @@
|
||||
type="text/css"
|
||||
/>
|
||||
|
||||
<% } else { %>
|
||||
<script>
|
||||
window.EXCALIDRAW_ASSET_PATH = window.origin;
|
||||
</script>
|
||||
<% } %>
|
||||
|
||||
<link rel="apple-touch-icon" sizes="180x180" href="/apple-touch-icon.png" />
|
||||
<link rel="icon" type="image/png" sizes="32x32" href="/favicon-32x32.png" />
|
||||
<link rel="icon" type="image/png" sizes="16x16" href="/favicon-16x16.png" />
|
||||
@ -218,6 +212,7 @@
|
||||
</header>
|
||||
<div id="root"></div>
|
||||
<script type="module" src="index.tsx"></script>
|
||||
<% if (typeof PROD != 'undefined' && PROD == true) { %>
|
||||
<!-- 100% privacy friendly analytics -->
|
||||
<script>
|
||||
// need to load this script dynamically bcs. of iframe embed tracking
|
||||
@ -250,5 +245,6 @@
|
||||
}
|
||||
</script>
|
||||
<!-- end LEGACY GOOGLE ANALYTICS -->
|
||||
<% } %>
|
||||
</body>
|
||||
</html>
|
||||
|
@ -23,20 +23,20 @@
|
||||
]
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=18.0.0"
|
||||
"node": "18.0.0 - 22.x.x"
|
||||
},
|
||||
"dependencies": {
|
||||
"@excalidraw/random-username": "1.0.0",
|
||||
"@sentry/browser": "6.2.5",
|
||||
"@sentry/integrations": "6.2.5",
|
||||
"firebase": "8.3.3",
|
||||
"i18next-browser-languagedetector": "6.1.4",
|
||||
"idb-keyval": "6.0.3",
|
||||
"jotai": "1.13.1",
|
||||
"react": "18.2.0",
|
||||
"react-dom": "18.2.0",
|
||||
"vite-plugin-html": "3.2.2",
|
||||
"@excalidraw/random-username": "1.0.0",
|
||||
"@sentry/browser": "6.2.5",
|
||||
"@sentry/integrations": "6.2.5",
|
||||
"i18next-browser-languagedetector": "6.1.4",
|
||||
"socket.io-client": "4.7.2"
|
||||
"socket.io-client": "4.7.2",
|
||||
"vite-plugin-html": "3.2.2"
|
||||
},
|
||||
"prettier": "@excalidraw/prettier-config",
|
||||
"scripts": {
|
||||
@ -49,5 +49,8 @@
|
||||
"start:production": "yarn build && yarn serve",
|
||||
"serve": "npx http-server build -a localhost -p 5001 -o",
|
||||
"build:preview": "yarn build && vite preview --port 5000"
|
||||
},
|
||||
"devDependencies": {
|
||||
"vite-plugin-sitemap": "0.7.1"
|
||||
}
|
||||
}
|
||||
|
3 excalidraw-app/vite-env.d.ts vendored
@@ -29,6 +29,9 @@ interface ImportMetaEnv {
  // Enable eslint in dev server
  VITE_APP_ENABLE_ESLINT: string;

  // Enable PWA in dev server
  VITE_APP_ENABLE_PWA: string;

  VITE_APP_PLUS_LP: string;

  VITE_APP_PLUS_APP: string;
@ -5,221 +5,237 @@ import { ViteEjsPlugin } from "vite-plugin-ejs";
|
||||
import { VitePWA } from "vite-plugin-pwa";
|
||||
import checker from "vite-plugin-checker";
|
||||
import { createHtmlPlugin } from "vite-plugin-html";
|
||||
import Sitemap from "vite-plugin-sitemap";
|
||||
import { woff2BrowserPlugin } from "../scripts/woff2/woff2-vite-plugins";
|
||||
|
||||
// To load .env.local variables
|
||||
const envVars = loadEnv("", `../`);
|
||||
// https://vitejs.dev/config/
|
||||
export default defineConfig({
|
||||
server: {
|
||||
port: Number(envVars.VITE_APP_PORT || 3000),
|
||||
// open the browser
|
||||
open: true,
|
||||
},
|
||||
// We need to specify the envDir since now there are no
|
||||
//more located in parallel with the vite.config.ts file but in parent dir
|
||||
envDir: "../",
|
||||
build: {
|
||||
outDir: "build",
|
||||
rollupOptions: {
|
||||
output: {
|
||||
assetFileNames(chunkInfo) {
|
||||
if (chunkInfo?.name?.endsWith(".woff2")) {
|
||||
const family = chunkInfo.name.split("-")[0];
|
||||
return `fonts/${family}/[name][extname]`;
|
||||
}
|
||||
|
||||
return "assets/[name]-[hash][extname]";
|
||||
},
|
||||
// Creating separate chunk for locales except for en and percentages.json so they
|
||||
// can be cached at runtime and not merged with
|
||||
// app precache. en.json and percentages.json are needed for first load
|
||||
// or fallback hence not clubbing with locales so first load followed by offline mode works fine. This is how CRA used to work too.
|
||||
manualChunks(id) {
|
||||
if (
|
||||
id.includes("packages/excalidraw/locales") &&
|
||||
id.match(/en.json|percentages.json/) === null
|
||||
) {
|
||||
const index = id.indexOf("locales/");
|
||||
// Taking the substring after "locales/"
|
||||
return `locales/${id.substring(index + 8)}`;
|
||||
}
|
||||
},
|
||||
},
|
||||
export default defineConfig(({ mode }) => {
|
||||
// To load .env variables
|
||||
const envVars = loadEnv(mode, `../`);
|
||||
// https://vitejs.dev/config/
|
||||
return {
|
||||
server: {
|
||||
port: Number(envVars.VITE_APP_PORT || 3000),
|
||||
// open the browser
|
||||
open: true,
|
||||
},
|
||||
sourcemap: true,
|
||||
// don't auto-inline small assets (i.e. fonts hosted on CDN)
|
||||
assetsInlineLimit: 0,
|
||||
},
|
||||
plugins: [
|
||||
woff2BrowserPlugin(),
|
||||
react(),
|
||||
checker({
|
||||
typescript: true,
|
||||
eslint:
|
||||
envVars.VITE_APP_ENABLE_ESLINT === "false"
|
||||
? undefined
|
||||
: { lintCommand: 'eslint "./**/*.{js,ts,tsx}"' },
|
||||
overlay: {
|
||||
initialIsOpen: envVars.VITE_APP_COLLAPSE_OVERLAY === "false",
|
||||
badgeStyle: "margin-bottom: 4rem; margin-left: 1rem",
|
||||
},
|
||||
}),
|
||||
svgrPlugin(),
|
||||
ViteEjsPlugin(),
|
||||
VitePWA({
|
||||
registerType: "autoUpdate",
|
||||
devOptions: {
|
||||
/* set this flag to true to enable in Development mode */
|
||||
enabled: false,
|
||||
},
|
||||
// We need to specify the envDir since now there are no
|
||||
//more located in parallel with the vite.config.ts file but in parent dir
|
||||
envDir: "../",
|
||||
build: {
|
||||
outDir: "build",
|
||||
rollupOptions: {
|
||||
output: {
|
||||
assetFileNames(chunkInfo) {
|
||||
if (chunkInfo?.name?.endsWith(".woff2")) {
|
||||
const family = chunkInfo.name.split("-")[0];
|
||||
return `fonts/${family}/[name][extname]`;
|
||||
}
|
||||
|
||||
workbox: {
|
||||
// don't precache fonts, locales and separate chunks
|
||||
globIgnores: ["fonts.css", "**/locales/**", "service-worker.js", "**/*.chunk-*.js"],
|
||||
runtimeCaching: [
|
||||
{
|
||||
urlPattern: new RegExp(".+.woff2"),
|
||||
handler: 'CacheFirst',
|
||||
options: {
|
||||
cacheName: 'fonts',
|
||||
expiration: {
|
||||
maxEntries: 1000,
|
||||
maxAgeSeconds: 60 * 60 * 24 * 90, // 90 days
|
||||
},
|
||||
cacheableResponse: {
|
||||
// 0 to cache "opaque" responses from cross-origin requests (i.e. CDN)
|
||||
statuses: [0, 200],
|
||||
},
|
||||
},
|
||||
return "assets/[name]-[hash][extname]";
|
||||
},
|
||||
{
|
||||
urlPattern: new RegExp("fonts.css"),
|
||||
handler: "StaleWhileRevalidate",
|
||||
options: {
|
||||
cacheName: "fonts",
|
||||
expiration: {
|
||||
maxEntries: 50,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
urlPattern: new RegExp("locales/[^/]+.js"),
|
||||
handler: "CacheFirst",
|
||||
options: {
|
||||
cacheName: "locales",
|
||||
expiration: {
|
||||
maxEntries: 50,
|
||||
maxAgeSeconds: 60 * 60 * 24 * 30, // <== 30 days
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
urlPattern: new RegExp(".chunk-.+.js"),
|
||||
handler: "CacheFirst",
|
||||
options: {
|
||||
cacheName: "chunk",
|
||||
expiration: {
|
||||
maxEntries: 50,
|
||||
maxAgeSeconds: 60 * 60 * 24 * 90, // <== 90 days
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
manifest: {
|
||||
short_name: "Excalidraw",
|
||||
name: "Excalidraw",
|
||||
description:
|
||||
"Excalidraw is a whiteboard tool that lets you easily sketch diagrams that have a hand-drawn feel to them.",
|
||||
icons: [
|
||||
{
|
||||
src: "android-chrome-192x192.png",
|
||||
sizes: "192x192",
|
||||
type: "image/png",
|
||||
},
|
||||
{
|
||||
src: "apple-touch-icon.png",
|
||||
type: "image/png",
|
||||
sizes: "180x180",
|
||||
},
|
||||
{
|
||||
src: "favicon-32x32.png",
|
||||
sizes: "32x32",
|
||||
type: "image/png",
|
||||
},
|
||||
{
|
||||
src: "favicon-16x16.png",
|
||||
sizes: "16x16",
|
||||
type: "image/png",
|
||||
},
|
||||
],
|
||||
start_url: "/",
|
||||
display: "standalone",
|
||||
theme_color: "#121212",
|
||||
background_color: "#ffffff",
|
||||
file_handlers: [
|
||||
{
|
||||
action: "/",
|
||||
accept: {
|
||||
"application/vnd.excalidraw+json": [".excalidraw"],
|
||||
},
|
||||
},
|
||||
],
|
||||
share_target: {
|
||||
action: "/web-share-target",
|
||||
method: "POST",
|
||||
enctype: "multipart/form-data",
|
||||
params: {
|
||||
files: [
|
||||
{
|
||||
name: "file",
|
||||
accept: [
|
||||
"application/vnd.excalidraw+json",
|
||||
"application/json",
|
||||
".excalidraw",
|
||||
],
|
||||
},
|
||||
],
|
||||
// Creating separate chunk for locales except for en and percentages.json so they
|
||||
// can be cached at runtime and not merged with
|
||||
// app precache. en.json and percentages.json are needed for first load
|
||||
// or fallback hence not clubbing with locales so first load followed by offline mode works fine. This is how CRA used to work too.
|
||||
manualChunks(id) {
|
||||
if (
|
||||
id.includes("packages/excalidraw/locales") &&
|
||||
id.match(/en.json|percentages.json/) === null
|
||||
) {
|
||||
const index = id.indexOf("locales/");
|
||||
// Taking the substring after "locales/"
|
||||
return `locales/${id.substring(index + 8)}`;
|
||||
}
|
||||
},
|
||||
},
|
||||
screenshots: [
|
||||
{
|
||||
src: "/screenshots/virtual-whiteboard.png",
|
||||
type: "image/png",
|
||||
sizes: "462x945",
|
||||
},
|
||||
{
|
||||
src: "/screenshots/wireframe.png",
|
||||
type: "image/png",
|
||||
sizes: "462x945",
|
||||
},
|
||||
{
|
||||
src: "/screenshots/illustration.png",
|
||||
type: "image/png",
|
||||
sizes: "462x945",
|
||||
},
|
||||
{
|
||||
src: "/screenshots/shapes.png",
|
||||
type: "image/png",
|
||||
sizes: "462x945",
|
||||
},
|
||||
{
|
||||
src: "/screenshots/collaboration.png",
|
||||
type: "image/png",
|
||||
sizes: "462x945",
|
||||
},
|
||||
{
|
||||
src: "/screenshots/export.png",
|
||||
type: "image/png",
|
||||
sizes: "462x945",
|
||||
},
|
||||
],
|
||||
},
|
||||
}),
|
||||
createHtmlPlugin({
|
||||
minify: true,
|
||||
}),
|
||||
],
|
||||
publicDir: "../public",
|
||||
sourcemap: true,
|
||||
// don't auto-inline small assets (i.e. fonts hosted on CDN)
|
||||
assetsInlineLimit: 0,
|
||||
},
|
||||
plugins: [
|
||||
Sitemap({
|
||||
hostname: "https://excalidraw.com",
|
||||
outDir: "build",
|
||||
changefreq: "monthly",
|
||||
// its static in public folder
|
||||
generateRobotsTxt: false,
|
||||
}),
|
||||
woff2BrowserPlugin(),
|
||||
react(),
|
||||
checker({
|
||||
typescript: true,
|
||||
eslint:
|
||||
envVars.VITE_APP_ENABLE_ESLINT === "false"
|
||||
? undefined
|
||||
: { lintCommand: 'eslint "./**/*.{js,ts,tsx}"' },
|
||||
overlay: {
|
||||
initialIsOpen: envVars.VITE_APP_COLLAPSE_OVERLAY === "false",
|
||||
badgeStyle: "margin-bottom: 4rem; margin-left: 1rem",
|
||||
},
|
||||
}),
|
||||
svgrPlugin(),
|
||||
ViteEjsPlugin(),
|
||||
VitePWA({
|
||||
registerType: "autoUpdate",
|
||||
devOptions: {
|
||||
/* set this flag to true to enable in Development mode */
|
||||
enabled: envVars.VITE_APP_ENABLE_PWA === "true",
|
||||
},
|
||||
|
||||
workbox: {
|
||||
// don't precache fonts, locales and separate chunks
|
||||
globIgnores: [
|
||||
"fonts.css",
|
||||
"**/locales/**",
|
||||
"service-worker.js",
|
||||
"**/*.chunk-*.js",
|
||||
],
|
||||
runtimeCaching: [
|
||||
{
|
||||
urlPattern: new RegExp(".+.woff2"),
|
||||
handler: "CacheFirst",
|
||||
options: {
|
||||
cacheName: "fonts",
|
||||
expiration: {
|
||||
maxEntries: 1000,
|
||||
maxAgeSeconds: 60 * 60 * 24 * 90, // 90 days
|
||||
},
|
||||
cacheableResponse: {
|
||||
// 0 to cache "opaque" responses from cross-origin requests (i.e. CDN)
|
||||
statuses: [0, 200],
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
urlPattern: new RegExp("fonts.css"),
|
||||
handler: "StaleWhileRevalidate",
|
||||
options: {
|
||||
cacheName: "fonts",
|
||||
expiration: {
|
||||
maxEntries: 50,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
urlPattern: new RegExp("locales/[^/]+.js"),
|
||||
handler: "CacheFirst",
|
||||
options: {
|
||||
cacheName: "locales",
|
||||
expiration: {
|
||||
maxEntries: 50,
|
||||
maxAgeSeconds: 60 * 60 * 24 * 30, // <== 30 days
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
urlPattern: new RegExp(".chunk-.+.js"),
|
||||
handler: "CacheFirst",
|
||||
options: {
|
||||
cacheName: "chunk",
|
||||
expiration: {
|
||||
maxEntries: 50,
|
||||
maxAgeSeconds: 60 * 60 * 24 * 90, // <== 90 days
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
manifest: {
|
||||
short_name: "Excalidraw",
|
||||
name: "Excalidraw",
|
||||
description:
|
||||
"Excalidraw is a whiteboard tool that lets you easily sketch diagrams that have a hand-drawn feel to them.",
|
||||
icons: [
|
||||
{
|
||||
src: "android-chrome-192x192.png",
|
||||
sizes: "192x192",
|
||||
type: "image/png",
|
||||
},
|
||||
{
|
||||
src: "apple-touch-icon.png",
|
||||
type: "image/png",
|
||||
sizes: "180x180",
|
||||
},
|
||||
{
|
||||
src: "favicon-32x32.png",
|
||||
sizes: "32x32",
|
||||
type: "image/png",
|
||||
},
|
||||
{
|
||||
src: "favicon-16x16.png",
|
||||
sizes: "16x16",
|
||||
type: "image/png",
|
||||
},
|
||||
],
|
||||
start_url: "/",
|
||||
id:"excalidraw",
|
||||
display: "standalone",
|
||||
theme_color: "#121212",
|
||||
background_color: "#ffffff",
|
||||
file_handlers: [
|
||||
{
|
||||
action: "/",
|
||||
accept: {
|
||||
"application/vnd.excalidraw+json": [".excalidraw"],
|
||||
},
|
||||
},
|
||||
],
|
||||
share_target: {
|
||||
action: "/web-share-target",
|
||||
method: "POST",
|
||||
enctype: "multipart/form-data",
|
||||
params: {
|
||||
files: [
|
||||
{
|
||||
name: "file",
|
||||
accept: [
|
||||
"application/vnd.excalidraw+json",
|
||||
"application/json",
|
||||
".excalidraw",
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
screenshots: [
|
||||
{
|
||||
src: "/screenshots/virtual-whiteboard.png",
|
||||
type: "image/png",
|
||||
sizes: "462x945",
|
||||
},
|
||||
{
|
||||
src: "/screenshots/wireframe.png",
|
||||
type: "image/png",
|
||||
sizes: "462x945",
|
||||
},
|
||||
{
|
||||
src: "/screenshots/illustration.png",
|
||||
type: "image/png",
|
||||
sizes: "462x945",
|
||||
},
|
||||
{
|
||||
src: "/screenshots/shapes.png",
|
||||
type: "image/png",
|
||||
sizes: "462x945",
|
||||
},
|
||||
{
|
||||
src: "/screenshots/collaboration.png",
|
||||
type: "image/png",
|
||||
sizes: "462x945",
|
||||
},
|
||||
{
|
||||
src: "/screenshots/export.png",
|
||||
type: "image/png",
|
||||
sizes: "462x945",
|
||||
},
|
||||
],
|
||||
},
|
||||
}),
|
||||
createHtmlPlugin({
|
||||
minify: true,
|
||||
}),
|
||||
],
|
||||
publicDir: "../public",
|
||||
};
|
||||
});
|
||||
|
20
package.json
20
package.json
@ -45,7 +45,7 @@
|
||||
"vitest-canvas-mock": "0.3.3"
|
||||
},
|
||||
"engines": {
|
||||
"node": "18.0.0 - 20.x.x"
|
||||
"node": "18.0.0 - 22.x.x"
|
||||
},
|
||||
"homepage": ".",
|
||||
"prettier": "@excalidraw/prettier-config",
|
||||
@ -55,15 +55,9 @@
|
||||
"build:app": "yarn --cwd ./excalidraw-app build:app",
|
||||
"build:version": "yarn --cwd ./excalidraw-app build:version",
|
||||
"build": "yarn --cwd ./excalidraw-app build",
|
||||
"fix:code": "yarn test:code --fix",
|
||||
"fix:other": "yarn prettier --write",
|
||||
"fix": "yarn fix:other && yarn fix:code",
|
||||
"locales-coverage": "node scripts/build-locales-coverage.js",
|
||||
"locales-coverage:description": "node scripts/locales-coverage-description.js",
|
||||
"prepare": "husky install",
|
||||
"prettier": "prettier \"**/*.{css,scss,json,md,html,yml}\" --ignore-path=.eslintignore",
|
||||
"build:preview": "yarn --cwd ./excalidraw-app build:preview",
|
||||
"start": "yarn --cwd ./excalidraw-app start",
|
||||
"start:app:production": "npm run build && npx http-server build -a localhost -p 5001 -o",
|
||||
"start:production": "yarn --cwd ./excalidraw-app start:production",
|
||||
"test:all": "yarn test:typecheck && yarn test:code && yarn test:other && yarn test:app --watch=false",
|
||||
"test:app": "vitest",
|
||||
"test:code": "eslint --max-warnings=0 --ext .js,.ts,.tsx .",
|
||||
@ -74,9 +68,15 @@
|
||||
"test:coverage": "vitest --coverage",
|
||||
"test:coverage:watch": "vitest --coverage --watch",
|
||||
"test:ui": "yarn test --ui --coverage.enabled=true",
|
||||
"fix:code": "yarn test:code --fix",
|
||||
"fix:other": "yarn prettier --write",
|
||||
"fix": "yarn fix:other && yarn fix:code",
|
||||
"locales-coverage": "node scripts/build-locales-coverage.js",
|
||||
"locales-coverage:description": "node scripts/locales-coverage-description.js",
|
||||
"prepare": "husky install",
|
||||
"prettier": "prettier \"**/*.{css,scss,json,md,html,yml}\" --ignore-path=.eslintignore",
|
||||
"autorelease": "node scripts/autorelease.js",
|
||||
"prerelease:excalidraw": "node scripts/prerelease.js",
|
||||
"build:preview": "yarn build && vite preview --port 5000",
|
||||
"release:excalidraw": "node scripts/release.js",
|
||||
"rm:build": "rm -rf excalidraw-app/{build,dist,dev-dist} && rm -rf packages/*/{dist,build} && rm -rf examples/*/*/{build,dist}",
|
||||
"rm:node_modules": "rm -rf node_modules && rm -rf excalidraw-app/node_modules && rm -rf packages/*/node_modules",
|
||||
|
@ -49,7 +49,7 @@ import {
|
||||
} from "../appState";
|
||||
import type { PastedMixedContent } from "../clipboard";
|
||||
import { copyTextToSystemClipboard, parseClipboard } from "../clipboard";
|
||||
import { ARROW_TYPE, type EXPORT_IMAGE_TYPES } from "../constants";
|
||||
import { ARROW_TYPE, isSafari, type EXPORT_IMAGE_TYPES } from "../constants";
|
||||
import {
|
||||
APP_NAME,
|
||||
CURSOR_TYPE,
|
||||
@ -304,8 +304,10 @@ import { Toast } from "./Toast";
|
||||
import { actionToggleViewMode } from "../actions/actionToggleViewMode";
|
||||
import {
|
||||
dataURLToFile,
|
||||
dataURLToString,
|
||||
generateIdFromFile,
|
||||
getDataURL,
|
||||
getDataURL_sync,
|
||||
getFileFromEvent,
|
||||
ImageURLToFile,
|
||||
isImageFileHandle,
|
||||
@ -338,7 +340,6 @@ import {
|
||||
isValidTextContainer,
|
||||
measureText,
|
||||
normalizeText,
|
||||
wrapText,
|
||||
} from "../element/textElement";
|
||||
import {
|
||||
showHyperlinkTooltip,
|
||||
@ -459,6 +460,7 @@ import {
|
||||
vectorNormalize,
|
||||
} from "../../math";
|
||||
import { cropElement } from "../element/cropElement";
|
||||
import { wrapText } from "../element/textWrapping";
|
||||
|
||||
const AppContext = React.createContext<AppClassProperties>(null!);
|
||||
const AppPropsContext = React.createContext<AppProps>(null!);
|
||||
@ -2122,9 +2124,7 @@ class App extends React.Component<AppProps, AppState> {
|
||||
}
|
||||
|
||||
if (actionResult.files) {
|
||||
this.files = actionResult.replaceFiles
|
||||
? actionResult.files
|
||||
: { ...this.files, ...actionResult.files };
|
||||
this.addMissingFiles(actionResult.files, actionResult.replaceFiles);
|
||||
this.addNewImagesToImageCache();
|
||||
}
|
||||
|
||||
@ -2320,11 +2320,11 @@ class App extends React.Component<AppProps, AppState> {
|
||||
// clear the shape and image cache so that any images in initialData
|
||||
// can be loaded fresh
|
||||
this.clearImageShapeCache();
|
||||
// FontFaceSet loadingdone event we listen on may not always
|
||||
// fire (looking at you Safari), so on init we manually load all
|
||||
// fonts and rerender scene text elements once done. This also
|
||||
// seems faster even in browsers that do fire the loadingdone event.
|
||||
this.fonts.loadSceneFonts();
|
||||
|
||||
// manually loading the font faces seems faster even in browsers that do fire the loadingdone event
|
||||
this.fonts.loadSceneFonts().then((fontFaces) => {
|
||||
this.fonts.onLoaded(fontFaces);
|
||||
});
|
||||
};
|
||||
|
||||
private isMobileBreakpoint = (width: number, height: number) => {
|
||||
@ -2567,8 +2567,8 @@ class App extends React.Component<AppProps, AppState> {
|
||||
),
|
||||
// rerender text elements on font load to fix #637 && #1553
|
||||
addEventListener(document.fonts, "loadingdone", (event) => {
|
||||
const loadedFontFaces = (event as FontFaceSetLoadEvent).fontfaces;
|
||||
this.fonts.onLoaded(loadedFontFaces);
|
||||
const fontFaces = (event as FontFaceSetLoadEvent).fontfaces;
|
||||
this.fonts.onLoaded(fontFaces);
|
||||
}),
|
||||
// Safari-only desktop pinch zoom
|
||||
addEventListener(
|
||||
@ -3236,8 +3236,15 @@ class App extends React.Component<AppProps, AppState> {
|
||||
}
|
||||
});
|
||||
|
||||
// paste event may not fire FontFace loadingdone event in Safari, hence loading font faces manually
|
||||
if (isSafari) {
|
||||
Fonts.loadElementsFonts(newElements).then((fontFaces) => {
|
||||
this.fonts.onLoaded(fontFaces);
|
||||
});
|
||||
}
|
||||
|
||||
if (opts.files) {
|
||||
this.files = { ...this.files, ...opts.files };
|
||||
this.addMissingFiles(opts.files);
|
||||
}
|
||||
|
||||
this.store.shouldCaptureIncrement();
|
||||
@ -3746,23 +3753,56 @@ class App extends React.Component<AppProps, AppState> {
|
||||
}
|
||||
};
|
||||
|
||||
/** adds supplied files to existing files in the appState */
|
||||
/**
|
||||
* adds supplied files to existing files in the appState.
|
||||
* NOTE if file already exists in editor state, the file data is not updated
|
||||
* */
|
||||
public addFiles: ExcalidrawImperativeAPI["addFiles"] = withBatchedUpdates(
|
||||
(files) => {
|
||||
const filesMap = files.reduce((acc, fileData) => {
|
||||
acc.set(fileData.id, fileData);
|
||||
return acc;
|
||||
}, new Map<FileId, BinaryFileData>());
|
||||
const { addedFiles } = this.addMissingFiles(files);
|
||||
|
||||
this.files = { ...this.files, ...Object.fromEntries(filesMap) };
|
||||
|
||||
this.clearImageShapeCache(Object.fromEntries(filesMap));
|
||||
this.clearImageShapeCache(addedFiles);
|
||||
this.scene.triggerUpdate();
|
||||
|
||||
this.addNewImagesToImageCache();
|
||||
},
|
||||
);
|
||||
|
||||
private addMissingFiles = (
|
||||
files: BinaryFiles | BinaryFileData[],
|
||||
replace = false,
|
||||
) => {
|
||||
const nextFiles = replace ? {} : { ...this.files };
|
||||
const addedFiles: BinaryFiles = {};
|
||||
|
||||
const _files = Array.isArray(files) ? files : Object.values(files);
|
||||
|
||||
for (const fileData of _files) {
|
||||
if (nextFiles[fileData.id]) {
|
||||
continue;
|
||||
}
|
||||
|
||||
addedFiles[fileData.id] = fileData;
|
||||
nextFiles[fileData.id] = fileData;
|
||||
|
||||
if (fileData.mimeType === MIME_TYPES.svg) {
|
||||
const restoredDataURL = getDataURL_sync(
|
||||
normalizeSVG(dataURLToString(fileData.dataURL)),
|
||||
MIME_TYPES.svg,
|
||||
);
|
||||
if (fileData.dataURL !== restoredDataURL) {
|
||||
// bump version so persistence layer can update the store
|
||||
fileData.version = (fileData.version ?? 1) + 1;
|
||||
fileData.dataURL = restoredDataURL;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
this.files = nextFiles;
|
||||
|
||||
return { addedFiles };
|
||||
};
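`addMissingFiles` also backs the public `addFiles` imperative API shown above; a minimal usage sketch (the id and dataURL values are placeholders, not from this diff):

// Usage sketch; values are placeholders.
excalidrawAPI.addFiles([
  {
    id: "some-file-id" as FileId,
    mimeType: MIME_TYPES.svg,
    dataURL: "data:image/svg+xml;base64,..." as DataURL,
    created: Date.now(),
  },
]);
// files whose id already exists in editor state are skipped; newly added
// SVG files get their dataURL normalized and, if it changed, `version` bumped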
|
||||
|
||||
public updateScene = withBatchedUpdates(
|
||||
<K extends keyof AppState>(sceneData: {
|
||||
elements?: SceneData["elements"];
|
||||
@ -9285,7 +9325,7 @@ class App extends React.Component<AppProps, AppState> {
|
||||
if (mimeType === MIME_TYPES.svg) {
|
||||
try {
|
||||
imageFile = SVGStringToFile(
|
||||
await normalizeSVG(await imageFile.text()),
|
||||
normalizeSVG(await imageFile.text()),
|
||||
imageFile.name,
|
||||
);
|
||||
} catch (error: any) {
|
||||
@ -9353,16 +9393,15 @@ class App extends React.Component<AppProps, AppState> {
|
||||
return new Promise<NonDeleted<InitializedExcalidrawImageElement>>(
|
||||
async (resolve, reject) => {
|
||||
try {
|
||||
this.files = {
|
||||
...this.files,
|
||||
[fileId]: {
|
||||
this.addMissingFiles([
|
||||
{
|
||||
mimeType,
|
||||
id: fileId,
|
||||
dataURL,
|
||||
created: Date.now(),
|
||||
lastRetrieved: Date.now(),
|
||||
},
|
||||
};
|
||||
]);
|
||||
const cachedImageData = this.imageCache.get(fileId);
|
||||
if (!cachedImageData) {
|
||||
this.addNewImagesToImageCache();
|
||||
|
@@ -22,7 +22,7 @@ const Header = () => (
    </a>
    <a
      className="HelpDialog__btn"
      href="https://blog.excalidraw.com"
      href="https://plus.excalidraw.com/blog"
      target="_blank"
      rel="noopener noreferrer"
    >
@@ -2,6 +2,7 @@ import cssVariables from "./css/variables.module.scss";
import type { AppProps, AppState } from "./types";
import type { ExcalidrawElement, FontFamilyValues } from "./element/types";
import { COLOR_PALETTE } from "./colors";

export const isDarwin = /Mac|iPod|iPhone|iPad/.test(navigator.platform);
export const isWindows = /^Win/.test(navigator.platform);
export const isAndroid = /\b(android)\b/i.test(navigator.userAgent);
@ -8,13 +8,14 @@ import { calculateScrollCenter } from "../scene";
|
||||
import type { AppState, DataURL, LibraryItem } from "../types";
|
||||
import type { ValueOf } from "../utility-types";
|
||||
import { bytesToHexString, isPromiseLike } from "../utils";
|
||||
import { base64ToString, stringToBase64, toByteString } from "./encode";
|
||||
import type { FileSystemHandle } from "./filesystem";
|
||||
import { nativeFileSystemSupported } from "./filesystem";
|
||||
import { isValidExcalidrawData, isValidLibrary } from "./json";
|
||||
import { restore, restoreLibraryItems } from "./restore";
|
||||
import type { ImportedLibraryData } from "./types";
|
||||
|
||||
const parseFileContents = async (blob: Blob | File) => {
|
||||
const parseFileContents = async (blob: Blob | File): Promise<string> => {
|
||||
let contents: string;
|
||||
|
||||
if (blob.type === MIME_TYPES.png) {
|
||||
@ -46,9 +47,7 @@ const parseFileContents = async (blob: Blob | File) => {
|
||||
}
|
||||
if (blob.type === MIME_TYPES.svg) {
|
||||
try {
|
||||
return await (
|
||||
await import("./image")
|
||||
).decodeSvgMetadata({
|
||||
return (await import("./image")).decodeSvgMetadata({
|
||||
svg: contents,
|
||||
});
|
||||
} catch (error: any) {
|
||||
@ -249,6 +248,7 @@ export const generateIdFromFile = async (file: File): Promise<FileId> => {
|
||||
}
|
||||
};
|
||||
|
||||
/** async. For sync variant, use getDataURL_sync */
|
||||
export const getDataURL = async (file: Blob | File): Promise<DataURL> => {
|
||||
return new Promise((resolve, reject) => {
|
||||
const reader = new FileReader();
|
||||
@ -261,6 +261,16 @@ export const getDataURL = async (file: Blob | File): Promise<DataURL> => {
|
||||
});
|
||||
};
|
||||
|
||||
export const getDataURL_sync = (
|
||||
data: string | Uint8Array | ArrayBuffer,
|
||||
mimeType: ValueOf<typeof MIME_TYPES>,
|
||||
): DataURL => {
|
||||
return `data:${mimeType};base64,${stringToBase64(
|
||||
toByteString(data),
|
||||
true,
|
||||
)}` as DataURL;
|
||||
};
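`getDataURL_sync` composes the now-synchronous `toByteString` and `stringToBase64`, so a dataURL can be built without awaiting a FileReader. For example (assuming MIME_TYPES.svg resolves to image/svg+xml):

// Example (sketch): encode an SVG string synchronously.
const svg = `<svg xmlns="http://www.w3.org/2000/svg"></svg>`;
const dataURL = getDataURL_sync(svg, MIME_TYPES.svg);
// -> "data:image/svg+xml;base64,PHN2ZyB4bWxucz0i..."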
|
||||
|
||||
export const dataURLToFile = (dataURL: DataURL, filename = "") => {
|
||||
const dataIndexStart = dataURL.indexOf(",");
|
||||
const byteString = atob(dataURL.slice(dataIndexStart + 1));
|
||||
@ -274,6 +284,10 @@ export const dataURLToFile = (dataURL: DataURL, filename = "") => {
|
||||
return new File([ab], filename, { type: mimeType });
|
||||
};
|
||||
|
||||
export const dataURLToString = (dataURL: DataURL) => {
|
||||
return base64ToString(dataURL.slice(dataURL.indexOf(",") + 1));
|
||||
};
|
||||
|
||||
export const resizeImageFile = async (
|
||||
file: File,
|
||||
opts: {
|
||||
|
@ -5,24 +5,23 @@ import { encryptData, decryptData } from "./encryption";
|
||||
// byte (binary) strings
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
// fast, Buffer-compatible implem
|
||||
export const toByteString = (
|
||||
data: string | Uint8Array | ArrayBuffer,
|
||||
): Promise<string> => {
|
||||
return new Promise((resolve, reject) => {
|
||||
const blob =
|
||||
typeof data === "string"
|
||||
? new Blob([new TextEncoder().encode(data)])
|
||||
: new Blob([data instanceof Uint8Array ? data : new Uint8Array(data)]);
|
||||
const reader = new FileReader();
|
||||
reader.onload = (event) => {
|
||||
if (!event.target || typeof event.target.result !== "string") {
|
||||
return reject(new Error("couldn't convert to byte string"));
|
||||
}
|
||||
resolve(event.target.result);
|
||||
};
|
||||
reader.readAsBinaryString(blob);
|
||||
});
|
||||
// Buffer-compatible implem.
|
||||
//
|
||||
// Note that in V8, spreading the uint8array (by chunks) into
|
||||
// `String.fromCharCode(...uint8array)` tends to be faster for large
|
||||
// strings/buffers, in case perf is needed in the future.
|
||||
export const toByteString = (data: string | Uint8Array | ArrayBuffer) => {
|
||||
const bytes =
|
||||
typeof data === "string"
|
||||
? new TextEncoder().encode(data)
|
||||
: data instanceof Uint8Array
|
||||
? data
|
||||
: new Uint8Array(data);
|
||||
let bstring = "";
|
||||
for (const byte of bytes) {
|
||||
bstring += String.fromCharCode(byte);
|
||||
}
|
||||
return bstring;
|
||||
};
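The note above about spreading the Uint8Array refers to the chunked String.fromCharCode idiom; a hedged sketch of that faster variant, in case it is ever needed (not used by this diff):

// Alternative (illustrative only): convert in chunks instead of per byte;
// the chunk size stays well below engine argument-count limits.
const toByteStringChunked = (bytes: Uint8Array, chunkSize = 0x8000) => {
  let bstring = "";
  for (let i = 0; i < bytes.length; i += chunkSize) {
    bstring += String.fromCharCode(...bytes.subarray(i, i + chunkSize));
  }
  return bstring;
};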
|
||||
|
||||
const byteStringToArrayBuffer = (byteString: string) => {
|
||||
@ -46,12 +45,12 @@ const byteStringToString = (byteString: string) => {
|
||||
* @param isByteString set to true if already byte string to prevent bloat
|
||||
* due to reencoding
|
||||
*/
|
||||
export const stringToBase64 = async (str: string, isByteString = false) => {
|
||||
return isByteString ? window.btoa(str) : window.btoa(await toByteString(str));
|
||||
export const stringToBase64 = (str: string, isByteString = false) => {
|
||||
return isByteString ? window.btoa(str) : window.btoa(toByteString(str));
|
||||
};
|
||||
|
||||
// async to align with stringToBase64
|
||||
export const base64ToString = async (base64: string, isByteString = false) => {
|
||||
export const base64ToString = (base64: string, isByteString = false) => {
|
||||
return isByteString
|
||||
? window.atob(base64)
|
||||
: byteStringToString(window.atob(base64));
|
||||
@ -66,6 +65,20 @@ export const base64ToArrayBuffer = (base64: string): ArrayBuffer => {
|
||||
return byteStringToArrayBuffer(atob(base64));
|
||||
};
|
||||
|
||||
// -----------------------------------------------------------------------------
|
||||
// base64url
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
export const base64urlToString = (str: string) => {
|
||||
return window.atob(
|
||||
// normalize base64URL to base64
|
||||
str
|
||||
.replace(/-/g, "+")
|
||||
.replace(/_/g, "/")
|
||||
.padEnd(str.length + ((4 - (str.length % 4)) % 4), "="),
|
||||
);
|
||||
};
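The padEnd expression restores the `=` padding that base64url strips: it pads up to the next multiple of four, and the outer `% 4` leaves already-aligned input untouched. A quick worked example:

// "eyJmb28iOjF9" has length 12: (4 - 12 % 4) % 4 === 0, so nothing is added
// "eyJhIjoxfQ"   has length 10: (4 - 10 % 4) % 4 === 2, so it becomes "eyJhIjoxfQ=="
base64urlToString("eyJhIjoxfQ"); // -> '{"a":1}'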
|
||||
|
||||
// -----------------------------------------------------------------------------
|
||||
// text encoding
|
||||
// -----------------------------------------------------------------------------
|
||||
@ -82,18 +95,18 @@ type EncodedData = {
|
||||
/**
|
||||
* Encodes (and potentially compresses via zlib) text to byte string
|
||||
*/
|
||||
export const encode = async ({
|
||||
export const encode = ({
|
||||
text,
|
||||
compress,
|
||||
}: {
|
||||
text: string;
|
||||
/** defaults to `true`. If compression fails, falls back to bstring alone. */
|
||||
compress?: boolean;
|
||||
}): Promise<EncodedData> => {
|
||||
}): EncodedData => {
|
||||
let deflated!: string;
|
||||
if (compress !== false) {
|
||||
try {
|
||||
deflated = await toByteString(deflate(text));
|
||||
deflated = toByteString(deflate(text));
|
||||
} catch (error: any) {
|
||||
console.error("encode: cannot deflate", error);
|
||||
}
|
||||
@ -102,11 +115,11 @@ export const encode = async ({
|
||||
version: "1",
|
||||
encoding: "bstring",
|
||||
compressed: !!deflated,
|
||||
encoded: deflated || (await toByteString(text)),
|
||||
encoded: deflated || toByteString(text),
|
||||
};
|
||||
};
|
||||
|
||||
export const decode = async (data: EncodedData): Promise<string> => {
|
||||
export const decode = (data: EncodedData): string => {
|
||||
let decoded: string;
|
||||
|
||||
switch (data.encoding) {
|
||||
@ -114,7 +127,7 @@ export const decode = async (data: EncodedData): Promise<string> => {
|
||||
// if compressed, do not double decode the bstring
|
||||
decoded = data.compressed
|
||||
? data.encoded
|
||||
: await byteStringToString(data.encoded);
|
||||
: byteStringToString(data.encoded);
|
||||
break;
|
||||
default:
|
||||
throw new Error(`decode: unknown encoding "${data.encoding}"`);
|
||||
|
@ -32,7 +32,7 @@ export const encodePngMetadata = async ({
|
||||
const metadataChunk = tEXt.encode(
|
||||
MIME_TYPES.excalidraw,
|
||||
JSON.stringify(
|
||||
await encode({
|
||||
encode({
|
||||
text: metadata,
|
||||
compress: true,
|
||||
}),
|
||||
@ -59,7 +59,7 @@ export const decodePngMetadata = async (blob: Blob) => {
|
||||
}
|
||||
throw new Error("FAILED");
|
||||
}
|
||||
return await decode(encodedData);
|
||||
return decode(encodedData);
|
||||
} catch (error: any) {
|
||||
console.error(error);
|
||||
throw new Error("FAILED");
|
||||
@ -72,9 +72,9 @@ export const decodePngMetadata = async (blob: Blob) => {
|
||||
// SVG
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
export const encodeSvgMetadata = async ({ text }: { text: string }) => {
|
||||
const base64 = await stringToBase64(
|
||||
JSON.stringify(await encode({ text })),
|
||||
export const encodeSvgMetadata = ({ text }: { text: string }) => {
|
||||
const base64 = stringToBase64(
|
||||
JSON.stringify(encode({ text })),
|
||||
true /* is already byte string */,
|
||||
);
|
||||
|
||||
@ -87,7 +87,7 @@ export const encodeSvgMetadata = async ({ text }: { text: string }) => {
|
||||
return metadata;
|
||||
};
|
||||
|
||||
export const decodeSvgMetadata = async ({ svg }: { svg: string }) => {
|
||||
export const decodeSvgMetadata = ({ svg }: { svg: string }) => {
|
||||
if (svg.includes(`payload-type:${MIME_TYPES.excalidraw}`)) {
|
||||
const match = svg.match(
|
||||
/<!-- payload-start -->\s*(.+?)\s*<!-- payload-end -->/,
|
||||
@ -100,7 +100,7 @@ export const decodeSvgMetadata = async ({ svg }: { svg: string }) => {
|
||||
const isByteString = version !== "1";
|
||||
|
||||
try {
|
||||
const json = await base64ToString(match[1], isByteString);
|
||||
const json = base64ToString(match[1], isByteString);
|
||||
const encodedData = JSON.parse(json);
|
||||
if (!("encoded" in encodedData)) {
|
||||
// legacy, un-encoded scene JSON
|
||||
@ -112,7 +112,7 @@ export const decodeSvgMetadata = async ({ svg }: { svg: string }) => {
|
||||
}
|
||||
throw new Error("FAILED");
|
||||
}
|
||||
return await decode(encodedData);
|
||||
return decode(encodedData);
|
||||
} catch (error: any) {
|
||||
console.error(error);
|
||||
throw new Error("FAILED");
|
||||
|
@@ -4,7 +4,7 @@ import type { ExcalidrawProps } from "../types";
import { getFontString, updateActiveTool } from "../utils";
import { setCursorForShape } from "../cursor";
import { newTextElement } from "./newElement";
import { wrapText } from "./textElement";
import { wrapText } from "./textWrapping";
import { isIframeElement } from "./typeChecks";
import type {
  ExcalidrawElement,
@ -94,7 +94,7 @@ export const isHTMLSVGElement = (node: Node | null): node is SVGElement => {
|
||||
return node?.nodeName.toLowerCase() === "svg";
|
||||
};
|
||||
|
||||
export const normalizeSVG = async (SVGString: string) => {
|
||||
export const normalizeSVG = (SVGString: string) => {
|
||||
const doc = new DOMParser().parseFromString(SVGString, MIME_TYPES.svg);
|
||||
const svg = doc.querySelector("svg");
|
||||
const errorNode = doc.querySelector("parsererror");
|
||||
|
@ -34,9 +34,9 @@ import { getResizedElementAbsoluteCoords } from "./bounds";
|
||||
import {
|
||||
measureText,
|
||||
normalizeText,
|
||||
wrapText,
|
||||
getBoundTextMaxWidth,
|
||||
} from "./textElement";
|
||||
import { wrapText } from "./textWrapping";
|
||||
import {
|
||||
DEFAULT_ELEMENT_PROPS,
|
||||
DEFAULT_FONT_FAMILY,
|
||||
|
@ -47,10 +47,10 @@ import {
|
||||
handleBindTextResize,
|
||||
getBoundTextMaxWidth,
|
||||
getApproxMinLineHeight,
|
||||
wrapText,
|
||||
measureText,
|
||||
getMinTextElementWidth,
|
||||
} from "./textElement";
|
||||
import { wrapText } from "./textWrapping";
|
||||
import { LinearElementEditor } from "./linearElementEditor";
|
||||
import { isInGroup } from "../groups";
|
||||
import { mutateElbowArrow } from "./routing";
|
||||
|
@@ -1,4 +1,4 @@
import { BOUND_TEXT_PADDING, FONT_FAMILY } from "../constants";
import { FONT_FAMILY } from "../constants";
import { getLineHeight } from "../fonts";
import { API } from "../tests/helpers/api";
import {
@@ -6,642 +6,10 @@ import {
  getContainerCoords,
  getBoundTextMaxWidth,
  getBoundTextMaxHeight,
  wrapText,
  detectLineHeight,
  getLineHeightInPx,
  parseTokens,
} from "./textElement";
import type { ExcalidrawTextElementWithContainer, FontString } from "./types";

describe("Test wrapText", () => {
|
||||
// font is irrelevant as jsdom does not support FontFace API
|
||||
// `measureText` width is mocked to return `text.length` by `jest-canvas-mock`
|
||||
// https://github.com/hustcc/jest-canvas-mock/blob/master/src/classes/TextMetrics.js
|
||||
const font = "10px Cascadia, Segoe UI Emoji" as FontString;
|
||||
|
||||
it("should wrap the text correctly when word length is exactly equal to max width", () => {
|
||||
const text = "Hello Excalidraw";
|
||||
// Length of "Excalidraw" is 100 and exacty equal to max width
|
||||
const res = wrapText(text, font, 100);
|
||||
expect(res).toEqual(`Hello\nExcalidraw`);
|
||||
});
|
||||
|
||||
it("should return the text as is if max width is invalid", () => {
|
||||
const text = "Hello Excalidraw";
|
||||
expect(wrapText(text, font, NaN)).toEqual(text);
|
||||
expect(wrapText(text, font, -1)).toEqual(text);
|
||||
expect(wrapText(text, font, Infinity)).toEqual(text);
|
||||
});
|
||||
|
||||
it("should show the text correctly when max width reached", () => {
|
||||
const text = "Hello😀";
|
||||
const maxWidth = 10;
|
||||
const res = wrapText(text, font, maxWidth);
|
||||
expect(res).toBe("H\ne\nl\nl\no\n😀");
|
||||
});
|
||||
|
||||
it("should not wrap number when wrapping line", () => {
|
||||
const text = "don't wrap this number 99,100.99";
|
||||
const maxWidth = 300;
|
||||
const res = wrapText(text, font, maxWidth);
|
||||
expect(res).toBe("don't wrap this number\n99,100.99");
|
||||
});
|
||||
|
||||
it("should support multiple (multi-codepoint) emojis", () => {
|
||||
const text = "😀🗺🔥👩🏽🦰👨👩👧👦🇨🇿";
|
||||
const maxWidth = 1;
|
||||
const res = wrapText(text, font, maxWidth);
|
||||
expect(res).toBe("😀\n🗺\n🔥\n👩🏽🦰\n👨👩👧👦\n🇨🇿");
|
||||
});
|
||||
|
||||
it("should wrap the text correctly when text contains hyphen", () => {
|
||||
let text =
|
||||
"Wikipedia is hosted by Wikimedia- Foundation, a non-profit organization that also hosts a range-of other projects";
|
||||
const res = wrapText(text, font, 110);
|
||||
expect(res).toBe(
|
||||
`Wikipedia\nis hosted\nby\nWikimedia-\nFoundation,\na non-\nprofit\norganizatio\nn that also\nhosts a\nrange-of\nother\nprojects`,
|
||||
);
|
||||
|
||||
text = "Hello thereusing-now";
|
||||
expect(wrapText(text, font, 100)).toEqual("Hello\nthereusing\n-now");
|
||||
});
|
||||
|
||||
it("should support wrapping nested lists", () => {
|
||||
const text = `\tA) one tab\t\t- two tabs - 8 spaces`;
|
||||
|
||||
const maxWidth = 100;
|
||||
const res = wrapText(text, font, maxWidth);
|
||||
expect(res).toBe(`\tA) one\ntab\t\t- two\ntabs\n- 8 spaces`);
|
||||
|
||||
const maxWidth2 = 50;
|
||||
const res2 = wrapText(text, font, maxWidth2);
|
||||
expect(res2).toBe(`\tA)\none\ntab\n- two\ntabs\n- 8\nspace\ns`);
|
||||
});
|
||||
|
||||
describe("When text is CJK", () => {
|
||||
it("should break each CJK character when width is very small", () => {
|
||||
// "안녕하세요" (Hangul) + "こんにちは世界" (Hiragana, Kanji) + "コンニチハ" (Katakana) + "你好" (Han) = "Hello Hello World Hello Hi"
|
||||
const text = "안녕하세요こんにちは世界コンニチハ你好";
|
||||
const maxWidth = 10;
|
||||
const res = wrapText(text, font, maxWidth);
|
||||
expect(res).toBe(
|
||||
"안\n녕\n하\n세\n요\nこ\nん\nに\nち\nは\n世\n界\nコ\nン\nニ\nチ\nハ\n你\n好",
|
||||
);
|
||||
});
|
||||
|
||||
it("should break CJK text into longer segments when width is larger", () => {
|
||||
// "안녕하세요" (Hangul) + "こんにちは世界" (Hiragana, Kanji) + "コンニチハ" (Katakana) + "你好" (Han) = "Hello Hello World Hello Hi"
|
||||
const text = "안녕하세요こんにちは世界コンニチハ你好";
|
||||
const maxWidth = 30;
|
||||
const res = wrapText(text, font, maxWidth);
|
||||
|
||||
// measureText is mocked, so it's not precisely what would happen in prod
|
||||
expect(res).toBe("안녕하\n세요こ\nんにち\nは世界\nコンニ\nチハ你\n好");
|
||||
});
|
||||
|
||||
it("should handle a combination of CJK, latin, emojis and whitespaces", () => {
|
||||
const text = `a醫 醫 bb 你好 world-i-😀🗺🔥`;
|
||||
|
||||
const maxWidth = 150;
|
||||
const res = wrapText(text, font, maxWidth);
|
||||
expect(res).toBe(`a醫 醫 bb 你\n好 world-i-😀🗺\n🔥`);
|
||||
|
||||
const maxWidth2 = 50;
|
||||
const res2 = wrapText(text, font, maxWidth2);
|
||||
expect(res2).toBe(`a醫 醫\nbb 你\n好\nworld\n-i-😀\n🗺🔥`);
|
||||
|
||||
const maxWidth3 = 30;
|
||||
const res3 = wrapText(text, font, maxWidth3);
|
||||
expect(res3).toBe(`a醫\n醫\nbb\n你好\nwor\nld-\ni-\n😀\n🗺\n🔥`);
|
||||
});
|
||||
|
||||
it("should break before and after a regular CJK character", () => {
|
||||
const text = "HelloたWorld";
|
||||
const maxWidth1 = 50;
|
||||
const res1 = wrapText(text, font, maxWidth1);
|
||||
expect(res1).toBe("Hello\nた\nWorld");
|
||||
|
||||
const maxWidth2 = 60;
|
||||
const res2 = wrapText(text, font, maxWidth2);
|
||||
expect(res2).toBe("Helloた\nWorld");
|
||||
});
|
||||
|
||||
it("should break before and after certain CJK symbols", () => {
|
||||
const text = "こんにちは〃世界";
|
||||
const maxWidth1 = 50;
|
||||
const res1 = wrapText(text, font, maxWidth1);
|
||||
expect(res1).toBe("こんにちは\n〃世界");
|
||||
|
||||
const maxWidth2 = 60;
|
||||
const res2 = wrapText(text, font, maxWidth2);
|
||||
expect(res2).toBe("こんにちは〃\n世界");
|
||||
});
|
||||
|
||||
it("should break after, not before for certain CJK pairs", () => {
|
||||
const text = "Hello た。";
|
||||
const maxWidth = 70;
|
||||
const res = wrapText(text, font, maxWidth);
|
||||
expect(res).toBe("Hello\nた。");
|
||||
});
|
||||
|
||||
it("should break before, not after for certain CJK pairs", () => {
|
||||
const text = "Hello「たWorld」";
|
||||
const maxWidth = 60;
|
||||
const res = wrapText(text, font, maxWidth);
|
||||
expect(res).toBe("Hello\n「た\nWorld」");
|
||||
});
|
||||
|
||||
it("should break after, not before for certain CJK character pairs", () => {
|
||||
const text = "「Helloた」World";
|
||||
const maxWidth = 70;
|
||||
const res = wrapText(text, font, maxWidth);
|
||||
expect(res).toBe("「Hello\nた」World");
|
||||
});
|
||||
|
||||
it("should break Chinese sentences", () => {
|
||||
const text = `中国你好!这是一个测试。
|
||||
我们来看看:人民币¥1234「很贵」
|
||||
(括号)、逗号,句号。空格 换行 全角符号…—`;
|
||||
|
||||
const maxWidth1 = 80;
|
||||
const res1 = wrapText(text, font, maxWidth1);
|
||||
expect(res1).toBe(`中国你好!这是一\n个测试。
|
||||
我们来看看:人民\n币¥1234「很\n贵」
|
||||
(括号)、逗号,\n句号。空格 换行\n全角符号…—`);
|
||||
|
||||
const maxWidth2 = 50;
|
||||
const res2 = wrapText(text, font, maxWidth2);
|
||||
expect(res2).toBe(`中国你好!\n这是一个测\n试。
|
||||
我们来看\n看:人民币\n¥1234\n「很贵」
|
||||
(括号)、\n逗号,句\n号。空格\n换行 全角\n符号…—`);
|
||||
});
|
||||
});
|
||||
|
||||
it("should break Japanese sentences", () => {
|
||||
const text = `日本こんにちは!これはテストです。
|
||||
見てみましょう:円¥1234「高い」
|
||||
(括弧)、読点、句点。
|
||||
空白 改行 全角記号…ー`;
|
||||
|
||||
const maxWidth1 = 80;
|
||||
const res1 = wrapText(text, font, maxWidth1);
|
||||
expect(res1).toBe(`日本こんにちは!\nこれはテストで\nす。
|
||||
見てみましょ\nう:円¥1234\n「高い」
|
||||
(括弧)、読\n点、句点。
|
||||
空白 改行\n全角記号…ー`);
|
||||
|
||||
const maxWidth2 = 50;
|
||||
const res2 = wrapText(text, font, maxWidth2);
|
||||
expect(res2).toBe(`日本こんに\nちは!これ\nはテストで\nす。
|
||||
見てみ\nましょう:\n円\n¥1234\n「高い」
|
||||
(括\n弧)、読\n点、句点。
|
||||
空白\n改行 全角\n記号…ー`);
|
||||
});
|
||||
|
||||
it("should break Korean sentences", () => {
|
||||
const text = `한국 안녕하세요! 이것은 테스트입니다.
|
||||
우리 보자: 원화₩1234「비싸다」
|
||||
(괄호), 쉼표, 마침표.
|
||||
공백 줄바꿈 전각기호…—`;
|
||||
|
||||
const maxWidth1 = 80;
|
||||
const res1 = wrapText(text, font, maxWidth1);
|
||||
expect(res1).toBe(`한국 안녕하세\n요! 이것은 테\n스트입니다.
|
||||
우리 보자: 원\n화₩1234「비\n싸다」
|
||||
(괄호), 쉼\n표, 마침표.
|
||||
공백 줄바꿈 전\n각기호…—`);
|
||||
|
||||
const maxWidth2 = 60;
|
||||
const res2 = wrapText(text, font, maxWidth2);
|
||||
expect(res2).toBe(`한국 안녕하\n세요! 이것\n은 테스트입\n니다.
|
||||
우리 보자:\n원화\n₩1234\n「비싸다」
|
||||
(괄호),\n쉼표, 마침\n표.
|
||||
공백 줄바꿈\n전각기호…—`);
|
||||
});
|
||||
|
||||
describe("When text contains leading whitespaces", () => {
|
||||
const text = " \t Hello world";
|
||||
|
||||
it("should preserve leading whitespaces", () => {
|
||||
const maxWidth = 120;
|
||||
const res = wrapText(text, font, maxWidth);
|
||||
expect(res).toBe(" \t Hello\nworld");
|
||||
});
|
||||
|
||||
it("should break and collapse leading whitespaces when line breaks", () => {
|
||||
const maxWidth = 60;
|
||||
const res = wrapText(text, font, maxWidth);
|
||||
expect(res).toBe("\nHello\nworld");
|
||||
});
|
||||
|
||||
it("should break and collapse leading whitespaces whe words break", () => {
|
||||
const maxWidth = 30;
|
||||
const res = wrapText(text, font, maxWidth);
|
||||
expect(res).toBe("\nHel\nlo\nwor\nld");
|
||||
});
|
||||
});
|
||||
|
||||
describe("When text contains trailing whitespaces", () => {
|
||||
it("shouldn't add new lines for trailing spaces", () => {
|
||||
const text = "Hello whats up ";
|
||||
const maxWidth = 200 - BOUND_TEXT_PADDING * 2;
|
||||
const res = wrapText(text, font, maxWidth);
|
||||
expect(res).toBe(text);
|
||||
});
|
||||
|
||||
it("should ignore trailing whitespaces when line breaks", () => {
|
||||
const text = "Hippopotomonstrosesquippedaliophobia ??????";
|
||||
const maxWidth = 400;
|
||||
const res = wrapText(text, font, maxWidth);
|
||||
expect(res).toBe("Hippopotomonstrosesquippedaliophobia\n??????");
|
||||
});
|
||||
|
||||
it("should not ignore trailing whitespaces when word breaks", () => {
|
||||
const text = "Hippopotomonstrosesquippedaliophobia ??????";
|
||||
const maxWidth = 300;
|
||||
const res = wrapText(text, font, maxWidth);
|
||||
expect(res).toBe("Hippopotomonstrosesquippedalio\nphobia ??????");
|
||||
});
|
||||
|
||||
it("should ignore trailing whitespaces when word breaks and line breaks", () => {
|
||||
const text = "Hippopotomonstrosesquippedaliophobia ??????";
|
||||
const maxWidth = 180;
|
||||
const res = wrapText(text, font, maxWidth);
|
||||
expect(res).toBe("Hippopotomonstrose\nsquippedaliophobia\n??????");
|
||||
});
|
||||
});
|
||||
|
||||
describe("When text doesn't contain new lines", () => {
|
||||
const text = "Hello whats up";
|
||||
|
||||
[
|
||||
{
|
||||
desc: "break all words when width of each word is less than container width",
|
||||
width: 80,
|
||||
res: `Hello\nwhats\nup`,
|
||||
},
|
||||
{
|
||||
desc: "break all characters when width of each character is less than container width",
|
||||
width: 25,
|
||||
res: `H
|
||||
e
|
||||
l
|
||||
l
|
||||
o
|
||||
w
|
||||
h
|
||||
a
|
||||
t
|
||||
s
|
||||
u
|
||||
p`,
|
||||
},
|
||||
{
|
||||
desc: "break words as per the width",
|
||||
|
||||
width: 140,
|
||||
res: `Hello whats\nup`,
|
||||
},
|
||||
{
|
||||
desc: "fit the container",
|
||||
|
||||
width: 250,
|
||||
res: "Hello whats up",
|
||||
},
|
||||
{
|
||||
desc: "should push the word if its equal to max width",
|
||||
width: 60,
|
||||
res: `Hello
|
||||
whats
|
||||
up`,
|
||||
},
|
||||
].forEach((data) => {
|
||||
it(`should ${data.desc}`, () => {
|
||||
const res = wrapText(text, font, data.width - BOUND_TEXT_PADDING * 2);
|
||||
expect(res).toEqual(data.res);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe("When text contain new lines", () => {
|
||||
const text = `Hello
|
||||
whats up`;
|
||||
[
|
||||
{
|
||||
desc: "break all words when width of each word is less than container width",
|
||||
width: 80,
|
||||
res: `Hello\nwhats\nup`,
|
||||
},
|
||||
{
|
||||
desc: "break all characters when width of each character is less than container width",
|
||||
width: 25,
|
||||
res: `H
|
||||
e
|
||||
l
|
||||
l
|
||||
o
|
||||
w
|
||||
h
|
||||
a
|
||||
t
|
||||
s
|
||||
u
|
||||
p`,
|
||||
},
|
||||
{
|
||||
desc: "break words as per the width",
|
||||
|
||||
width: 150,
|
||||
res: `Hello
|
||||
whats up`,
|
||||
},
|
||||
{
|
||||
desc: "fit the container",
|
||||
|
||||
width: 250,
|
||||
res: `Hello
|
||||
whats up`,
|
||||
},
|
||||
].forEach((data) => {
|
||||
it(`should respect new lines and ${data.desc}`, () => {
|
||||
const res = wrapText(text, font, data.width - BOUND_TEXT_PADDING * 2);
|
||||
expect(res).toEqual(data.res);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe("When text is long", () => {
|
||||
const text = `hellolongtextthisiswhatsupwithyouIamtypingggggandtypinggg break it now`;
|
||||
[
|
||||
{
|
||||
desc: "fit characters of long string as per container width",
|
||||
width: 170,
|
||||
res: `hellolongtextthi\nsiswhatsupwithyo\nuIamtypingggggan\ndtypinggg break\nit now`,
|
||||
},
|
||||
{
|
||||
desc: "fit characters of long string as per container width and break words as per the width",
|
||||
|
||||
width: 130,
|
||||
res: `hellolongtex
|
||||
tthisiswhats
|
||||
upwithyouIam
|
||||
typingggggan
|
||||
dtypinggg
|
||||
break it now`,
|
||||
},
|
||||
{
|
||||
desc: "fit the long text when container width is greater than text length and move the rest to next line",
|
||||
|
||||
width: 600,
|
||||
res: `hellolongtextthisiswhatsupwithyouIamtypingggggandtypinggg\nbreak it now`,
|
||||
},
|
||||
].forEach((data) => {
|
||||
it(`should ${data.desc}`, () => {
|
||||
const res = wrapText(text, font, data.width - BOUND_TEXT_PADDING * 2);
|
||||
expect(res).toEqual(data.res);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe("Test parseTokens", () => {
|
||||
it("should tokenize latin", () => {
|
||||
let text = "Excalidraw is a virtual collaborative whiteboard";
|
||||
|
||||
expect(parseTokens(text)).toEqual([
|
||||
"Excalidraw",
|
||||
" ",
|
||||
"is",
|
||||
" ",
|
||||
"a",
|
||||
" ",
|
||||
"virtual",
|
||||
" ",
|
||||
"collaborative",
|
||||
" ",
|
||||
"whiteboard",
|
||||
]);
|
||||
|
||||
text =
|
||||
"Wikipedia is hosted by Wikimedia- Foundation, a non-profit organization that also hosts a range-of other projects";
|
||||
expect(parseTokens(text)).toEqual([
|
||||
"Wikipedia",
|
||||
" ",
|
||||
"is",
|
||||
" ",
|
||||
"hosted",
|
||||
" ",
|
||||
"by",
|
||||
" ",
|
||||
"Wikimedia-",
|
||||
" ",
|
||||
"Foundation,",
|
||||
" ",
|
||||
"a",
|
||||
" ",
|
||||
"non-",
|
||||
"profit",
|
||||
" ",
|
||||
"organization",
|
||||
" ",
|
||||
"that",
|
||||
" ",
|
||||
"also",
|
||||
" ",
|
||||
"hosts",
|
||||
" ",
|
||||
"a",
|
||||
" ",
|
||||
"range-",
|
||||
"of",
|
||||
" ",
|
||||
"other",
|
||||
" ",
|
||||
"projects",
|
||||
]);
|
||||
});
|
||||
|
||||
it("should not tokenize number", () => {
|
||||
const text = "99,100.99";
|
||||
const tokens = parseTokens(text);
|
||||
expect(tokens).toEqual(["99,100.99"]);
|
||||
});
|
||||
|
||||
it("should tokenize joined emojis", () => {
|
||||
const text = `😬🌍🗺🔥☂️👩🏽🦰👨👩👧👦👩🏾🔬🏳️🌈🧔♀️🧑🤝🧑🙅🏽♂️✅0️⃣🇨🇿🦅`;
|
||||
const tokens = parseTokens(text);
|
||||
|
||||
expect(tokens).toEqual([
|
||||
"😬",
|
||||
"🌍",
|
||||
"🗺",
|
||||
"🔥",
|
||||
"☂️",
|
||||
"👩🏽🦰",
|
||||
"👨👩👧👦",
|
||||
"👩🏾🔬",
|
||||
"🏳️🌈",
|
||||
"🧔♀️",
|
||||
"🧑🤝🧑",
|
||||
"🙅🏽♂️",
|
||||
"✅",
|
||||
"0️⃣",
|
||||
"🇨🇿",
|
||||
"🦅",
|
||||
]);
|
||||
});
|
||||
|
||||
it("should tokenize emojis mixed with mixed text", () => {
|
||||
const text = `😬a🌍b🗺c🔥d☂️《👩🏽🦰》👨👩👧👦德👩🏾🔬こ🏳️🌈안🧔♀️g🧑🤝🧑h🙅🏽♂️e✅f0️⃣g🇨🇿10🦅#hash`;
|
||||
const tokens = parseTokens(text);
|
||||
|
||||
expect(tokens).toEqual([
|
||||
"😬",
|
||||
"a",
|
||||
"🌍",
|
||||
"b",
|
||||
"🗺",
|
||||
"c",
|
||||
"🔥",
|
||||
"d",
|
||||
"☂️",
|
||||
"《",
|
||||
"👩🏽🦰",
|
||||
"》",
|
||||
"👨👩👧👦",
|
||||
"德",
|
||||
"👩🏾🔬",
|
||||
"こ",
|
||||
"🏳️🌈",
|
||||
"안",
|
||||
"🧔♀️",
|
||||
"g",
|
||||
"🧑🤝🧑",
|
||||
"h",
|
||||
"🙅🏽♂️",
|
||||
"e",
|
||||
"✅",
|
||||
"f0️⃣g", // bummer, but ok, as we traded kecaps not breaking (less common) for hash and numbers not breaking (more common)
|
||||
"🇨🇿",
|
||||
"10", // nice! do not break the number, as it's by default matched by \p{Emoji}
|
||||
"🦅",
|
||||
"#hash", // nice! do not break the hash, as it's by default matched by \p{Emoji}
|
||||
]);
|
||||
});
|
||||
|
||||
it("should tokenize decomposed chars into their composed variants", () => {
|
||||
// each input character is in a decomposed form
|
||||
const text = "čでäぴέ다й한";
|
||||
expect(text.normalize("NFC").length).toEqual(8);
|
||||
expect(text).toEqual(text.normalize("NFD"));
|
||||
|
||||
const tokens = parseTokens(text);
|
||||
expect(tokens.length).toEqual(8);
|
||||
expect(tokens).toEqual(["č", "で", "ä", "ぴ", "έ", "다", "й", "한"]);
|
||||
});
|
||||
|
||||
it("should tokenize artificial CJK", () => {
|
||||
const text = `《道德經》醫-醫こんにちは世界!안녕하세요세계;다.다...원/달(((다)))[[1]]〚({((한))>)〛た…[Hello] World?ニューヨーク・¥3700.55す。090-1234-5678¥1,000〜$5,000「素晴らしい!」〔重要〕#1:Taro君30%は、(たなばた)〰¥110±¥570で20℃〜9:30〜10:00【一番】`;
|
||||
|
||||
// [
|
||||
// '《道', '德', '經》', '醫-',
|
||||
// '醫', 'こ', 'ん', 'に',
|
||||
// 'ち', 'は', '世', '界!',
|
||||
// '안', '녕', '하', '세',
|
||||
// '요', '세', '계;', '다.',
|
||||
// '다...', '원/', '달', '(((다)))',
|
||||
// '[[1]]', '〚({((한))>)〛', 'た…', '[Hello]',
|
||||
// ' ', 'World?', 'ニ', 'ュ',
|
||||
// 'ー', 'ヨ', 'ー', 'ク・',
|
||||
// '¥3700.55', 'す。', '090-', '1234-',
|
||||
// '5678¥1,000', '〜', '$5,000', '「素',
|
||||
// '晴', 'ら', 'し', 'い!」',
|
||||
// '〔重', '要〕', '#', '1:',
|
||||
// 'Taro', '君', '30%', 'は、',
|
||||
// '(た', 'な', 'ば', 'た)',
|
||||
// '〰', '¥110±', '¥570', 'で',
|
||||
// '20℃', '〜', '9:30', '〜',
|
||||
// '10:00', '【一', '番】'
|
||||
// ]
|
||||
const tokens = parseTokens(text);
|
||||
|
||||
// Latin
|
||||
expect(tokens).toContain("[[1]]");
|
||||
expect(tokens).toContain("[Hello]");
|
||||
expect(tokens).toContain("World?");
|
||||
expect(tokens).toContain("Taro");
|
||||
|
||||
// Chinese
|
||||
expect(tokens).toContain("《道");
|
||||
expect(tokens).toContain("德");
|
||||
expect(tokens).toContain("經》");
|
||||
expect(tokens).toContain("醫-");
|
||||
expect(tokens).toContain("醫");
|
||||
|
||||
// Japanese
|
||||
expect(tokens).toContain("こ");
|
||||
expect(tokens).toContain("ん");
|
||||
expect(tokens).toContain("に");
|
||||
expect(tokens).toContain("ち");
|
||||
expect(tokens).toContain("は");
|
||||
expect(tokens).toContain("世");
|
||||
expect(tokens).toContain("ニ");
|
||||
expect(tokens).toContain("ク・");
|
||||
expect(tokens).toContain("界!");
|
||||
expect(tokens).toContain("た…");
|
||||
expect(tokens).toContain("す。");
|
||||
expect(tokens).toContain("ュ");
|
||||
expect(tokens).toContain("ー");
|
||||
expect(tokens).toContain("「素");
|
||||
expect(tokens).toContain("晴");
|
||||
expect(tokens).toContain("ら");
|
||||
expect(tokens).toContain("し");
|
||||
expect(tokens).toContain("い!」");
|
||||
expect(tokens).toContain("君");
|
||||
expect(tokens).toContain("は、");
|
||||
expect(tokens).toContain("(た");
|
||||
expect(tokens).toContain("な");
|
||||
expect(tokens).toContain("ば");
|
||||
expect(tokens).toContain("た)");
|
||||
expect(tokens).toContain("で");
|
||||
expect(tokens).toContain("【一");
|
||||
expect(tokens).toContain("番】");
|
||||
|
||||
// Check for Korean
|
||||
expect(tokens).toContain("안");
|
||||
expect(tokens).toContain("녕");
|
||||
expect(tokens).toContain("하");
|
||||
expect(tokens).toContain("세");
|
||||
expect(tokens).toContain("요");
|
||||
expect(tokens).toContain("세");
|
||||
expect(tokens).toContain("계;");
|
||||
expect(tokens).toContain("다.");
|
||||
expect(tokens).toContain("다...");
|
||||
expect(tokens).toContain("원/");
|
||||
expect(tokens).toContain("달");
|
||||
expect(tokens).toContain("(((다)))");
|
||||
expect(tokens).toContain("〚({((한))>)〛");
|
||||
|
||||
// Numbers and units
|
||||
expect(tokens).toContain("¥3700.55");
|
||||
expect(tokens).toContain("090-");
|
||||
expect(tokens).toContain("1234-");
|
||||
expect(tokens).toContain("5678¥1,000");
|
||||
expect(tokens).toContain("$5,000");
|
||||
expect(tokens).toContain("1:");
|
||||
expect(tokens).toContain("30%");
|
||||
expect(tokens).toContain("¥110±");
|
||||
expect(tokens).toContain("¥570");
|
||||
expect(tokens).toContain("20℃");
|
||||
expect(tokens).toContain("9:30");
|
||||
expect(tokens).toContain("10:00");
|
||||
|
||||
// Punctuation and symbols
|
||||
expect(tokens).toContain("〜");
|
||||
expect(tokens).toContain("〰");
|
||||
expect(tokens).toContain("#");
|
||||
});
|
||||
});
|
||||
});
|
||||
import type { ExcalidrawTextElementWithContainer } from "./types";
|
||||
|
||||
describe("Test measureText", () => {
|
||||
describe("Test getContainerCoords", () => {
|
||||
|
@@ -16,12 +16,12 @@ import {
|
||||
BOUND_TEXT_PADDING,
|
||||
DEFAULT_FONT_FAMILY,
|
||||
DEFAULT_FONT_SIZE,
|
||||
ENV,
|
||||
TEXT_ALIGN,
|
||||
VERTICAL_ALIGN,
|
||||
} from "../constants";
|
||||
import type { MaybeTransformHandleType } from "./transformHandles";
|
||||
import { isTextElement } from ".";
|
||||
import { wrapText } from "./textWrapping";
|
||||
import { isBoundToContainer, isArrowElement } from "./typeChecks";
|
||||
import { LinearElementEditor } from "./linearElementEditor";
|
||||
import type { AppState } from "../types";
|
||||
@@ -31,172 +31,6 @@ import {
|
||||
} from "./containerCache";
|
||||
import type { ExtractSetType } from "../utility-types";
|
||||
|
||||
/**
|
||||
* Matches various emoji types.
|
||||
*
|
||||
* 1. basic emojis (😀, 🌍)
|
||||
* 2. flags (🇨🇿)
|
||||
* 3. multi-codepoint emojis:
|
||||
* - skin tones (👍🏽)
|
||||
* - variation selectors (☂️)
|
||||
* - keycaps (1️⃣)
|
||||
* - tag sequences (🏴)
|
||||
* - emoji sequences (👨👩👧👦, 👩🚀, 🏳️🌈)
|
||||
*
|
||||
* Unicode points:
|
||||
* - \uFE0F: presentation selector
|
||||
* - \u20E3: enclosing keycap
|
||||
* - \u200D: ZWJ (zero width joiner)
|
||||
* - \u{E0020}-\u{E007E}: tags
|
||||
* - \u{E007F}: cancel tag
|
||||
*
|
||||
* @see https://unicode.org/reports/tr51/#EBNF_and_Regex, with changes:
|
||||
* - replaced \p{Emoji} with [\p{Extended_Pictographic}\p{Emoji_Presentation}], see more in `should tokenize emojis mixed with mixed text` test
|
||||
* - replaced \p{Emod} with \p{Emoji_Modifier} as some do not understand the abbreviation (i.e. https://devina.io/redos-checker)
|
||||
*/
|
||||
const _EMOJI_CHAR =
|
||||
/(\p{RI}\p{RI}|[\p{Extended_Pictographic}\p{Emoji_Presentation}](?:\p{Emoji_Modifier}|\uFE0F\u20E3?|[\u{E0020}-\u{E007E}]+\u{E007F})?(?:\u200D(?:\p{RI}\p{RI}|[\p{Emoji}](?:\p{Emoji_Modifier}|\uFE0F\u20E3?|[\u{E0020}-\u{E007E}]+\u{E007F})?))*)/u;
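As a quick illustration of what the \p{RI}\p{RI} and ZWJ branches buy us (a standalone check, not part of the module):

    const flag = "🇨🇿";      // two Regional Indicator code points
    const family = "👨‍👩‍👧‍👦";  // four emoji joined by U+200D (ZWJ)
    console.assert([...flag].length === 2 && [...family].length === 7);
    // both are kept as single tokens by the emoji-aware break regex above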
|
||||
|
||||
/**
|
||||
* Detect a CJK char, though does not include every possible char used in CJK texts,
|
||||
* such as symbols and punctuations.
|
||||
*
|
||||
* By default every CJK is a breaking point, though CJK has additional breaking points,
|
||||
* including full width punctuations or symbols (Chinese and Japanese) and western punctuations (Korean).
|
||||
*
|
||||
* Additional CJK breaking point rules:
|
||||
* - expect a break before (lookahead), but not after (negative lookbehind), i.e. "(" or "("
|
||||
* - expect a break after (lookbehind), but not before (negative lookahead), i.e. ")" or ")"
|
||||
* - expect a break always (lookahead and lookbehind), i.e. "〃"
|
||||
*/
|
||||
const _CJK_CHAR =
|
||||
/\p{Script=Han}\p{Script=Hiragana}\p{Script=Katakana}\p{Script=Hangul}/u;
|
||||
|
||||
/**
|
||||
* Following characters break only with CJK, not with alphabetic characters.
|
||||
* This is essential for Korean, as it uses alphabetic punctuation, but expects CJK-like breaking points.
|
||||
*
|
||||
* Hello((た)) → ["Hello", "((た))"]
|
||||
* Hello((World)) → ["Hello((World))"]
|
||||
*/
|
||||
const _CJK_BREAK_NOT_AFTER_BUT_BEFORE = /<\(\[\{/u;
|
||||
const _CJK_BREAK_NOT_BEFORE_BUT_AFTER = />\)\]\}.,:;\?!/u;
|
||||
const _CJK_BREAK_ALWAYS = / 〃〜~〰#&*+-ー/=|¬ ̄¦/u;
|
||||
const _CJK_SYMBOLS_AND_PUNCTUATION =
|
||||
/()[]{}〈〉《》⦅⦆「」「」『』【】〖〗〔〕〘〙〚〛<>〝〞'〟・。゚゙,、.:;?!%ー/u;
|
||||
|
||||
/**
|
||||
* Following characters break with any character, even though are mostly used with CJK.
|
||||
*
|
||||
* Hello た。→ ["Hello", "た。"]
|
||||
* ↑ DON'T BREAK "た。" (negative lookahead)
|
||||
* Hello「た」 World → ["Hello", "「た」", "World"]
|
||||
* ↑ DON'T BREAK "「た" (negative lookbehind)
|
||||
* ↑ DON'T BREAK "た」"(negative lookahead)
|
||||
* ↑ BREAK BEFORE "「" (lookahead)
|
||||
* ↑ BREAK AFTER "」" (lookbehind)
|
||||
*/
|
||||
const _ANY_BREAK_NOT_AFTER_BUT_BEFORE = /([{〈《⦅「「『【〖〔〘〚<〝/u;
|
||||
const _ANY_BREAK_NOT_BEFORE_BUT_AFTER =
|
||||
/)]}〉》⦆」」』】〗〕〙〛>〞'〟・。゚゙,、.:;?!%±‥…\//u;
|
||||
|
||||
/**
|
||||
* Natural breaking points for any grammars.
|
||||
*
|
||||
* Hello-world
|
||||
* ↑ BREAK AFTER "-" → ["Hello-", "world"]
|
||||
* Hello world
|
||||
* ↑ BREAK ALWAYS " " → ["Hello", " ", "world"]
|
||||
*/
|
||||
const _ANY_BREAK_AFTER = /-/u;
|
||||
const _ANY_BREAK_ALWAYS = /\s/u;
|
||||
|
||||
/**
|
||||
* Simple fallback for browsers (mainly Safari < 16.4) that don't support "Lookbehind assertion".
|
||||
*
|
||||
* Browser support as of 10/2024:
|
||||
* - 91% Lookbehind assertion https://caniuse.com/mdn-javascript_regular_expressions_lookbehind_assertion
|
||||
* - 94% Unicode character class escape https://caniuse.com/mdn-javascript_regular_expressions_unicode_character_class_escape
|
||||
*
|
||||
* Does not include advanced CJK breaking rules, but covers most of the core cases, especially for latin.
|
||||
*/
|
||||
const BREAK_LINE_REGEX_SIMPLE = new RegExp(
|
||||
`${_EMOJI_CHAR.source}|([${_ANY_BREAK_ALWAYS.source}${_CJK_CHAR.source}${_CJK_BREAK_ALWAYS.source}${_ANY_BREAK_AFTER.source}])`,
|
||||
"u",
|
||||
);
|
||||
|
||||
// Hello World → ["Hello", " World"]
|
||||
// ↑ BREAK BEFORE " "
|
||||
// HelloたWorld → ["Hello", "たWorld"]
|
||||
// ↑ BREAK BEFORE "た"
|
||||
// Hello「World」→ ["Hello", "「World」"]
|
||||
// ↑ BREAK BEFORE "「"
|
||||
const getLookaheadBreakingPoints = () => {
|
||||
const ANY_BREAKING_POINT = `(?<![${_ANY_BREAK_NOT_AFTER_BUT_BEFORE.source}])(?=[${_ANY_BREAK_NOT_AFTER_BUT_BEFORE.source}${_ANY_BREAK_ALWAYS.source}])`;
|
||||
const CJK_BREAKING_POINT = `(?<![${_ANY_BREAK_NOT_AFTER_BUT_BEFORE.source}${_CJK_BREAK_NOT_AFTER_BUT_BEFORE.source}])(?=[${_CJK_BREAK_NOT_AFTER_BUT_BEFORE.source}]*[${_CJK_CHAR.source}${_CJK_BREAK_ALWAYS.source}])`;
|
||||
return new RegExp(`(?:${ANY_BREAKING_POINT}|${CJK_BREAKING_POINT})`, "u");
|
||||
};
|
||||
|
||||
// Hello World → ["Hello ", "World"]
|
||||
// ↑ BREAK AFTER " "
|
||||
// Hello-World → ["Hello-", "World"]
|
||||
// ↑ BREAK AFTER "-"
|
||||
// HelloたWorld → ["Helloた", "World"]
|
||||
// ↑ BREAK AFTER "た"
|
||||
//「Hello」World → ["「Hello」", "World"]
|
||||
// ↑ BREAK AFTER "」"
|
||||
const getLookbehindBreakingPoints = () => {
|
||||
const ANY_BREAKING_POINT = `(?![${_ANY_BREAK_NOT_BEFORE_BUT_AFTER.source}])(?<=[${_ANY_BREAK_NOT_BEFORE_BUT_AFTER.source}${_ANY_BREAK_ALWAYS.source}${_ANY_BREAK_AFTER.source}])`;
|
||||
const CJK_BREAKING_POINT = `(?![${_ANY_BREAK_NOT_BEFORE_BUT_AFTER.source}${_CJK_BREAK_NOT_BEFORE_BUT_AFTER.source}${_ANY_BREAK_AFTER.source}])(?<=[${_CJK_CHAR.source}${_CJK_BREAK_ALWAYS.source}][${_CJK_BREAK_NOT_BEFORE_BUT_AFTER.source}]*)`;
|
||||
return new RegExp(`(?:${ANY_BREAKING_POINT}|${CJK_BREAKING_POINT})`, "u");
|
||||
};
|
||||
|
||||
/**
|
||||
* Break a line based on the whitespaces, CJK / emoji chars and language specific breaking points,
|
||||
* like hyphen for alphabetic and various full-width codepoints for CJK - especially Japanese, e.g.:
|
||||
*
|
||||
* "Hello 世界。🌎🗺" → ["Hello", " ", "世", "界。", "🌎", "🗺"]
|
||||
* "Hello-world" → ["Hello-", "world"]
|
||||
* "「Hello World」" → ["「Hello", " ", "World」"]
|
||||
*/
|
||||
const getBreakLineRegexAdvanced = () =>
|
||||
new RegExp(
|
||||
`${_EMOJI_CHAR.source}|${getLookaheadBreakingPoints().source}|${
|
||||
getLookbehindBreakingPoints().source
|
||||
}`,
|
||||
"u",
|
||||
);
|
||||
|
||||
let cachedBreakLineRegex: RegExp | undefined;
|
||||
|
||||
// Lazy-load for browsers that don't support "Lookbehind assertion"
|
||||
const getBreakLineRegex = () => {
|
||||
if (!cachedBreakLineRegex) {
|
||||
try {
|
||||
cachedBreakLineRegex = getBreakLineRegexAdvanced();
|
||||
} catch {
|
||||
cachedBreakLineRegex = BREAK_LINE_REGEX_SIMPLE;
|
||||
}
|
||||
}
|
||||
|
||||
return cachedBreakLineRegex;
|
||||
};
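The same try/catch approach doubles as a feature test: constructing a lookbehind pattern throws a SyntaxError at parse time on engines without support (e.g. Safari < 16.4), which is exactly why the advanced regex is built lazily. A minimal standalone sketch of that detection idea:

    const supportsLookbehind = (() => {
      try {
        new RegExp("(?<=a)b", "u");
        return true;
      } catch {
        return false;
      }
    })();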
|
||||
|
||||
const CJK_REGEX = new RegExp(
|
||||
`[${_CJK_CHAR.source}${_CJK_BREAK_ALWAYS.source}${_CJK_SYMBOLS_AND_PUNCTUATION.source}]`,
|
||||
"u",
|
||||
);
|
||||
|
||||
const EMOJI_REGEX = new RegExp(`${_EMOJI_CHAR.source}`, "u");
|
||||
|
||||
export const containsCJK = (text: string) => {
|
||||
return CJK_REGEX.test(text);
|
||||
};
|
||||
|
||||
export const containsEmoji = (text: string) => {
|
||||
return EMOJI_REGEX.test(text);
|
||||
};
|
||||
|
||||
export const normalizeText = (text: string) => {
|
||||
return (
|
||||
normalizeEOL(text)
|
||||
@@ -510,7 +344,7 @@ let canvas: HTMLCanvasElement | undefined;
|
||||
*
|
||||
 * `Math.ceil` of the final width adds an additional buffer which stabilizes slight wrapping inconsistencies.
|
||||
*/
|
||||
const getLineWidth = (
|
||||
export const getLineWidth = (
|
||||
text: string,
|
||||
font: FontString,
|
||||
forceAdvanceWidth?: true,
|
||||
@@ -575,164 +409,6 @@ export const getTextHeight = (
|
||||
return getLineHeightInPx(fontSize, lineHeight) * lineCount;
|
||||
};
|
||||
|
||||
export const parseTokens = (line: string) => {
|
||||
const breakLineRegex = getBreakLineRegex();
|
||||
|
||||
// normalizing to single-codepoint composed chars due to canonical equivalence of multi-codepoint versions for chars like č, で (~ so that we don't break a line in between c and ˇ)
|
||||
// filtering due to multi-codepoint chars like 👨👩👧👦, 👩🏽🦰
|
||||
return line.normalize("NFC").split(breakLineRegex).filter(Boolean);
|
||||
};
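For example, a decomposed "č" (base letter U+0063 followed by combining caron U+030C) is normalized to its single composed code point first, so the break regex can never split the base letter from its diacritic; this mirrors the "should tokenize decomposed chars into their composed variants" test. A small illustration:

    const decomposed = "c\u030C"; // "č" as base letter + combining caron
    console.assert(decomposed.length === 2);
    console.assert(decomposed.normalize("NFC").length === 1);
    console.assert(parseTokens(decomposed).length === 1);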
|
||||
|
||||
// handles multi-byte chars (é, 中) and purposefully does not handle multi-codepoint char (👨👩👧👦, 👩🏽🦰)
|
||||
const isSingleCharacter = (maybeSingleCharacter: string) => {
|
||||
return (
|
||||
maybeSingleCharacter.codePointAt(0) !== undefined &&
|
||||
maybeSingleCharacter.codePointAt(1) === undefined
|
||||
);
|
||||
};
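Put differently, isSingleCharacter is true only for strings that fit in a single UTF-16 code unit (which covers multi-byte-in-UTF-8 chars like "é" or "中"), and false for anything longer, including multi-code-point sequences. Illustrative checks (the helper itself is module-private):

    isSingleCharacter("中");      // true  — one code unit
    isSingleCharacter("é");      // true  — composed single code unit
    isSingleCharacter("👨‍👩‍👧‍👦");  // false — ZWJ sequence, many code points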
|
||||
|
||||
const satisfiesWordInvariant = (word: string) => {
|
||||
if (import.meta.env.MODE === ENV.TEST || import.meta.env.DEV) {
|
||||
if (/\s/.test(word)) {
|
||||
throw new Error("Word should not contain any whitespaces!");
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
const wrapWord = (
|
||||
word: string,
|
||||
font: FontString,
|
||||
maxWidth: number,
|
||||
): Array<string> => {
|
||||
// multi-codepoint emojis are already broken apart and shouldn't be broken further
|
||||
if (EMOJI_REGEX.test(word)) {
|
||||
return [word];
|
||||
}
|
||||
|
||||
satisfiesWordInvariant(word);
|
||||
|
||||
const lines: Array<string> = [];
|
||||
const chars = Array.from(word);
|
||||
|
||||
let currentLine = "";
|
||||
let currentLineWidth = 0;
|
||||
|
||||
for (const char of chars) {
|
||||
const _charWidth = charWidth.calculate(char, font);
|
||||
const testLineWidth = currentLineWidth + _charWidth;
|
||||
|
||||
if (testLineWidth <= maxWidth) {
|
||||
currentLine = currentLine + char;
|
||||
currentLineWidth = testLineWidth;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (currentLine) {
|
||||
lines.push(currentLine);
|
||||
}
|
||||
|
||||
currentLine = char;
|
||||
currentLineWidth = _charWidth;
|
||||
}
|
||||
|
||||
if (currentLine) {
|
||||
lines.push(currentLine);
|
||||
}
|
||||
|
||||
return lines;
|
||||
};
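wrapWord simply walks the word's characters (via Array.from, so surrogate pairs stay intact) and starts a new line whenever the accumulated width would exceed maxWidth. Under the mocked metrics used in the tests (roughly 10 per character), a hypothetical call would behave like this:

    wrapWord("Hippopotomonstrosesquippedaliophobia", font, 180);
    // → ["Hippopotomonstrose", "squippedaliophobia"]  (18 chars per line)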
|
||||
|
||||
const wrapLine = (
|
||||
line: string,
|
||||
font: FontString,
|
||||
maxWidth: number,
|
||||
): string[] => {
|
||||
const lines: Array<string> = [];
|
||||
const tokens = parseTokens(line);
|
||||
const tokenIterator = tokens[Symbol.iterator]();
|
||||
|
||||
let currentLine = "";
|
||||
let currentLineWidth = 0;
|
||||
|
||||
let iterator = tokenIterator.next();
|
||||
|
||||
while (!iterator.done) {
|
||||
const token = iterator.value;
|
||||
const testLine = currentLine + token;
|
||||
|
||||
// cache single codepoint whitespace, CJK or emoji width calc. as kerning should not apply here
|
||||
const testLineWidth = isSingleCharacter(token)
|
||||
? currentLineWidth + charWidth.calculate(token, font)
|
||||
: getLineWidth(testLine, font, true);
|
||||
|
||||
// build up the current line, skipping length check for possibly trailing whitespaces
|
||||
if (/\s/.test(token) || testLineWidth <= maxWidth) {
|
||||
currentLine = testLine;
|
||||
currentLineWidth = testLineWidth;
|
||||
iterator = tokenIterator.next();
|
||||
continue;
|
||||
}
|
||||
|
||||
// current line is empty => just the token (word) is longer than `maxWidth` and needs to be wrapped
|
||||
if (!currentLine) {
|
||||
const wrappedWord = wrapWord(token, font, maxWidth);
|
||||
const trailingLine = wrappedWord[wrappedWord.length - 1] ?? "";
|
||||
const precedingLines = wrappedWord.slice(0, -1);
|
||||
|
||||
lines.push(...precedingLines);
|
||||
|
||||
// trailing line of the wrapped word might still be joined with next token/s
|
||||
currentLine = trailingLine;
|
||||
currentLineWidth = getLineWidth(trailingLine, font, true);
|
||||
iterator = tokenIterator.next();
|
||||
} else {
|
||||
// push & reset, but don't iterate on the next token, as we didn't use it yet!
|
||||
lines.push(currentLine.trimEnd());
|
||||
|
||||
// purposefully not iterating and not setting `currentLine` to `token`, so that we could use a simple !currentLine check above
|
||||
currentLine = "";
|
||||
currentLineWidth = 0;
|
||||
}
|
||||
}
|
||||
|
||||
// iterator done, push the trailing line if exists
|
||||
if (currentLine) {
|
||||
lines.push(currentLine.trimEnd());
|
||||
}
|
||||
|
||||
return lines;
|
||||
};
|
||||
|
||||
export const wrapText = (
|
||||
text: string,
|
||||
font: FontString,
|
||||
maxWidth: number,
|
||||
): string => {
|
||||
// if maxWidth is not finite or NaN which can happen in case of bugs in
|
||||
// computation, we need to make sure we don't continue as we'll end up
|
||||
// in an infinite loop
|
||||
if (!Number.isFinite(maxWidth) || maxWidth < 0) {
|
||||
return text;
|
||||
}
|
||||
|
||||
const lines: Array<string> = [];
|
||||
const originalLines = text.split("\n");
|
||||
|
||||
for (const originalLine of originalLines) {
|
||||
const currentLineWidth = getLineWidth(originalLine, font, true);
|
||||
|
||||
if (currentLineWidth <= maxWidth) {
|
||||
lines.push(originalLine);
|
||||
continue;
|
||||
}
|
||||
|
||||
const wrappedLine = wrapLine(originalLine, font, maxWidth);
|
||||
lines.push(...wrappedLine);
|
||||
}
|
||||
|
||||
return lines.join("\n");
|
||||
};
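Taken together: wrapText splits on hard newlines, keeps lines that already fit, and delegates everything else to wrapLine/wrapWord. A usage sketch matching the mocked metrics from the tests (each character measures roughly 10 wide for this font string):

    const font = "10px Cascadia, Segoe UI Emoji" as FontString;
    wrapText("Hello Excalidraw", font, 100); // → "Hello\nExcalidraw"
    wrapText("HelloたWorld", font, 50);      // → "Hello\nた\nWorld"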
|
||||
|
||||
export const charWidth = (() => {
|
||||
const cachedCharWidth: { [key: FontString]: Array<number> } = {};
|
||||
|
||||
|
packages/excalidraw/element/textWrapping.test.ts (new file, 633 lines)
@@ -0,0 +1,633 @@
|
||||
import { wrapText, parseTokens } from "./textWrapping";
|
||||
import type { FontString } from "./types";
|
||||
|
||||
describe("Test wrapText", () => {
|
||||
// font is irrelevant as jsdom does not support FontFace API
|
||||
// `measureText` width is mocked to return `text.length` by `jest-canvas-mock`
|
||||
// https://github.com/hustcc/jest-canvas-mock/blob/master/src/classes/TextMetrics.js
|
||||
const font = "10px Cascadia, Segoe UI Emoji" as FontString;
|
||||
|
||||
it("should wrap the text correctly when word length is exactly equal to max width", () => {
|
||||
const text = "Hello Excalidraw";
|
||||
// Length of "Excalidraw" is 100 and exacty equal to max width
|
||||
const res = wrapText(text, font, 100);
|
||||
expect(res).toEqual(`Hello\nExcalidraw`);
|
||||
});
|
||||
|
||||
it("should return the text as is if max width is invalid", () => {
|
||||
const text = "Hello Excalidraw";
|
||||
expect(wrapText(text, font, NaN)).toEqual(text);
|
||||
expect(wrapText(text, font, -1)).toEqual(text);
|
||||
expect(wrapText(text, font, Infinity)).toEqual(text);
|
||||
});
|
||||
|
||||
it("should show the text correctly when max width reached", () => {
|
||||
const text = "Hello😀";
|
||||
const maxWidth = 10;
|
||||
const res = wrapText(text, font, maxWidth);
|
||||
expect(res).toBe("H\ne\nl\nl\no\n😀");
|
||||
});
|
||||
|
||||
it("should not wrap number when wrapping line", () => {
|
||||
const text = "don't wrap this number 99,100.99";
|
||||
const maxWidth = 300;
|
||||
const res = wrapText(text, font, maxWidth);
|
||||
expect(res).toBe("don't wrap this number\n99,100.99");
|
||||
});
|
||||
|
||||
it("should trim all trailing whitespaces", () => {
|
||||
const text = "Hello ";
|
||||
const maxWidth = 50;
|
||||
const res = wrapText(text, font, maxWidth);
|
||||
expect(res).toBe("Hello");
|
||||
});
|
||||
|
||||
it("should trim all but one trailing whitespaces", () => {
|
||||
const text = "Hello ";
|
||||
const maxWidth = 60;
|
||||
const res = wrapText(text, font, maxWidth);
|
||||
expect(res).toBe("Hello ");
|
||||
});
|
||||
|
||||
it("should keep preceding whitespaces and trim all trailing whitespaces", () => {
|
||||
const text = " Hello World";
|
||||
const maxWidth = 90;
|
||||
const res = wrapText(text, font, maxWidth);
|
||||
expect(res).toBe(" Hello\nWorld");
|
||||
});
|
||||
|
||||
it("should keep some preceding whitespaces, trim trailing whitespaces, but kep those that fit in the trailing line", () => {
|
||||
const text = " Hello World ";
|
||||
const maxWidth = 90;
|
||||
const res = wrapText(text, font, maxWidth);
|
||||
expect(res).toBe(" Hello\nWorld ");
|
||||
});
|
||||
|
||||
it("should trim keep those whitespace that fit in the trailing line", () => {
|
||||
const text = "Hello Wo rl d ";
|
||||
const maxWidth = 100;
|
||||
const res = wrapText(text, font, maxWidth);
|
||||
expect(res).toBe("Hello Wo\nrl d ");
|
||||
});
|
||||
|
||||
it("should support multiple (multi-codepoint) emojis", () => {
|
||||
const text = "😀🗺🔥👩🏽🦰👨👩👧👦🇨🇿";
|
||||
const maxWidth = 1;
|
||||
const res = wrapText(text, font, maxWidth);
|
||||
expect(res).toBe("😀\n🗺\n🔥\n👩🏽🦰\n👨👩👧👦\n🇨🇿");
|
||||
});
|
||||
|
||||
it("should wrap the text correctly when text contains hyphen", () => {
|
||||
let text =
|
||||
"Wikipedia is hosted by Wikimedia- Foundation, a non-profit organization that also hosts a range-of other projects";
|
||||
const res = wrapText(text, font, 110);
|
||||
expect(res).toBe(
|
||||
`Wikipedia\nis hosted\nby\nWikimedia-\nFoundation,\na non-\nprofit\norganizatio\nn that also\nhosts a\nrange-of\nother\nprojects`,
|
||||
);
|
||||
|
||||
text = "Hello thereusing-now";
|
||||
expect(wrapText(text, font, 100)).toEqual("Hello\nthereusing\n-now");
|
||||
});
|
||||
|
||||
it("should support wrapping nested lists", () => {
|
||||
const text = `\tA) one tab\t\t- two tabs - 8 spaces`;
|
||||
|
||||
const maxWidth = 100;
|
||||
const res = wrapText(text, font, maxWidth);
|
||||
expect(res).toBe(`\tA) one\ntab\t\t- two\ntabs\n- 8 spaces`);
|
||||
|
||||
const maxWidth2 = 50;
|
||||
const res2 = wrapText(text, font, maxWidth2);
|
||||
expect(res2).toBe(`\tA)\none\ntab\n- two\ntabs\n- 8\nspace\ns`);
|
||||
});
|
||||
|
||||
describe("When text is CJK", () => {
|
||||
it("should break each CJK character when width is very small", () => {
|
||||
// "안녕하세요" (Hangul) + "こんにちは世界" (Hiragana, Kanji) + "コンニチハ" (Katakana) + "你好" (Han) = "Hello Hello World Hello Hi"
|
||||
const text = "안녕하세요こんにちは世界コンニチハ你好";
|
||||
const maxWidth = 10;
|
||||
const res = wrapText(text, font, maxWidth);
|
||||
expect(res).toBe(
|
||||
"안\n녕\n하\n세\n요\nこ\nん\nに\nち\nは\n世\n界\nコ\nン\nニ\nチ\nハ\n你\n好",
|
||||
);
|
||||
});
|
||||
|
||||
it("should break CJK text into longer segments when width is larger", () => {
|
||||
// "안녕하세요" (Hangul) + "こんにちは世界" (Hiragana, Kanji) + "コンニチハ" (Katakana) + "你好" (Han) = "Hello Hello World Hello Hi"
|
||||
const text = "안녕하세요こんにちは世界コンニチハ你好";
|
||||
const maxWidth = 30;
|
||||
const res = wrapText(text, font, maxWidth);
|
||||
|
||||
// measureText is mocked, so it's not precisely what would happen in prod
|
||||
expect(res).toBe("안녕하\n세요こ\nんにち\nは世界\nコンニ\nチハ你\n好");
|
||||
});
|
||||
|
||||
it("should handle a combination of CJK, latin, emojis and whitespaces", () => {
|
||||
const text = `a醫 醫 bb 你好 world-i-😀🗺🔥`;
|
||||
|
||||
const maxWidth = 150;
|
||||
const res = wrapText(text, font, maxWidth);
|
||||
expect(res).toBe(`a醫 醫 bb 你\n好 world-i-😀🗺\n🔥`);
|
||||
|
||||
const maxWidth2 = 50;
|
||||
const res2 = wrapText(text, font, maxWidth2);
|
||||
expect(res2).toBe(`a醫 醫\nbb 你\n好\nworld\n-i-😀\n🗺🔥`);
|
||||
|
||||
const maxWidth3 = 30;
|
||||
const res3 = wrapText(text, font, maxWidth3);
|
||||
expect(res3).toBe(`a醫\n醫\nbb\n你好\nwor\nld-\ni-\n😀\n🗺\n🔥`);
|
||||
});
|
||||
|
||||
it("should break before and after a regular CJK character", () => {
|
||||
const text = "HelloたWorld";
|
||||
const maxWidth1 = 50;
|
||||
const res1 = wrapText(text, font, maxWidth1);
|
||||
expect(res1).toBe("Hello\nた\nWorld");
|
||||
|
||||
const maxWidth2 = 60;
|
||||
const res2 = wrapText(text, font, maxWidth2);
|
||||
expect(res2).toBe("Helloた\nWorld");
|
||||
});
|
||||
|
||||
it("should break before and after certain CJK symbols", () => {
|
||||
const text = "こんにちは〃世界";
|
||||
const maxWidth1 = 50;
|
||||
const res1 = wrapText(text, font, maxWidth1);
|
||||
expect(res1).toBe("こんにちは\n〃世界");
|
||||
|
||||
const maxWidth2 = 60;
|
||||
const res2 = wrapText(text, font, maxWidth2);
|
||||
expect(res2).toBe("こんにちは〃\n世界");
|
||||
});
|
||||
|
||||
it("should break after, not before for certain CJK pairs", () => {
|
||||
const text = "Hello た。";
|
||||
const maxWidth = 70;
|
||||
const res = wrapText(text, font, maxWidth);
|
||||
expect(res).toBe("Hello\nた。");
|
||||
});
|
||||
|
||||
it("should break before, not after for certain CJK pairs", () => {
|
||||
const text = "Hello「たWorld」";
|
||||
const maxWidth = 60;
|
||||
const res = wrapText(text, font, maxWidth);
|
||||
expect(res).toBe("Hello\n「た\nWorld」");
|
||||
});
|
||||
|
||||
it("should break after, not before for certain CJK character pairs", () => {
|
||||
const text = "「Helloた」World";
|
||||
const maxWidth = 70;
|
||||
const res = wrapText(text, font, maxWidth);
|
||||
expect(res).toBe("「Hello\nた」World");
|
||||
});
|
||||
|
||||
it("should break Chinese sentences", () => {
|
||||
const text = `中国你好!这是一个测试。
|
||||
我们来看看:人民币¥1234「很贵」
|
||||
(括号)、逗号,句号。空格 换行 全角符号…—`;
|
||||
|
||||
const maxWidth1 = 80;
|
||||
const res1 = wrapText(text, font, maxWidth1);
|
||||
expect(res1).toBe(`中国你好!这是一\n个测试。
|
||||
我们来看看:人民\n币¥1234「很\n贵」
|
||||
(括号)、逗号,\n句号。空格 换行\n全角符号…—`);
|
||||
|
||||
const maxWidth2 = 50;
|
||||
const res2 = wrapText(text, font, maxWidth2);
|
||||
expect(res2).toBe(`中国你好!\n这是一个测\n试。
|
||||
我们来看\n看:人民币\n¥1234\n「很贵」
|
||||
(括号)、\n逗号,句\n号。空格\n换行 全角\n符号…—`);
|
||||
});
|
||||
|
||||
it("should break Japanese sentences", () => {
|
||||
const text = `日本こんにちは!これはテストです。
|
||||
見てみましょう:円¥1234「高い」
|
||||
(括弧)、読点、句点。
|
||||
空白 改行 全角記号…ー`;
|
||||
|
||||
const maxWidth1 = 80;
|
||||
const res1 = wrapText(text, font, maxWidth1);
|
||||
expect(res1).toBe(`日本こんにちは!\nこれはテストで\nす。
|
||||
見てみましょ\nう:円¥1234\n「高い」
|
||||
(括弧)、読\n点、句点。
|
||||
空白 改行\n全角記号…ー`);
|
||||
|
||||
const maxWidth2 = 50;
|
||||
const res2 = wrapText(text, font, maxWidth2);
|
||||
expect(res2).toBe(`日本こんに\nちは!これ\nはテストで\nす。
|
||||
見てみ\nましょう:\n円\n¥1234\n「高い」
|
||||
(括\n弧)、読\n点、句点。
|
||||
空白\n改行 全角\n記号…ー`);
|
||||
});
|
||||
|
||||
it("should break Korean sentences", () => {
|
||||
const text = `한국 안녕하세요! 이것은 테스트입니다.
|
||||
우리 보자: 원화₩1234「비싸다」
|
||||
(괄호), 쉼표, 마침표.
|
||||
공백 줄바꿈 전각기호…—`;
|
||||
|
||||
const maxWidth1 = 80;
|
||||
const res1 = wrapText(text, font, maxWidth1);
|
||||
expect(res1).toBe(`한국 안녕하세\n요! 이것은 테\n스트입니다.
|
||||
우리 보자: 원\n화₩1234「비\n싸다」
|
||||
(괄호), 쉼\n표, 마침표.
|
||||
공백 줄바꿈 전\n각기호…—`);
|
||||
|
||||
const maxWidth2 = 60;
|
||||
const res2 = wrapText(text, font, maxWidth2);
|
||||
expect(res2).toBe(`한국 안녕하\n세요! 이것\n은 테스트입\n니다.
|
||||
우리 보자:\n원화\n₩1234\n「비싸다」
|
||||
(괄호),\n쉼표, 마침\n표.
|
||||
공백 줄바꿈\n전각기호…—`);
|
||||
});
|
||||
});
|
||||
|
||||
describe("When text contains leading whitespaces", () => {
|
||||
const text = " \t Hello world";
|
||||
|
||||
it("should preserve leading whitespaces", () => {
|
||||
const maxWidth = 120;
|
||||
const res = wrapText(text, font, maxWidth);
|
||||
expect(res).toBe(" \t Hello\nworld");
|
||||
});
|
||||
|
||||
it("should break and collapse leading whitespaces when line breaks", () => {
|
||||
const maxWidth = 60;
|
||||
const res = wrapText(text, font, maxWidth);
|
||||
expect(res).toBe("\nHello\nworld");
|
||||
});
|
||||
|
||||
it("should break and collapse leading whitespaces whe words break", () => {
|
||||
const maxWidth = 30;
|
||||
const res = wrapText(text, font, maxWidth);
|
||||
expect(res).toBe("\nHel\nlo\nwor\nld");
|
||||
});
|
||||
});
|
||||
|
||||
describe("When text contains trailing whitespaces", () => {
|
||||
it("shouldn't add new lines for trailing spaces", () => {
|
||||
const text = "Hello whats up ";
|
||||
const maxWidth = 190;
|
||||
const res = wrapText(text, font, maxWidth);
|
||||
expect(res).toBe(text);
|
||||
});
|
||||
|
||||
it("should ignore trailing whitespaces when line breaks", () => {
|
||||
const text = "Hippopotomonstrosesquippedaliophobia ??????";
|
||||
const maxWidth = 400;
|
||||
const res = wrapText(text, font, maxWidth);
|
||||
expect(res).toBe("Hippopotomonstrosesquippedaliophobia\n??????");
|
||||
});
|
||||
|
||||
it("should not ignore trailing whitespaces when word breaks", () => {
|
||||
const text = "Hippopotomonstrosesquippedaliophobia ??????";
|
||||
const maxWidth = 300;
|
||||
const res = wrapText(text, font, maxWidth);
|
||||
expect(res).toBe("Hippopotomonstrosesquippedalio\nphobia ??????");
|
||||
});
|
||||
|
||||
it("should ignore trailing whitespaces when word breaks and line breaks", () => {
|
||||
const text = "Hippopotomonstrosesquippedaliophobia ??????";
|
||||
const maxWidth = 180;
|
||||
const res = wrapText(text, font, maxWidth);
|
||||
expect(res).toBe("Hippopotomonstrose\nsquippedaliophobia\n??????");
|
||||
});
|
||||
});
|
||||
|
||||
describe("When text doesn't contain new lines", () => {
|
||||
const text = "Hello whats up";
|
||||
|
||||
[
|
||||
{
|
||||
desc: "break all words when width of each word is less than container width",
|
||||
width: 70,
|
||||
res: `Hello\nwhats\nup`,
|
||||
},
|
||||
{
|
||||
desc: "break all characters when width of each character is less than container width",
|
||||
width: 15,
|
||||
res: `H\ne\nl\nl\no\nw\nh\na\nt\ns\nu\np`,
|
||||
},
|
||||
{
|
||||
desc: "break words as per the width",
|
||||
|
||||
width: 130,
|
||||
res: `Hello whats\nup`,
|
||||
},
|
||||
{
|
||||
desc: "fit the container",
|
||||
|
||||
width: 240,
|
||||
res: "Hello whats up",
|
||||
},
|
||||
{
|
||||
desc: "push the word if its equal to max width",
|
||||
width: 50,
|
||||
res: `Hello\nwhats\nup`,
|
||||
},
|
||||
].forEach((data) => {
|
||||
it(`should ${data.desc}`, () => {
|
||||
const res = wrapText(text, font, data.width);
|
||||
expect(res).toEqual(data.res);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe("When text contain new lines", () => {
|
||||
const text = `Hello\n whats up`;
|
||||
[
|
||||
{
|
||||
desc: "break all words when width of each word is less than container width",
|
||||
width: 70,
|
||||
res: `Hello\n whats\nup`,
|
||||
},
|
||||
{
|
||||
desc: "break all characters when width of each character is less than container width",
|
||||
width: 15,
|
||||
res: `H\ne\nl\nl\no\n\nw\nh\na\nt\ns\nu\np`,
|
||||
},
|
||||
{
|
||||
desc: "break words as per the width",
|
||||
width: 140,
|
||||
res: `Hello\n whats up`,
|
||||
},
|
||||
].forEach((data) => {
|
||||
it(`should respect new lines and ${data.desc}`, () => {
|
||||
const res = wrapText(text, font, data.width);
|
||||
expect(res).toEqual(data.res);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe("When text is long", () => {
|
||||
const text = `hellolongtextthisiswhatsupwithyouIamtypingggggandtypinggg break it now`;
|
||||
[
|
||||
{
|
||||
desc: "fit characters of long string as per container width",
|
||||
width: 160,
|
||||
res: `hellolongtextthi\nsiswhatsupwithyo\nuIamtypingggggan\ndtypinggg break\nit now`,
|
||||
},
|
||||
{
|
||||
desc: "fit characters of long string as per container width and break words as per the width",
|
||||
|
||||
width: 120,
|
||||
res: `hellolongtex\ntthisiswhats\nupwithyouIam\ntypingggggan\ndtypinggg\nbreak it now`,
|
||||
},
|
||||
{
|
||||
desc: "fit the long text when container width is greater than text length and move the rest to next line",
|
||||
|
||||
width: 590,
|
||||
res: `hellolongtextthisiswhatsupwithyouIamtypingggggandtypinggg\nbreak it now`,
|
||||
},
|
||||
].forEach((data) => {
|
||||
it(`should ${data.desc}`, () => {
|
||||
const res = wrapText(text, font, data.width);
|
||||
expect(res).toEqual(data.res);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe("Test parseTokens", () => {
|
||||
it("should tokenize latin", () => {
|
||||
let text = "Excalidraw is a virtual collaborative whiteboard";
|
||||
|
||||
expect(parseTokens(text)).toEqual([
|
||||
"Excalidraw",
|
||||
" ",
|
||||
"is",
|
||||
" ",
|
||||
"a",
|
||||
" ",
|
||||
"virtual",
|
||||
" ",
|
||||
"collaborative",
|
||||
" ",
|
||||
"whiteboard",
|
||||
]);
|
||||
|
||||
text =
|
||||
"Wikipedia is hosted by Wikimedia- Foundation, a non-profit organization that also hosts a range-of other projects";
|
||||
expect(parseTokens(text)).toEqual([
|
||||
"Wikipedia",
|
||||
" ",
|
||||
"is",
|
||||
" ",
|
||||
"hosted",
|
||||
" ",
|
||||
"by",
|
||||
" ",
|
||||
"Wikimedia-",
|
||||
" ",
|
||||
"Foundation,",
|
||||
" ",
|
||||
"a",
|
||||
" ",
|
||||
"non-",
|
||||
"profit",
|
||||
" ",
|
||||
"organization",
|
||||
" ",
|
||||
"that",
|
||||
" ",
|
||||
"also",
|
||||
" ",
|
||||
"hosts",
|
||||
" ",
|
||||
"a",
|
||||
" ",
|
||||
"range-",
|
||||
"of",
|
||||
" ",
|
||||
"other",
|
||||
" ",
|
||||
"projects",
|
||||
]);
|
||||
});
|
||||
|
||||
it("should not tokenize number", () => {
|
||||
const text = "99,100.99";
|
||||
const tokens = parseTokens(text);
|
||||
expect(tokens).toEqual(["99,100.99"]);
|
||||
});
|
||||
|
||||
it("should tokenize joined emojis", () => {
|
||||
const text = `😬🌍🗺🔥☂️👩🏽🦰👨👩👧👦👩🏾🔬🏳️🌈🧔♀️🧑🤝🧑🙅🏽♂️✅0️⃣🇨🇿🦅`;
|
||||
const tokens = parseTokens(text);
|
||||
|
||||
expect(tokens).toEqual([
|
||||
"😬",
|
||||
"🌍",
|
||||
"🗺",
|
||||
"🔥",
|
||||
"☂️",
|
||||
"👩🏽🦰",
|
||||
"👨👩👧👦",
|
||||
"👩🏾🔬",
|
||||
"🏳️🌈",
|
||||
"🧔♀️",
|
||||
"🧑🤝🧑",
|
||||
"🙅🏽♂️",
|
||||
"✅",
|
||||
"0️⃣",
|
||||
"🇨🇿",
|
||||
"🦅",
|
||||
]);
|
||||
});
|
||||
|
||||
it("should tokenize emojis mixed with mixed text", () => {
|
||||
const text = `😬a🌍b🗺c🔥d☂️《👩🏽🦰》👨👩👧👦德👩🏾🔬こ🏳️🌈안🧔♀️g🧑🤝🧑h🙅🏽♂️e✅f0️⃣g🇨🇿10🦅#hash`;
|
||||
const tokens = parseTokens(text);
|
||||
|
||||
expect(tokens).toEqual([
|
||||
"😬",
|
||||
"a",
|
||||
"🌍",
|
||||
"b",
|
||||
"🗺",
|
||||
"c",
|
||||
"🔥",
|
||||
"d",
|
||||
"☂️",
|
||||
"《",
|
||||
"👩🏽🦰",
|
||||
"》",
|
||||
"👨👩👧👦",
|
||||
"德",
|
||||
"👩🏾🔬",
|
||||
"こ",
|
||||
"🏳️🌈",
|
||||
"안",
|
||||
"🧔♀️",
|
||||
"g",
|
||||
"🧑🤝🧑",
|
||||
"h",
|
||||
"🙅🏽♂️",
|
||||
"e",
|
||||
"✅",
|
||||
"f0️⃣g", // bummer, but ok, as we traded kecaps not breaking (less common) for hash and numbers not breaking (more common)
|
||||
"🇨🇿",
|
||||
"10", // nice! do not break the number, as it's by default matched by \p{Emoji}
|
||||
"🦅",
|
||||
"#hash", // nice! do not break the hash, as it's by default matched by \p{Emoji}
|
||||
]);
|
||||
});
|
||||
|
||||
it("should tokenize decomposed chars into their composed variants", () => {
|
||||
// each input character is in a decomposed form
|
||||
const text = "čでäぴέ다й한";
|
||||
expect(text.normalize("NFC").length).toEqual(8);
|
||||
expect(text).toEqual(text.normalize("NFD"));
|
||||
|
||||
const tokens = parseTokens(text);
|
||||
expect(tokens.length).toEqual(8);
|
||||
expect(tokens).toEqual(["č", "で", "ä", "ぴ", "έ", "다", "й", "한"]);
|
||||
});
|
||||
|
||||
it("should tokenize artificial CJK", () => {
|
||||
const text = `《道德經》醫-醫こんにちは世界!안녕하세요세계;요』,다.다...원/달(((다)))[[1]]〚({((한))>)〛(「た」)た…[Hello] \t World?ニューヨーク・¥3700.55す。090-1234-5678¥1,000〜$5,000「素晴らしい!」〔重要〕#1:Taro君30%は、(たなばた)〰¥110±¥570で20℃〜9:30〜10:00【一番】`;
|
||||
// [
|
||||
// '《道', '德', '經》', '醫-',
|
||||
// '醫', 'こ', 'ん', 'に',
|
||||
// 'ち', 'は', '世', '界!',
|
||||
// '안', '녕', '하', '세',
|
||||
// '요', '세', '계;', '요』,',
|
||||
// '다.', '다...', '원/', '달',
|
||||
// '(((다)))', '[[1]]', '〚({((한))>)〛', '(「た」)',
|
||||
// 'た…', '[Hello]', ' ', '\t',
|
||||
// ' ', 'World?', 'ニ', 'ュ',
|
||||
// 'ー', 'ヨ', 'ー', 'ク・',
|
||||
// '¥3700.55', 'す。', '090-', '1234-',
|
||||
// '5678', '¥1,000〜', '$5,000', '「素',
|
||||
// '晴', 'ら', 'し', 'い!」',
|
||||
// '〔重', '要〕', '#', '1:',
|
||||
// 'Taro', '君', '30%', 'は、',
|
||||
// '(た', 'な', 'ば', 'た)',
|
||||
// '〰', '¥110±', '¥570', 'で',
|
||||
// '20℃〜', '9:30〜', '10:00', '【一',
|
||||
// '番】'
|
||||
// ]
|
||||
const tokens = parseTokens(text);
|
||||
|
||||
// Latin
|
||||
expect(tokens).toContain("[[1]]");
|
||||
expect(tokens).toContain("[Hello]");
|
||||
expect(tokens).toContain("World?");
|
||||
expect(tokens).toContain("Taro");
|
||||
|
||||
// Chinese
|
||||
expect(tokens).toContain("《道");
|
||||
expect(tokens).toContain("德");
|
||||
expect(tokens).toContain("經》");
|
||||
expect(tokens).toContain("醫-");
|
||||
expect(tokens).toContain("醫");
|
||||
|
||||
// Japanese
|
||||
expect(tokens).toContain("こ");
|
||||
expect(tokens).toContain("ん");
|
||||
expect(tokens).toContain("に");
|
||||
expect(tokens).toContain("ち");
|
||||
expect(tokens).toContain("は");
|
||||
expect(tokens).toContain("世");
|
||||
expect(tokens).toContain("ク・");
|
||||
expect(tokens).toContain("界!");
|
||||
expect(tokens).toContain("た…");
|
||||
expect(tokens).toContain("す。");
|
||||
expect(tokens).toContain("ュ");
|
||||
expect(tokens).toContain("「素");
|
||||
expect(tokens).toContain("晴");
|
||||
expect(tokens).toContain("ら");
|
||||
expect(tokens).toContain("し");
|
||||
expect(tokens).toContain("い!」");
|
||||
expect(tokens).toContain("君");
|
||||
expect(tokens).toContain("は、");
|
||||
expect(tokens).toContain("(た");
|
||||
expect(tokens).toContain("な");
|
||||
expect(tokens).toContain("ば");
|
||||
expect(tokens).toContain("た)");
|
||||
expect(tokens).toContain("で");
|
||||
expect(tokens).toContain("【一");
|
||||
expect(tokens).toContain("番】");
|
||||
|
||||
// Check for Korean
|
||||
expect(tokens).toContain("안");
|
||||
expect(tokens).toContain("녕");
|
||||
expect(tokens).toContain("하");
|
||||
expect(tokens).toContain("세");
|
||||
expect(tokens).toContain("요");
|
||||
expect(tokens).toContain("세");
|
||||
expect(tokens).toContain("계;");
|
||||
expect(tokens).toContain("요』,");
|
||||
expect(tokens).toContain("다.");
|
||||
expect(tokens).toContain("다...");
|
||||
expect(tokens).toContain("원/");
|
||||
expect(tokens).toContain("달");
|
||||
expect(tokens).toContain("(((다)))");
|
||||
expect(tokens).toContain("〚({((한))>)〛");
|
||||
expect(tokens).toContain("(「た」)");
|
||||
|
||||
// Numbers and units
|
||||
expect(tokens).toContain("¥3700.55");
|
||||
expect(tokens).toContain("090-");
|
||||
expect(tokens).toContain("1234-");
|
||||
expect(tokens).toContain("5678");
|
||||
expect(tokens).toContain("¥1,000〜");
|
||||
expect(tokens).toContain("$5,000");
|
||||
expect(tokens).toContain("1:");
|
||||
expect(tokens).toContain("30%");
|
||||
expect(tokens).toContain("¥110±");
|
||||
expect(tokens).toContain("20℃〜");
|
||||
expect(tokens).toContain("9:30〜");
|
||||
expect(tokens).toContain("10:00");
|
||||
|
||||
// Punctuation and symbols
|
||||
expect(tokens).toContain(" ");
|
||||
expect(tokens).toContain("\t");
|
||||
expect(tokens).toContain(" ");
|
||||
expect(tokens).toContain("ニ");
|
||||
expect(tokens).toContain("ー");
|
||||
expect(tokens).toContain("ヨ");
|
||||
expect(tokens).toContain("〰");
|
||||
expect(tokens).toContain("#");
|
||||
});
|
||||
});
|
||||
});
|
568
packages/excalidraw/element/textWrapping.ts
Normal file
@ -0,0 +1,568 @@
|
||||
import { ENV } from "../constants";
|
||||
import { charWidth, getLineWidth } from "./textElement";
|
||||
import type { FontString } from "./types";
|
||||
|
||||
let cachedCjkRegex: RegExp | undefined;
|
||||
let cachedLineBreakRegex: RegExp | undefined;
|
||||
let cachedEmojiRegex: RegExp | undefined;
|
||||
|
||||
/**
|
||||
* Test if a given text contains any CJK characters (including symbols, punctuation, etc.).
|
||||
*/
|
||||
export const containsCJK = (text: string) => {
|
||||
if (!cachedCjkRegex) {
|
||||
cachedCjkRegex = Regex.class(...Object.values(CJK));
|
||||
}
|
||||
|
||||
return cachedCjkRegex.test(text);
|
||||
};
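// Illustrative usage sketch (the sample strings are assumptions; the results follow
// from the CJK character classes defined further below):
// containsCJK("Hello world")      // false – plain Latin text
// containsCJK("こんにちは world")  // true  – contains Hiragana
// containsCJK("《道德經》")        // true  – Han characters and CJK punctuation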
|
||||
|
||||
const getLineBreakRegex = () => {
|
||||
if (!cachedLineBreakRegex) {
|
||||
try {
|
||||
cachedLineBreakRegex = getLineBreakRegexAdvanced();
|
||||
} catch {
|
||||
cachedLineBreakRegex = getLineBreakRegexSimple();
|
||||
}
|
||||
}
|
||||
|
||||
return cachedLineBreakRegex;
|
||||
};
|
||||
|
||||
const getEmojiRegex = () => {
|
||||
if (!cachedEmojiRegex) {
|
||||
cachedEmojiRegex = getEmojiRegexUnicode();
|
||||
}
|
||||
|
||||
return cachedEmojiRegex;
|
||||
};
|
||||
|
||||
/**
|
||||
* Common symbols used across different languages.
|
||||
*/
|
||||
const COMMON = {
|
||||
/**
|
||||
* Natural breaking points for any grammars.
|
||||
*
|
||||
* Hello world
|
||||
* ↑ BREAK ALWAYS " " → ["Hello", " ", "world"]
|
||||
* Hello-world
|
||||
* ↑ BREAK AFTER "-" → ["Hello-", "world"]
|
||||
*/
|
||||
WHITESPACE: /\s/u,
|
||||
HYPHEN: /-/u,
|
||||
/**
|
||||
* Generally do not break, unless a closing symbol is followed by an opening symbol.
|
||||
*
|
||||
* Also, western punctuation is often used in modern Korean and expects to be treated
|
||||
* similarly to the CJK opening and closing symbols.
|
||||
*
|
||||
* Hello(한글)→ ["Hello", "(한", "글)"]
|
||||
* ↑ BREAK BEFORE "("
|
||||
* ↑ BREAK AFTER ")"
|
||||
*/
|
||||
OPENING: /<\(\[\{/u,
|
||||
CLOSING: />\)\]\}.,:;!\?…\//u,
|
||||
};
|
||||
|
||||
/**
|
||||
* Characters and symbols used in Chinese, Japanese and Korean.
|
||||
*/
|
||||
const CJK = {
|
||||
/**
|
||||
* Every CJK character breaks before and after, unless it's paired with an opening or closing symbol.
|
||||
*
|
||||
* Does not include every possible char used in CJK texts, such as currency, parentheses or punctuation.
|
||||
*/
|
||||
CHAR: /\p{Script=Han}\p{Script=Hiragana}\p{Script=Katakana}\p{Script=Hangul}`'^〃〰〆#&*+-ー/\=|¦〒¬ ̄/u,
|
||||
/**
|
||||
* Opening and closing CJK punctuation breaks before and after all such characters (in case of many),
|
||||
* and creates pairs with neighboring characters.
|
||||
*
|
||||
* Hello た。→ ["Hello", "た。"]
|
||||
* ↑ DON'T BREAK "た。"
|
||||
* Hello「た」 World → ["Hello", "「た」", "World"]
|
||||
* ↑ DON'T BREAK "「た"
|
||||
* ↑ DON'T BREAK "た"
|
||||
* ↑ BREAK BEFORE "「"
|
||||
* ↑ BREAK AFTER "」"
|
||||
*/
|
||||
// eslint-disable-next-line prettier/prettier
|
||||
OPENING:/([{〈《⦅「「『【〖〔〘〚<〝/u,
|
||||
CLOSING: /)]}〉》⦆」」』】〗〕〙〛>。.,、〟‥?!:;・〜〞/u,
|
||||
/**
|
||||
* Currency symbols break before, not after
|
||||
*
|
||||
* Price¥100 → ["Price", "¥100"]
|
||||
* ↑ BREAK BEFORE "¥"
|
||||
*/
|
||||
CURRENCY: /¥₩£¢$/u,
|
||||
};
|
||||
|
||||
const EMOJI = {
|
||||
FLAG: /\p{RI}\p{RI}/u,
|
||||
JOINER:
|
||||
/(?:\p{Emoji_Modifier}|\uFE0F\u20E3?|[\u{E0020}-\u{E007E}]+\u{E007F})?/u,
|
||||
ZWJ: /\u200D/u,
|
||||
ANY: /[\p{Emoji}]/u,
|
||||
MOST: /[\p{Extended_Pictographic}\p{Emoji_Presentation}]/u,
|
||||
};
|
||||
|
||||
/**
|
||||
* Simple fallback for browsers (mainly Safari < 16.4) that don't support "Lookbehind assertion".
|
||||
*
|
||||
* Browser support as of 10/2024:
|
||||
* - 91% Lookbehind assertion https://caniuse.com/mdn-javascript_regular_expressions_lookbehind_assertion
|
||||
* - 94% Unicode character class escape https://caniuse.com/mdn-javascript_regular_expressions_unicode_character_class_escape
|
||||
*
|
||||
* Does not include advanced CJK breaking rules, but covers most of the core cases, especially for latin.
|
||||
*/
|
||||
const getLineBreakRegexSimple = () =>
|
||||
Regex.or(
|
||||
getEmojiRegex(),
|
||||
Break.On(COMMON.HYPHEN, COMMON.WHITESPACE, CJK.CHAR),
|
||||
);
|
||||
|
||||
/**
|
||||
* Specifies the line breaking rules for alphabetic-based languages,
|
||||
* Chinese, Japanese, Korean and Emojis.
|
||||
*
|
||||
* "Hello-world" → ["Hello-", "world"]
|
||||
* "Hello 「世界。」🌎🗺" → ["Hello", " ", "「世", "界。」", "🌎", "🗺"]
|
||||
*/
|
||||
const getLineBreakRegexAdvanced = () =>
|
||||
Regex.or(
|
||||
// Unicode-defined regex for (multi-codepoint) Emojis
|
||||
getEmojiRegex(),
|
||||
// Rules for whitespace and hyphen
|
||||
Break.Before(COMMON.WHITESPACE).Build(),
|
||||
Break.After(COMMON.WHITESPACE, COMMON.HYPHEN).Build(),
|
||||
// Rules for CJK (chars, symbols, currency)
|
||||
Break.Before(CJK.CHAR, CJK.CURRENCY)
|
||||
.NotPrecededBy(COMMON.OPENING, CJK.OPENING)
|
||||
.Build(),
|
||||
Break.After(CJK.CHAR)
|
||||
.NotFollowedBy(COMMON.HYPHEN, COMMON.CLOSING, CJK.CLOSING)
|
||||
.Build(),
|
||||
// Rules for opening and closing punctuation
|
||||
Break.BeforeMany(CJK.OPENING).NotPrecededBy(COMMON.OPENING).Build(),
|
||||
Break.AfterMany(CJK.CLOSING).NotFollowedBy(COMMON.CLOSING).Build(),
|
||||
Break.AfterMany(COMMON.CLOSING).FollowedBy(COMMON.OPENING).Build(),
|
||||
);
|
||||
|
||||
/**
|
||||
* Matches various emoji types.
|
||||
*
|
||||
* 1. basic emojis (😀, 🌍)
|
||||
* 2. flags (🇨🇿)
|
||||
* 3. multi-codepoint emojis:
|
||||
* - skin tones (👍🏽)
|
||||
* - variation selectors (☂️)
|
||||
* - keycaps (1️⃣)
|
||||
* - tag sequences (🏴)
|
||||
* - emoji sequences (👨👩👧👦, 👩🚀, 🏳️🌈)
|
||||
*
|
||||
* Unicode points:
|
||||
* - \uFE0F: presentation selector
|
||||
* - \u20E3: enclosing keycap
|
||||
* - \u200D: zero width joiner
|
||||
* - \u{E0020}-\u{E007E}: tags
|
||||
* - \u{E007F}: cancel tag
|
||||
*
|
||||
* @see https://unicode.org/reports/tr51/#EBNF_and_Regex, with changes:
|
||||
* - replaced \p{Emoji} with [\p{Extended_Pictographic}\p{Emoji_Presentation}], see more in `should tokenize emojis mixed with mixed text` test
|
||||
* - replaced \p{Emod} with \p{Emoji_Modifier} as some engines do not understand the abbreviation (i.e. https://devina.io/redos-checker)
|
||||
*/
|
||||
const getEmojiRegexUnicode = () =>
|
||||
Regex.group(
|
||||
Regex.or(
|
||||
EMOJI.FLAG,
|
||||
Regex.and(
|
||||
EMOJI.MOST,
|
||||
EMOJI.JOINER,
|
||||
Regex.build(
|
||||
`(?:${EMOJI.ZWJ.source}(?:${EMOJI.FLAG.source}|${EMOJI.ANY.source}${EMOJI.JOINER.source}))*`,
|
||||
),
|
||||
),
|
||||
),
|
||||
);
|
||||
|
||||
/**
|
||||
* Regex utilities for unicode character classes.
|
||||
*/
|
||||
const Regex = {
|
||||
/**
|
||||
* Builds a regex from a string.
|
||||
*/
|
||||
build: (regex: string): RegExp => new RegExp(regex, "u"),
|
||||
/**
|
||||
* Joins regexes into a single string.
|
||||
*/
|
||||
join: (...regexes: RegExp[]): string => regexes.map((x) => x.source).join(""),
|
||||
/**
|
||||
* Joins regexes into a single regex as with "and" operator.
|
||||
*/
|
||||
and: (...regexes: RegExp[]): RegExp => Regex.build(Regex.join(...regexes)),
|
||||
/**
|
||||
* Joins regexes into a single regex with "or" operator.
|
||||
*/
|
||||
or: (...regexes: RegExp[]): RegExp =>
|
||||
Regex.build(regexes.map((x) => x.source).join("|")),
|
||||
/**
|
||||
* Puts regexes into a matching group.
|
||||
*/
|
||||
group: (...regexes: RegExp[]): RegExp =>
|
||||
Regex.build(`(${Regex.join(...regexes)})`),
|
||||
/**
|
||||
* Puts regexes into a character class.
|
||||
*/
|
||||
class: (...regexes: RegExp[]): RegExp =>
|
||||
Regex.build(`[${Regex.join(...regexes)}]`),
|
||||
};
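// Illustrative sketch of how these helpers compose (the operand regexes are made-up
// examples, not values used elsewhere in this file):
// Regex.class(/a-z/u, /0-9/u)   // -> /[a-z0-9]/u   – joined sources wrapped in a character class
// Regex.or(/foo/u, /bar/u)      // -> /foo|bar/u    – alternation of the sources
// Regex.group(/foo/u, /bar/u)   // -> /(foobar)/u   – joined sources wrapped in a capturing group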
|
||||
|
||||
/**
|
||||
* Human-readable lookahead and lookbehind utilities for defining line break
|
||||
* opportunities between pairs of character classes.
|
||||
*/
|
||||
const Break = {
|
||||
/**
|
||||
* Break on the given class of characters.
|
||||
*/
|
||||
On: (...regexes: RegExp[]) => {
|
||||
const joined = Regex.join(...regexes);
|
||||
return Regex.build(`([${joined}])`);
|
||||
},
|
||||
/**
|
||||
* Break before the given class of characters.
|
||||
*/
|
||||
Before: (...regexes: RegExp[]) => {
|
||||
const joined = Regex.join(...regexes);
|
||||
const builder = () => Regex.build(`(?=[${joined}])`);
|
||||
return Break.Chain(builder) as Omit<
|
||||
ReturnType<typeof Break.Chain>,
|
||||
"FollowedBy"
|
||||
>;
|
||||
},
|
||||
/**
|
||||
* Break after the given class of characters.
|
||||
*/
|
||||
After: (...regexes: RegExp[]) => {
|
||||
const joined = Regex.join(...regexes);
|
||||
const builder = () => Regex.build(`(?<=[${joined}])`);
|
||||
return Break.Chain(builder) as Omit<
|
||||
ReturnType<typeof Break.Chain>,
|
||||
"PreceededBy"
|
||||
>;
|
||||
},
|
||||
/**
|
||||
* Break before one or multiple characters of the same class.
|
||||
*/
|
||||
BeforeMany: (...regexes: RegExp[]) => {
|
||||
const joined = Regex.join(...regexes);
|
||||
const builder = () => Regex.build(`(?<![${joined}])(?=[${joined}])`);
|
||||
return Break.Chain(builder) as Omit<
|
||||
ReturnType<typeof Break.Chain>,
|
||||
"FollowedBy"
|
||||
>;
|
||||
},
|
||||
/**
|
||||
* Break after one or multiple characters of the same class.
|
||||
*/
|
||||
AfterMany: (...regexes: RegExp[]) => {
|
||||
const joined = Regex.join(...regexes);
|
||||
const builder = () => Regex.build(`(?<=[${joined}])(?![${joined}])`);
|
||||
return Break.Chain(builder) as Omit<
|
||||
ReturnType<typeof Break.Chain>,
|
||||
"PreceededBy"
|
||||
>;
|
||||
},
|
||||
/**
|
||||
* Do not break before the given class of characters.
|
||||
*/
|
||||
NotBefore: (...regexes: RegExp[]) => {
|
||||
const joined = Regex.join(...regexes);
|
||||
const builder = () => Regex.build(`(?![${joined}])`);
|
||||
return Break.Chain(builder) as Omit<
|
||||
ReturnType<typeof Break.Chain>,
|
||||
"NotFollowedBy"
|
||||
>;
|
||||
},
|
||||
/**
|
||||
* Do not break after the given class of characters.
|
||||
*/
|
||||
NotAfter: (...regexes: RegExp[]) => {
|
||||
const joined = Regex.join(...regexes);
|
||||
const builder = () => Regex.build(`(?<![${joined}])`);
|
||||
return Break.Chain(builder) as Omit<
|
||||
ReturnType<typeof Break.Chain>,
|
||||
"NotPrecededBy"
|
||||
>;
|
||||
},
|
||||
Chain: (rootBuilder: () => RegExp) => ({
|
||||
/**
|
||||
* Build the root regex.
|
||||
*/
|
||||
Build: rootBuilder,
|
||||
/**
|
||||
* Specify additional class of characters that should precede the root regex.
|
||||
*/
|
||||
PreceededBy: (...regexes: RegExp[]) => {
|
||||
const root = rootBuilder();
|
||||
const preceeded = Break.After(...regexes).Build();
|
||||
const builder = () => Regex.and(preceeded, root);
|
||||
return Break.Chain(builder) as Omit<
|
||||
ReturnType<typeof Break.Chain>,
|
||||
"PreceededBy"
|
||||
>;
|
||||
},
|
||||
/**
|
||||
* Specify additional class of characters that should follow the root regex.
|
||||
*/
|
||||
FollowedBy: (...regexes: RegExp[]) => {
|
||||
const root = rootBuilder();
|
||||
const followed = Break.Before(...regexes).Build();
|
||||
const builder = () => Regex.and(root, followed);
|
||||
return Break.Chain(builder) as Omit<
|
||||
ReturnType<typeof Break.Chain>,
|
||||
"FollowedBy"
|
||||
>;
|
||||
},
|
||||
/**
|
||||
* Specify additional class of characters that should not precede the root regex.
|
||||
*/
|
||||
NotPrecededBy: (...regexes: RegExp[]) => {
|
||||
const root = rootBuilder();
|
||||
const notPreceeded = Break.NotAfter(...regexes).Build();
|
||||
const builder = () => Regex.and(notPreceeded, root);
|
||||
return Break.Chain(builder) as Omit<
|
||||
ReturnType<typeof Break.Chain>,
|
||||
"NotPrecededBy"
|
||||
>;
|
||||
},
|
||||
/**
|
||||
* Specify additional class of characters that should not follow the root regex.
|
||||
*/
|
||||
NotFollowedBy: (...regexes: RegExp[]) => {
|
||||
const root = rootBuilder();
|
||||
const notFollowed = Break.NotBefore(...regexes).Build();
|
||||
const builder = () => Regex.and(root, notFollowed);
|
||||
return Break.Chain(builder) as Omit<
|
||||
ReturnType<typeof Break.Chain>,
|
||||
"NotFollowedBy"
|
||||
>;
|
||||
},
|
||||
}),
|
||||
};
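// Illustrative sketch of how the chained builders expand into lookaround regexes
// (operands are classes defined above; the expansions are paraphrased, not exact sources):
// Break.Before(COMMON.WHITESPACE).Build()
//   // -> /(?=[\s])/u – a break opportunity right before whitespace
// Break.Before(CJK.CHAR, CJK.CURRENCY).NotPrecededBy(COMMON.OPENING, CJK.OPENING).Build()
//   // -> /(?<![<([{「『…])(?=[\p{Script=Han}…¥₩£¢$])/u – break before a CJK char or currency
//   //    symbol, unless an opening symbol immediately precedes it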
|
||||
|
||||
/**
|
||||
* Breaks the line into tokens based on the found line break opportunities.
|
||||
*/
|
||||
export const parseTokens = (line: string) => {
|
||||
const breakLineRegex = getLineBreakRegex();
|
||||
|
||||
// normalizing to single-codepoint composed chars due to canonical equivalence
|
||||
// of multi-codepoint versions for chars like č, で (~ so that we don't break a line in between c and ˇ)
|
||||
// filtering due to multi-codepoint chars like 👨👩👧👦, 👩🏽🦰
|
||||
return line.normalize("NFC").split(breakLineRegex).filter(Boolean);
|
||||
};
|
||||
|
||||
/**
|
||||
* Wraps the original text into the lines based on the given width.
|
||||
*/
|
||||
export const wrapText = (
|
||||
text: string,
|
||||
font: FontString,
|
||||
maxWidth: number,
|
||||
): string => {
|
||||
// if maxWidth is not finite or NaN which can happen in case of bugs in
|
||||
// computation, we need to make sure we don't continue as we'll end up
|
||||
// in an infinite loop
|
||||
if (!Number.isFinite(maxWidth) || maxWidth < 0) {
|
||||
return text;
|
||||
}
|
||||
|
||||
const lines: Array<string> = [];
|
||||
const originalLines = text.split("\n");
|
||||
|
||||
for (const originalLine of originalLines) {
|
||||
const currentLineWidth = getLineWidth(originalLine, font, true);
|
||||
|
||||
if (currentLineWidth <= maxWidth) {
|
||||
lines.push(originalLine);
|
||||
continue;
|
||||
}
|
||||
|
||||
const wrappedLine = wrapLine(originalLine, font, maxWidth);
|
||||
lines.push(...wrappedLine);
|
||||
}
|
||||
|
||||
return lines.join("\n");
|
||||
};
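// Illustrative usage sketch – the font string and width are made-up values; the exact
// result depends on measured glyph widths, but the shape follows from the algorithm above:
// const wrapped = wrapText("Hello excalidraw", "20px Excalifont" as FontString, 100);
// // -> e.g. "Hello\nexcalidraw" once the full text no longer fits within 100px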
|
||||
|
||||
/**
|
||||
* Wraps the original line into the lines based on the given width.
|
||||
*/
|
||||
const wrapLine = (
|
||||
line: string,
|
||||
font: FontString,
|
||||
maxWidth: number,
|
||||
): string[] => {
|
||||
const lines: Array<string> = [];
|
||||
const tokens = parseTokens(line);
|
||||
const tokenIterator = tokens[Symbol.iterator]();
|
||||
|
||||
let currentLine = "";
|
||||
let currentLineWidth = 0;
|
||||
|
||||
let iterator = tokenIterator.next();
|
||||
|
||||
while (!iterator.done) {
|
||||
const token = iterator.value;
|
||||
const testLine = currentLine + token;
|
||||
|
||||
// cache single codepoint whitespace, CJK or emoji width calc. as kerning should not apply here
|
||||
const testLineWidth = isSingleCharacter(token)
|
||||
? currentLineWidth + charWidth.calculate(token, font)
|
||||
: getLineWidth(testLine, font, true);
|
||||
|
||||
// build up the current line, skipping length check for possibly trailing whitespaces
|
||||
if (/\s/.test(token) || testLineWidth <= maxWidth) {
|
||||
currentLine = testLine;
|
||||
currentLineWidth = testLineWidth;
|
||||
iterator = tokenIterator.next();
|
||||
continue;
|
||||
}
|
||||
|
||||
// current line is empty => just the token (word) is longer than `maxWidth` and needs to be wrapped
|
||||
if (!currentLine) {
|
||||
const wrappedWord = wrapWord(token, font, maxWidth);
|
||||
const trailingLine = wrappedWord[wrappedWord.length - 1] ?? "";
|
||||
const precedingLines = wrappedWord.slice(0, -1);
|
||||
|
||||
lines.push(...precedingLines);
|
||||
|
||||
// trailing line of the wrapped word might still be joined with next token/s
|
||||
currentLine = trailingLine;
|
||||
currentLineWidth = getLineWidth(trailingLine, font, true);
|
||||
iterator = tokenIterator.next();
|
||||
} else {
|
||||
// push & reset, but don't iterate on the next token, as we didn't use it yet!
|
||||
lines.push(currentLine.trimEnd());
|
||||
|
||||
// purposefully not iterating and not setting `currentLine` to `token`, so that we could use a simple !currentLine check above
|
||||
currentLine = "";
|
||||
currentLineWidth = 0;
|
||||
}
|
||||
}
|
||||
|
||||
// iterator done, push the trailing line if exists
|
||||
if (currentLine) {
|
||||
const trailingLine = trimLine(currentLine, font, maxWidth);
|
||||
lines.push(trailingLine);
|
||||
}
|
||||
|
||||
return lines;
|
||||
};
|
||||
|
||||
/**
|
||||
* Wraps the word into the lines based on the given width.
|
||||
*/
|
||||
const wrapWord = (
|
||||
word: string,
|
||||
font: FontString,
|
||||
maxWidth: number,
|
||||
): Array<string> => {
|
||||
// multi-codepoint emojis are already broken apart and shouldn't be broken further
|
||||
if (getEmojiRegex().test(word)) {
|
||||
return [word];
|
||||
}
|
||||
|
||||
satisfiesWordInvariant(word);
|
||||
|
||||
const lines: Array<string> = [];
|
||||
const chars = Array.from(word);
|
||||
|
||||
let currentLine = "";
|
||||
let currentLineWidth = 0;
|
||||
|
||||
for (const char of chars) {
|
||||
const _charWidth = charWidth.calculate(char, font);
|
||||
const testLineWidth = currentLineWidth + _charWidth;
|
||||
|
||||
if (testLineWidth <= maxWidth) {
|
||||
currentLine = currentLine + char;
|
||||
currentLineWidth = testLineWidth;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (currentLine) {
|
||||
lines.push(currentLine);
|
||||
}
|
||||
|
||||
currentLine = char;
|
||||
currentLineWidth = _charWidth;
|
||||
}
|
||||
|
||||
if (currentLine) {
|
||||
lines.push(currentLine);
|
||||
}
|
||||
|
||||
return lines;
|
||||
};
|
||||
|
||||
/**
|
||||
* Similarly to browsers, does not trim all trailing whitespaces, but only those exceeding the `maxWidth`.
|
||||
*/
|
||||
const trimLine = (line: string, font: FontString, maxWidth: number) => {
|
||||
const shouldTrimWhitespaces = getLineWidth(line, font, true) > maxWidth;
|
||||
|
||||
if (!shouldTrimWhitespaces) {
|
||||
return line;
|
||||
}
|
||||
|
||||
// defensively default to `trimEnd` in case the regex does not match
|
||||
let [, trimmedLine, whitespaces] = line.match(/^(.+?)(\s+)$/) ?? [
|
||||
line,
|
||||
line.trimEnd(),
|
||||
"",
|
||||
];
|
||||
|
||||
let trimmedLineWidth = getLineWidth(trimmedLine, font, true);
|
||||
|
||||
for (const whitespace of Array.from(whitespaces)) {
|
||||
const _charWidth = charWidth.calculate(whitespace, font);
|
||||
const testLineWidth = trimmedLineWidth + _charWidth;
|
||||
|
||||
if (testLineWidth > maxWidth) {
|
||||
break;
|
||||
}
|
||||
|
||||
trimmedLine = trimmedLine + whitespace;
|
||||
trimmedLineWidth = testLineWidth;
|
||||
}
|
||||
|
||||
return trimmedLine;
|
||||
};
|
||||
|
||||
/**
|
||||
* Check if the given string is a single character.
|
||||
*
|
||||
* Handles multi-byte chars (é, 中) and purposefully does not handle multi-codepoint char (👨👩👧👦, 👩🏽🦰).
|
||||
*/
|
||||
const isSingleCharacter = (maybeSingleCharacter: string) => {
|
||||
return (
|
||||
maybeSingleCharacter.codePointAt(0) !== undefined &&
|
||||
maybeSingleCharacter.codePointAt(1) === undefined
|
||||
);
|
||||
};
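// Illustrative sketch (sample values are assumptions; results follow from codePointAt semantics):
// isSingleCharacter("中")   // true  – a single BMP codepoint (one UTF-16 code unit)
// isSingleCharacter("ab")   // false – a second codepoint exists at index 1
// isSingleCharacter("👍")   // false – astral chars take two UTF-16 code units, so index 1 is defined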
|
||||
|
||||
/**
|
||||
* Invariant for the word wrapping algorithm.
|
||||
*/
|
||||
const satisfiesWordInvariant = (word: string) => {
|
||||
if (import.meta.env.MODE === ENV.TEST || import.meta.env.DEV) {
|
||||
if (/\s/.test(word)) {
|
||||
throw new Error("Word should not contain any whitespaces!");
|
||||
}
|
||||
}
|
||||
};
|
@ -11,7 +11,7 @@ import {
|
||||
isBoundToContainer,
|
||||
isTextElement,
|
||||
} from "./typeChecks";
|
||||
import { CLASSES, isSafari, POINTER_BUTTON } from "../constants";
|
||||
import { CLASSES, POINTER_BUTTON } from "../constants";
|
||||
import type {
|
||||
ExcalidrawElement,
|
||||
ExcalidrawLinearElement,
|
||||
@ -27,13 +27,13 @@ import {
|
||||
getTextWidth,
|
||||
normalizeText,
|
||||
redrawTextBoundingBox,
|
||||
wrapText,
|
||||
getBoundTextMaxHeight,
|
||||
getBoundTextMaxWidth,
|
||||
computeContainerDimensionForBoundText,
|
||||
computeBoundTextPosition,
|
||||
getBoundTextElement,
|
||||
} from "./textElement";
|
||||
import { wrapText } from "./textWrapping";
|
||||
import {
|
||||
actionDecreaseFontSize,
|
||||
actionIncreaseFontSize,
|
||||
@ -245,11 +245,6 @@ export const textWysiwyg = ({
|
||||
|
||||
const font = getFontString(updatedTextElement);
|
||||
|
||||
// adding left and right padding buffer, so that browser does not cut the glyphs (does not work in Safari)
|
||||
const padding = !isSafari
|
||||
? Math.ceil(updatedTextElement.fontSize / appState.zoom.value / 2)
|
||||
: 0;
|
||||
|
||||
// Make sure text editor height doesn't go beyond viewport
|
||||
const editorMaxHeight =
|
||||
(appState.height - viewportY) / appState.zoom.value;
|
||||
@ -259,7 +254,7 @@ export const textWysiwyg = ({
|
||||
lineHeight: updatedTextElement.lineHeight,
|
||||
width: `${width}px`,
|
||||
height: `${height}px`,
|
||||
left: `${viewportX - padding}px`,
|
||||
left: `${viewportX}px`,
|
||||
top: `${viewportY}px`,
|
||||
transform: getTransform(
|
||||
width,
|
||||
@ -269,7 +264,6 @@ export const textWysiwyg = ({
|
||||
maxWidth,
|
||||
editorMaxHeight,
|
||||
),
|
||||
padding: `0 ${padding}px`,
|
||||
textAlign,
|
||||
verticalAlign,
|
||||
color: updatedTextElement.strokeColor,
|
||||
@ -310,6 +304,7 @@ export const textWysiwyg = ({
|
||||
minHeight: "1em",
|
||||
backfaceVisibility: "hidden",
|
||||
margin: 0,
|
||||
padding: 0,
|
||||
border: 0,
|
||||
outline: 0,
|
||||
resize: "none",
|
||||
|
@ -50,6 +50,7 @@ export class WorkerUrlNotDefinedError extends Error {
|
||||
this.code = code;
|
||||
}
|
||||
}
|
||||
|
||||
export class WorkerInTheMainChunkError extends Error {
|
||||
public code;
|
||||
constructor(
|
||||
@ -61,3 +62,14 @@ export class WorkerInTheMainChunkError extends Error {
|
||||
this.code = code;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Use this for generic, handled errors, so you can check against them
|
||||
* and rethrow if needed
|
||||
*/
|
||||
export class ExcalidrawError extends Error {
|
||||
constructor(message: string) {
|
||||
super(message);
|
||||
this.name = "ExcalidrawError";
|
||||
}
|
||||
}
|
||||
|
@ -3,11 +3,14 @@ import {
|
||||
FONT_FAMILY_FALLBACKS,
|
||||
CJK_HAND_DRAWN_FALLBACK_FONT,
|
||||
WINDOWS_EMOJI_FALLBACK_FONT,
|
||||
isSafari,
|
||||
getFontFamilyFallbacks,
|
||||
} from "../constants";
|
||||
import { isTextElement } from "../element";
|
||||
import { charWidth, getContainerElement } from "../element/textElement";
|
||||
import { containsCJK } from "../element/textWrapping";
|
||||
import { ShapeCache } from "../scene/ShapeCache";
|
||||
import { getFontString } from "../utils";
|
||||
import { getFontString, PromisePool, promiseTry } from "../utils";
|
||||
import { ExcalidrawFontFace } from "./ExcalidrawFontFace";
|
||||
|
||||
import { CascadiaFontFaces } from "./Cascadia";
|
||||
@ -73,6 +76,13 @@ export class Fonts {
|
||||
this.scene = scene;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get all the font families for the given scene.
|
||||
*/
|
||||
public getSceneFamilies = () => {
|
||||
return Fonts.getUniqueFamilies(this.scene.getNonDeletedElements());
|
||||
};
|
||||
|
||||
/**
|
||||
* if we load a (new) font, it's likely that text elements using it have
|
||||
* already been rendered using a fallback font. Thus, we want to invalidate
|
||||
@ -81,7 +91,7 @@ export class Fonts {
|
||||
* Invalidates text elements and rerenders scene, provided that at least one
|
||||
* of the supplied fontFaces has not already been processed.
|
||||
*/
|
||||
public onLoaded = (fontFaces: readonly FontFace[]) => {
|
||||
public onLoaded = (fontFaces: readonly FontFace[]): void => {
|
||||
// bail if all fonts have been processed. We're checking just a
|
||||
// subset of the font properties (though it should be enough), so it
|
||||
// can technically bail on a false positive.
|
||||
@ -127,12 +137,40 @@ export class Fonts {
|
||||
|
||||
/**
|
||||
* Load font faces for a given scene and trigger scene update.
|
||||
*
|
||||
* FontFaceSet loadingdone event we listen on may not always
|
||||
* fire (looking at you Safari), so on init we manually load all
|
||||
* fonts and rerender scene text elements once done.
|
||||
*
|
||||
* For Safari we make sure to check against each loaded font face
|
||||
* with the unique characters per family in the scene,
|
||||
* otherwise fonts might remain unloaded.
|
||||
*/
|
||||
public loadSceneFonts = async (): Promise<FontFace[]> => {
|
||||
const sceneFamilies = this.getSceneFamilies();
|
||||
const loaded = await Fonts.loadFontFaces(sceneFamilies);
|
||||
this.onLoaded(loaded);
|
||||
return loaded;
|
||||
const charsPerFamily = isSafari
|
||||
? Fonts.getCharsPerFamily(this.scene.getNonDeletedElements())
|
||||
: undefined;
|
||||
|
||||
return Fonts.loadFontFaces(sceneFamilies, charsPerFamily);
|
||||
};
|
||||
|
||||
/**
|
||||
* Load font faces for passed elements - use when the scene is unavailable (i.e. export).
|
||||
*
|
||||
* For Safari we make sure to check against each loaded font face,
|
||||
* with the unique characters per family in the elements
|
||||
* otherwise fonts might remain unloaded.
|
||||
*/
|
||||
public static loadElementsFonts = async (
|
||||
elements: readonly ExcalidrawElement[],
|
||||
): Promise<FontFace[]> => {
|
||||
const fontFamilies = Fonts.getUniqueFamilies(elements);
|
||||
const charsPerFamily = isSafari
|
||||
? Fonts.getCharsPerFamily(elements)
|
||||
: undefined;
|
||||
|
||||
return Fonts.loadFontFaces(fontFamilies, charsPerFamily);
|
||||
};
|
||||
|
||||
/**
|
||||
@ -144,17 +182,48 @@ export class Fonts {
|
||||
};
|
||||
|
||||
/**
|
||||
* Load font faces for passed elements - use when the scene is unavailable (i.e. export).
|
||||
* Generate CSS @font-face declarations for the given elements.
|
||||
*/
|
||||
public static loadElementsFonts = async (
|
||||
public static async generateFontFaceDeclarations(
|
||||
elements: readonly ExcalidrawElement[],
|
||||
): Promise<FontFace[]> => {
|
||||
const fontFamilies = Fonts.getElementsFamilies(elements);
|
||||
return await Fonts.loadFontFaces(fontFamilies);
|
||||
};
|
||||
) {
|
||||
const families = Fonts.getUniqueFamilies(elements);
|
||||
const charsPerFamily = Fonts.getCharsPerFamily(elements);
|
||||
|
||||
// for simplicity, assuming we have just one family with the CJK handdrawn fallback
|
||||
const familyWithCJK = families.find((x) =>
|
||||
getFontFamilyFallbacks(x).includes(CJK_HAND_DRAWN_FALLBACK_FONT),
|
||||
);
|
||||
|
||||
if (familyWithCJK) {
|
||||
const characters = Fonts.getCharacters(charsPerFamily, familyWithCJK);
|
||||
|
||||
if (containsCJK(characters)) {
|
||||
const family = FONT_FAMILY_FALLBACKS[CJK_HAND_DRAWN_FALLBACK_FONT];
|
||||
|
||||
// adding the same characters to the CJK hand-drawn family
|
||||
charsPerFamily[family] = new Set(characters);
|
||||
|
||||
// the order between the families and fallbacks is important, as fallbacks need to be defined first and in the reversed order
|
||||
// so that they get overridden with the later defined font faces, i.e. in case they share some codepoints
|
||||
families.unshift(FONT_FAMILY_FALLBACKS[CJK_HAND_DRAWN_FALLBACK_FONT]);
|
||||
}
|
||||
}
|
||||
|
||||
// don't trigger hundreds of concurrent requests (each performing fetch, creating a worker, etc.),
|
||||
// instead go three requests at a time, in a controlled manner, without completely blocking the main thread
|
||||
// and avoiding potential issues such as rate limits
|
||||
const iterator = Fonts.fontFacesStylesGenerator(families, charsPerFamily);
|
||||
const concurrency = 3;
|
||||
const fontFaces = await new PromisePool(iterator, concurrency).all();
|
||||
|
||||
// dedup just in case (i.e. could be the same font faces with 0 glyphs)
|
||||
return Array.from(new Set(fontFaces));
|
||||
}
|
||||
|
||||
private static async loadFontFaces(
|
||||
fontFamilies: Array<ExcalidrawTextElement["fontFamily"]>,
|
||||
charsPerFamily?: Record<number, Set<string>>,
|
||||
) {
|
||||
// add all registered font faces into the `document.fonts` (if not added already)
|
||||
for (const { fontFaces, metadata } of Fonts.registered.values()) {
|
||||
@ -170,35 +239,136 @@ export class Fonts {
|
||||
}
|
||||
}
|
||||
|
||||
const loadedFontFaces = await Promise.all(
|
||||
fontFamilies.map(async (fontFamily) => {
|
||||
const fontString = getFontString({
|
||||
fontFamily,
|
||||
fontSize: 16,
|
||||
});
|
||||
// loading 10 font faces at a time, in a controlled manner
|
||||
const iterator = Fonts.fontFacesLoader(fontFamilies, charsPerFamily);
|
||||
const concurrency = 10;
|
||||
const fontFaces = await new PromisePool(iterator, concurrency).all();
|
||||
return fontFaces.flat().filter(Boolean);
|
||||
}
|
||||
|
||||
// WARN: without "text" param it does not have to mean that all font faces are loaded, instead it could be just one!
|
||||
if (!window.document.fonts.check(fontString)) {
|
||||
private static *fontFacesLoader(
|
||||
fontFamilies: Array<ExcalidrawTextElement["fontFamily"]>,
|
||||
charsPerFamily?: Record<number, Set<string>>,
|
||||
): Generator<Promise<void | readonly [number, FontFace[]]>> {
|
||||
for (const [index, fontFamily] of fontFamilies.entries()) {
|
||||
const font = getFontString({
|
||||
fontFamily,
|
||||
fontSize: 16,
|
||||
});
|
||||
|
||||
// WARN: without the "text" param, a successful check does not necessarily mean that all font faces are loaded - it could be just one!
|
||||
// for Safari on init, we check with the "text" param instead, even though it's less efficient, as otherwise fonts might remain unloaded
|
||||
const text =
|
||||
isSafari && charsPerFamily
|
||||
? Fonts.getCharacters(charsPerFamily, fontFamily)
|
||||
: "";
|
||||
|
||||
if (!window.document.fonts.check(font, text)) {
|
||||
yield promiseTry(async () => {
|
||||
try {
|
||||
// WARN: browser prioritizes loading only font faces with unicode ranges for characters which are present in the document (html & canvas), other font faces could stay unloaded
|
||||
// we might want to retry here, i.e. in case CDN is down, but so far I didn't experience any issues - maybe it handles retry-like logic under the hood
|
||||
return await window.document.fonts.load(fontString);
|
||||
const fontFaces = await window.document.fonts.load(font, text);
|
||||
|
||||
return [index, fontFaces];
|
||||
} catch (e) {
|
||||
// don't let it all fail if just one font fails to load
|
||||
console.error(
|
||||
`Failed to load font "${fontString}" from urls "${Fonts.registered
|
||||
`Failed to load font "${font}" from urls "${Fonts.registered
|
||||
.get(fontFamily)
|
||||
?.fontFaces.map((x) => x.urls)}"`,
|
||||
e,
|
||||
);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return Promise.resolve();
|
||||
}),
|
||||
);
|
||||
private static *fontFacesStylesGenerator(
|
||||
families: Array<number>,
|
||||
charsPerFamily: Record<number, Set<string>>,
|
||||
): Generator<Promise<void | readonly [number, string]>> {
|
||||
for (const [familyIndex, family] of families.entries()) {
|
||||
const { fontFaces, metadata } = Fonts.registered.get(family) ?? {};
|
||||
|
||||
return loadedFontFaces.flat().filter(Boolean) as FontFace[];
|
||||
if (!Array.isArray(fontFaces)) {
|
||||
console.error(
|
||||
`Couldn't find registered fonts for font-family "${family}"`,
|
||||
Fonts.registered,
|
||||
);
|
||||
continue;
|
||||
}
|
||||
|
||||
if (metadata?.local) {
|
||||
// don't inline local fonts
|
||||
continue;
|
||||
}
|
||||
|
||||
for (const [fontFaceIndex, fontFace] of fontFaces.entries()) {
|
||||
yield promiseTry(async () => {
|
||||
try {
|
||||
const characters = Fonts.getCharacters(charsPerFamily, family);
|
||||
const fontFaceCSS = await fontFace.toCSS(characters);
|
||||
|
||||
if (!fontFaceCSS) {
|
||||
return;
|
||||
}
|
||||
|
||||
// giving a buffer of 10K font faces per family
|
||||
const fontFaceOrder = familyIndex * 10_000 + fontFaceIndex;
|
||||
const fontFaceTuple = [fontFaceOrder, fontFaceCSS] as const;
|
||||
|
||||
return fontFaceTuple;
|
||||
} catch (error) {
|
||||
console.error(
|
||||
`Couldn't transform font-face to css for family "${fontFace.fontFace.family}"`,
|
||||
error,
|
||||
);
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Register a new font.
|
||||
*
|
||||
* @param family font family
|
||||
* @param metadata font metadata
|
||||
* @param fontFacesDecriptors font faces descriptors
|
||||
*/
|
||||
private static register(
|
||||
this:
|
||||
| Fonts
|
||||
| {
|
||||
registered: Map<
|
||||
number,
|
||||
{ metadata: FontMetadata; fontFaces: ExcalidrawFontFace[] }
|
||||
>;
|
||||
},
|
||||
family: string,
|
||||
metadata: FontMetadata,
|
||||
...fontFacesDecriptors: ExcalidrawFontFaceDescriptor[]
|
||||
) {
|
||||
// TODO: likely we will need to abandon number value in order to support custom fonts
|
||||
const fontFamily =
|
||||
FONT_FAMILY[family as keyof typeof FONT_FAMILY] ??
|
||||
FONT_FAMILY_FALLBACKS[family as keyof typeof FONT_FAMILY_FALLBACKS];
|
||||
|
||||
const registeredFamily = this.registered.get(fontFamily);
|
||||
|
||||
if (!registeredFamily) {
|
||||
this.registered.set(fontFamily, {
|
||||
metadata,
|
||||
fontFaces: fontFacesDecriptors.map(
|
||||
({ uri, descriptors }) =>
|
||||
new ExcalidrawFontFace(family, uri, descriptors),
|
||||
),
|
||||
});
|
||||
}
|
||||
|
||||
return this.registered;
|
||||
}
|
||||
|
||||
/**
|
||||
@ -248,57 +418,9 @@ export class Fonts {
|
||||
}
|
||||
|
||||
/**
|
||||
* Register a new font.
|
||||
*
|
||||
* @param family font family
|
||||
* @param metadata font metadata
|
||||
* @param fontFacesDecriptors font faces descriptors
|
||||
* Get all the unique font families for the given elements.
|
||||
*/
|
||||
private static register(
|
||||
this:
|
||||
| Fonts
|
||||
| {
|
||||
registered: Map<
|
||||
number,
|
||||
{ metadata: FontMetadata; fontFaces: ExcalidrawFontFace[] }
|
||||
>;
|
||||
},
|
||||
family: string,
|
||||
metadata: FontMetadata,
|
||||
...fontFacesDecriptors: ExcalidrawFontFaceDescriptor[]
|
||||
) {
|
||||
// TODO: likely we will need to abandon number value in order to support custom fonts
|
||||
const fontFamily =
|
||||
FONT_FAMILY[family as keyof typeof FONT_FAMILY] ??
|
||||
FONT_FAMILY_FALLBACKS[family as keyof typeof FONT_FAMILY_FALLBACKS];
|
||||
|
||||
const registeredFamily = this.registered.get(fontFamily);
|
||||
|
||||
if (!registeredFamily) {
|
||||
this.registered.set(fontFamily, {
|
||||
metadata,
|
||||
fontFaces: fontFacesDecriptors.map(
|
||||
({ uri, descriptors }) =>
|
||||
new ExcalidrawFontFace(family, uri, descriptors),
|
||||
),
|
||||
});
|
||||
}
|
||||
|
||||
return this.registered;
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets all the font families for the given scene.
|
||||
*/
|
||||
public getSceneFamilies = () => {
|
||||
return Fonts.getElementsFamilies(this.scene.getNonDeletedElements());
|
||||
};
|
||||
|
||||
private static getAllFamilies() {
|
||||
return Array.from(Fonts.registered.keys());
|
||||
}
|
||||
|
||||
private static getElementsFamilies(
|
||||
private static getUniqueFamilies(
|
||||
elements: ReadonlyArray<ExcalidrawElement>,
|
||||
): Array<ExcalidrawTextElement["fontFamily"]> {
|
||||
return Array.from(
|
||||
@ -310,6 +432,51 @@ export class Fonts {
|
||||
}, new Set<number>()),
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get all the unique characters per font family for the given scene.
|
||||
*/
|
||||
private static getCharsPerFamily(
|
||||
elements: ReadonlyArray<ExcalidrawElement>,
|
||||
): Record<number, Set<string>> {
|
||||
const charsPerFamily: Record<number, Set<string>> = {};
|
||||
|
||||
for (const element of elements) {
|
||||
if (!isTextElement(element)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// gather unique codepoints only when inlining fonts
|
||||
for (const char of element.originalText) {
|
||||
if (!charsPerFamily[element.fontFamily]) {
|
||||
charsPerFamily[element.fontFamily] = new Set();
|
||||
}
|
||||
|
||||
charsPerFamily[element.fontFamily].add(char);
|
||||
}
|
||||
}
|
||||
|
||||
return charsPerFamily;
|
||||
}
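// Illustrative sketch of the resulting shape (family ids and characters are made up):
// getCharsPerFamily(elements)
// // -> { 5: Set { "H", "e", "l", "o" }, 8: Set { "世", "界" } }
// // Fonts.getCharacters(charsPerFamily, 8) then joins a family's set into the string "世界"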
|
||||
|
||||
/**
|
||||
* Get characters for a given family.
|
||||
*/
|
||||
private static getCharacters(
|
||||
charsPerFamily: Record<number, Set<string>>,
|
||||
family: number,
|
||||
) {
|
||||
return charsPerFamily[family]
|
||||
? Array.from(charsPerFamily[family]).join("")
|
||||
: "";
|
||||
}
|
||||
|
||||
/**
|
||||
* Get all registered font families.
|
||||
*/
|
||||
private static getAllFamilies() {
|
||||
return Array.from(Fonts.registered.keys());
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -7,7 +7,7 @@ import {
|
||||
SVG_NS,
|
||||
} from "../constants";
|
||||
import { normalizeLink, toValidURL } from "../data/url";
|
||||
import { getElementAbsoluteCoords } from "../element";
|
||||
import { getElementAbsoluteCoords, hashString } from "../element";
|
||||
import {
|
||||
createPlaceholderEmbeddableLabel,
|
||||
getEmbedLink,
|
||||
@ -411,7 +411,25 @@ const renderElementToSvg = (
|
||||
const fileData =
|
||||
isInitializedImageElement(element) && files[element.fileId];
|
||||
if (fileData) {
|
||||
const symbolId = `image-${fileData.id}`;
|
||||
const { reuseImages = true } = renderConfig;
|
||||
|
||||
let symbolId = `image-${fileData.id}`;
|
||||
|
||||
let uncroppedWidth = element.width;
|
||||
let uncroppedHeight = element.height;
|
||||
if (element.crop) {
|
||||
({ width: uncroppedWidth, height: uncroppedHeight } =
|
||||
getUncroppedWidthAndHeight(element));
|
||||
|
||||
symbolId = `image-crop-${fileData.id}-${hashString(
|
||||
`${uncroppedWidth}x${uncroppedHeight}`,
|
||||
)}`;
|
||||
}
|
||||
|
||||
if (!reuseImages) {
|
||||
symbolId = `image-${element.id}`;
|
||||
}
|
||||
|
||||
let symbol = svgRoot.querySelector(`#${symbolId}`);
|
||||
if (!symbol) {
|
||||
symbol = svgRoot.ownerDocument!.createElementNS(SVG_NS, "symbol");
|
||||
@ -421,18 +439,7 @@ const renderElementToSvg = (
|
||||
image.setAttribute("href", fileData.dataURL);
|
||||
image.setAttribute("preserveAspectRatio", "none");
|
||||
|
||||
if (element.crop) {
|
||||
const { width: uncroppedWidth, height: uncroppedHeight } =
|
||||
getUncroppedWidthAndHeight(element);
|
||||
|
||||
symbol.setAttribute(
|
||||
"viewBox",
|
||||
`${
|
||||
element.crop.x / (element.crop.naturalWidth / uncroppedWidth)
|
||||
} ${
|
||||
element.crop.y / (element.crop.naturalHeight / uncroppedHeight)
|
||||
} ${width} ${height}`,
|
||||
);
|
||||
if (element.crop || !reuseImages) {
|
||||
image.setAttribute("width", `${uncroppedWidth}`);
|
||||
image.setAttribute("height", `${uncroppedHeight}`);
|
||||
} else {
|
||||
@ -456,8 +463,23 @@ const renderElementToSvg = (
|
||||
use.setAttribute("filter", IMAGE_INVERT_FILTER);
|
||||
}
|
||||
|
||||
use.setAttribute("width", `${width}`);
|
||||
use.setAttribute("height", `${height}`);
|
||||
let normalizedCropX = 0;
|
||||
let normalizedCropY = 0;
|
||||
|
||||
if (element.crop) {
|
||||
const { width: uncroppedWidth, height: uncroppedHeight } =
|
||||
getUncroppedWidthAndHeight(element);
|
||||
normalizedCropX =
|
||||
element.crop.x / (element.crop.naturalWidth / uncroppedWidth);
|
||||
normalizedCropY =
|
||||
element.crop.y / (element.crop.naturalHeight / uncroppedHeight);
|
||||
}
|
||||
|
||||
const adjustedCenterX = cx + normalizedCropX;
|
||||
const adjustedCenterY = cy + normalizedCropY;
|
||||
|
||||
use.setAttribute("width", `${width + normalizedCropX}`);
|
||||
use.setAttribute("height", `${height + normalizedCropY}`);
|
||||
use.setAttribute("opacity", `${opacity}`);
|
||||
|
||||
// We first apply `scale` transforms (horizontal/vertical mirroring)
|
||||
@ -467,21 +489,43 @@ const renderElementToSvg = (
|
||||
// the transformations correctly (the transform-origin was not being
|
||||
// applied correctly).
|
||||
if (element.scale[0] !== 1 || element.scale[1] !== 1) {
|
||||
const translateX = element.scale[0] !== 1 ? -width : 0;
|
||||
const translateY = element.scale[1] !== 1 ? -height : 0;
|
||||
use.setAttribute(
|
||||
"transform",
|
||||
`scale(${element.scale[0]}, ${element.scale[1]}) translate(${translateX} ${translateY})`,
|
||||
`translate(${adjustedCenterX} ${adjustedCenterY}) scale(${
|
||||
element.scale[0]
|
||||
} ${
|
||||
element.scale[1]
|
||||
}) translate(${-adjustedCenterX} ${-adjustedCenterY})`,
|
||||
);
|
||||
}
|
||||
|
||||
const g = svgRoot.ownerDocument!.createElementNS(SVG_NS, "g");
|
||||
|
||||
if (element.crop) {
|
||||
const mask = svgRoot.ownerDocument!.createElementNS(SVG_NS, "mask");
|
||||
mask.setAttribute("id", `mask-image-crop-${element.id}`);
|
||||
mask.setAttribute("fill", "#fff");
|
||||
const maskRect = svgRoot.ownerDocument!.createElementNS(
|
||||
SVG_NS,
|
||||
"rect",
|
||||
);
|
||||
|
||||
maskRect.setAttribute("x", `${normalizedCropX}`);
|
||||
maskRect.setAttribute("y", `${normalizedCropY}`);
|
||||
maskRect.setAttribute("width", `${width}`);
|
||||
maskRect.setAttribute("height", `${height}`);
|
||||
|
||||
mask.appendChild(maskRect);
|
||||
root.appendChild(mask);
|
||||
g.setAttribute("mask", `url(#${mask.id})`);
|
||||
}
|
||||
|
||||
g.appendChild(use);
|
||||
g.setAttribute(
|
||||
"transform",
|
||||
`translate(${offsetX || 0} ${
|
||||
offsetY || 0
|
||||
}) rotate(${degree} ${cx} ${cy})`,
|
||||
`translate(${offsetX - normalizedCropX} ${
|
||||
offsetY - normalizedCropY
|
||||
}) rotate(${degree} ${adjustedCenterX} ${adjustedCenterY})`,
|
||||
);
|
||||
|
||||
if (element.roundness) {
|
||||
|
@ -9,14 +9,7 @@ import type {
|
||||
import type { Bounds } from "../element/bounds";
|
||||
import { getCommonBounds, getElementAbsoluteCoords } from "../element/bounds";
|
||||
import { renderSceneToSvg } from "../renderer/staticSvgScene";
|
||||
import {
|
||||
arrayToMap,
|
||||
distance,
|
||||
getFontString,
|
||||
PromisePool,
|
||||
promiseTry,
|
||||
toBrandedType,
|
||||
} from "../utils";
|
||||
import { arrayToMap, distance, getFontString, toBrandedType } from "../utils";
|
||||
import type { AppState, BinaryFiles } from "../types";
|
||||
import {
|
||||
DEFAULT_EXPORT_PADDING,
|
||||
@ -25,9 +18,6 @@ import {
|
||||
SVG_NS,
|
||||
THEME,
|
||||
THEME_FILTER,
|
||||
FONT_FAMILY_FALLBACKS,
|
||||
getFontFamilyFallbacks,
|
||||
CJK_HAND_DRAWN_FALLBACK_FONT,
|
||||
} from "../constants";
|
||||
import { getDefaultAppState } from "../appState";
|
||||
import { serializeAsJSON } from "../data/json";
|
||||
@ -44,12 +34,11 @@ import {
|
||||
import { newTextElement } from "../element";
|
||||
import { type Mutable } from "../utility-types";
|
||||
import { newElementWith } from "../element/mutateElement";
|
||||
import { isFrameLikeElement, isTextElement } from "../element/typeChecks";
|
||||
import { isFrameLikeElement } from "../element/typeChecks";
|
||||
import type { RenderableElementsMap } from "./types";
|
||||
import { syncInvalidIndices } from "../fractionalIndex";
|
||||
import { renderStaticScene } from "../renderer/staticScene";
|
||||
import { Fonts } from "../fonts";
|
||||
import { containsCJK } from "../element/textElement";
|
||||
|
||||
const SVG_EXPORT_TAG = `<!-- svg-source:excalidraw -->`;
|
||||
|
||||
@ -284,6 +273,7 @@ export const exportToSvg = async (
|
||||
renderEmbeddables?: boolean;
|
||||
exportingFrame?: ExcalidrawFrameLikeElement | null;
|
||||
skipInliningFonts?: true;
|
||||
reuseImages?: boolean;
|
||||
},
|
||||
): Promise<SVGSVGElement> => {
|
||||
const frameRendering = getFrameRenderingConfig(
|
||||
@ -318,9 +308,7 @@ export const exportToSvg = async (
|
||||
// the tempScene hack which duplicates and regenerates ids
|
||||
if (exportEmbedScene) {
|
||||
try {
|
||||
metadata = await (
|
||||
await import("../data/image")
|
||||
).encodeSvgMetadata({
|
||||
metadata = (await import("../data/image")).encodeSvgMetadata({
|
||||
// when embedding scene, we want to embed the origionally supplied
|
||||
// elements which don't contain the temp frame labels.
|
||||
// But it also requires that the exportToSvg is being supplied with
|
||||
@ -376,7 +364,10 @@ export const exportToSvg = async (
|
||||
</clipPath>`;
|
||||
}
|
||||
|
||||
const fontFaces = opts?.skipInliningFonts ? [] : await getFontFaces(elements);
|
||||
const fontFaces = !opts?.skipInliningFonts
|
||||
? await Fonts.generateFontFaceDeclarations(elements)
|
||||
: [];
|
||||
|
||||
const delimiter = "\n "; // 6 spaces
|
||||
|
||||
svgRoot.innerHTML = `
|
||||
@ -425,6 +416,7 @@ export const exportToSvg = async (
|
||||
.map((element) => [element.id, true]),
|
||||
)
|
||||
: new Map(),
|
||||
reuseImages: opts?.reuseImages ?? true,
|
||||
},
|
||||
);
|
||||
|
||||
@ -454,111 +446,3 @@ export const getExportSize = (
|
||||
|
||||
return [width, height];
|
||||
};
|
||||
|
||||
const getFontFaces = async (
|
||||
elements: readonly ExcalidrawElement[],
|
||||
): Promise<string[]> => {
|
||||
const fontFamilies = new Set<number>();
|
||||
const charsPerFamily: Record<number, Set<string>> = {};
|
||||
|
||||
for (const element of elements) {
|
||||
if (!isTextElement(element)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
fontFamilies.add(element.fontFamily);
|
||||
|
||||
// gather unique codepoints only when inlining fonts
|
||||
for (const char of element.originalText) {
|
||||
if (!charsPerFamily[element.fontFamily]) {
|
||||
charsPerFamily[element.fontFamily] = new Set();
|
||||
}
|
||||
|
||||
charsPerFamily[element.fontFamily].add(char);
|
||||
}
|
||||
}
|
||||
|
||||
const orderedFamilies = Array.from(fontFamilies);
|
||||
|
||||
// for simplicity, assuming we have just one family with the CJK handdrawn fallback
|
||||
const familyWithCJK = orderedFamilies.find((x) =>
|
||||
getFontFamilyFallbacks(x).includes(CJK_HAND_DRAWN_FALLBACK_FONT),
|
||||
);
|
||||
|
||||
if (familyWithCJK) {
|
||||
const characters = getChars(charsPerFamily[familyWithCJK]);
|
||||
|
||||
if (containsCJK(characters)) {
|
||||
const family = FONT_FAMILY_FALLBACKS[CJK_HAND_DRAWN_FALLBACK_FONT];
|
||||
|
||||
// adding the same characters to the CJK handrawn family
|
||||
charsPerFamily[family] = new Set(characters);
|
||||
|
||||
// the order between the families and fallbacks is important, as fallbacks need to be defined first and in the reversed order
|
||||
// so that they get overriden with the later defined font faces, i.e. in case they share some codepoints
|
||||
orderedFamilies.unshift(
|
||||
FONT_FAMILY_FALLBACKS[CJK_HAND_DRAWN_FALLBACK_FONT],
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
const iterator = fontFacesIterator(orderedFamilies, charsPerFamily);
|
||||
|
||||
// don't trigger hundreds of concurrent requests (each performing fetch, creating a worker, etc.),
|
||||
// instead go three requests at a time, in a controlled manner, without completely blocking the main thread
|
||||
// and avoiding potential issues such as rate limits
|
||||
const concurrency = 3;
|
||||
const fontFaces = await new PromisePool(iterator, concurrency).all();
|
||||
|
||||
// dedup just in case (i.e. could be the same font faces with 0 glyphs)
|
||||
return Array.from(new Set(fontFaces));
|
||||
};
|
||||
|
||||
function* fontFacesIterator(
|
||||
families: Array<number>,
|
||||
charsPerFamily: Record<number, Set<string>>,
|
||||
): Generator<Promise<void | readonly [number, string]>> {
|
||||
for (const [familyIndex, family] of families.entries()) {
|
||||
const { fontFaces, metadata } = Fonts.registered.get(family) ?? {};
|
||||
|
||||
if (!Array.isArray(fontFaces)) {
|
||||
console.error(
|
||||
`Couldn't find registered fonts for font-family "${family}"`,
|
||||
Fonts.registered,
|
||||
);
|
||||
continue;
|
||||
}
|
||||
|
||||
if (metadata?.local) {
|
||||
// don't inline local fonts
|
||||
continue;
|
||||
}
|
||||
|
||||
for (const [fontFaceIndex, fontFace] of fontFaces.entries()) {
|
||||
yield promiseTry(async () => {
|
||||
try {
|
||||
const characters = getChars(charsPerFamily[family]);
|
||||
const fontFaceCSS = await fontFace.toCSS(characters);
|
||||
|
||||
if (!fontFaceCSS) {
|
||||
return;
|
||||
}
|
||||
|
||||
// giving a buffer of 10K font faces per family
|
||||
const fontFaceOrder = familyIndex * 10_000 + fontFaceIndex;
|
||||
const fontFaceTuple = [fontFaceOrder, fontFaceCSS] as const;
|
||||
|
||||
return fontFaceTuple;
|
||||
} catch (error) {
|
||||
console.error(
|
||||
`Couldn't transform font-face to css for family "${fontFace.fontFace.family}"`,
|
||||
error,
|
||||
);
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const getChars = (characterSet: Set<string>) =>
|
||||
Array.from(characterSet).join("");
|
||||
|
@ -46,6 +46,13 @@ export type SVGRenderConfig = {
|
||||
frameRendering: AppState["frameRendering"];
|
||||
canvasBackgroundColor: AppState["viewBackgroundColor"];
|
||||
embedsValidationStatus: EmbedsValidationStatus;
|
||||
/**
|
||||
* whether to attempt to reuse images as much as possible through symbols
|
||||
* (reduces SVG size, but may be incompatible with some SVG renderers)
|
||||
*
|
||||
* @default true
|
||||
*/
|
||||
reuseImages: boolean;
|
||||
};
|
||||
|
||||
export type InteractiveCanvasRenderConfig = {
|
||||
|
@ -10,5 +10,5 @@ exports[`export > exporting svg containing transformed images > svg export outpu
|
||||
</style>
|
||||
|
||||
</defs>
|
||||
<clipPath id="image-clipPath-id1" data-id="id1"><rect width="100" height="100" rx="25" ry="25"></rect></clipPath><g transform="translate(30.710678118654755 30.710678118654755) rotate(315 50 50)" clip-path="url(#image-clipPath-id1)" data-id="id1"><use href="#image-file_A" width="100" height="100" opacity="1"></use></g><clipPath id="image-clipPath-id2" data-id="id2"><rect width="50" height="50" rx="12.5" ry="12.5"></rect></clipPath><g transform="translate(130.71067811865476 30.710678118654755) rotate(45 25 25)" clip-path="url(#image-clipPath-id2)" data-id="id2"><use href="#image-file_A" width="50" height="50" opacity="1" transform="scale(-1, 1) translate(-50 0)"></use></g><clipPath id="image-clipPath-id3" data-id="id3"><rect width="100" height="100" rx="25" ry="25"></rect></clipPath><g transform="translate(30.710678118654755 130.71067811865476) rotate(45 50 50)" clip-path="url(#image-clipPath-id3)" data-id="id3"><use href="#image-file_A" width="100" height="100" opacity="1" transform="scale(1, -1) translate(0 -100)"></use></g><clipPath id="image-clipPath-id4" data-id="id4"><rect width="50" height="50" rx="12.5" ry="12.5"></rect></clipPath><g transform="translate(130.71067811865476 130.71067811865476) rotate(315 25 25)" clip-path="url(#image-clipPath-id4)" data-id="id4"><use href="#image-file_A" width="50" height="50" opacity="1" transform="scale(-1, -1) translate(-50 -50)"></use></g></svg>"
|
||||
<clipPath id="image-clipPath-id1" data-id="id1"><rect width="100" height="100" rx="25" ry="25"></rect></clipPath><g transform="translate(30.710678118654755 30.710678118654755) rotate(315 50 50)" clip-path="url(#image-clipPath-id1)" data-id="id1"><use href="#image-file_A" width="100" height="100" opacity="1"></use></g><clipPath id="image-clipPath-id2" data-id="id2"><rect width="50" height="50" rx="12.5" ry="12.5"></rect></clipPath><g transform="translate(130.71067811865476 30.710678118654755) rotate(45 25 25)" clip-path="url(#image-clipPath-id2)" data-id="id2"><use href="#image-file_A" width="50" height="50" opacity="1" transform="translate(25 25) scale(-1 1) translate(-25 -25)"></use></g><clipPath id="image-clipPath-id3" data-id="id3"><rect width="100" height="100" rx="25" ry="25"></rect></clipPath><g transform="translate(30.710678118654755 130.71067811865476) rotate(45 50 50)" clip-path="url(#image-clipPath-id3)" data-id="id3"><use href="#image-file_A" width="100" height="100" opacity="1" transform="translate(50 50) scale(1 -1) translate(-50 -50)"></use></g><clipPath id="image-clipPath-id4" data-id="id4"><rect width="50" height="50" rx="12.5" ry="12.5"></rect></clipPath><g transform="translate(130.71067811865476 130.71067811865476) rotate(315 25 25)" clip-path="url(#image-clipPath-id4)" data-id="id4"><use href="#image-file_A" width="50" height="50" opacity="1" transform="translate(25 25) scale(-1 -1) translate(-25 -25)"></use></g></svg>"
|
||||
`;
|
||||
|
@ -17,7 +17,7 @@ exports[`Test Linear Elements > Test bound text element > should match styles fo
|
||||
class="excalidraw-wysiwyg"
|
||||
data-type="wysiwyg"
|
||||
dir="auto"
|
||||
style="position: absolute; display: inline-block; min-height: 1em; backface-visibility: hidden; margin: 0px; border: 0px; outline: 0; resize: none; background: transparent; overflow: hidden; z-index: var(--zIndex-wysiwyg); word-break: break-word; white-space: pre-wrap; overflow-wrap: break-word; box-sizing: content-box; width: 10.5px; height: 26.25px; left: 25px; top: 7.5px; transform: translate(0px, 0px) scale(1) rotate(0deg); padding: 0px 10px; text-align: center; vertical-align: middle; color: rgb(30, 30, 30); opacity: 1; filter: var(--theme-filter); max-height: 992.5px; font: Emoji 20px 20px; line-height: 1.25; font-family: Excalifont, Xiaolai, Segoe UI Emoji;"
|
||||
style="position: absolute; display: inline-block; min-height: 1em; backface-visibility: hidden; margin: 0px; padding: 0px; border: 0px; outline: 0; resize: none; background: transparent; overflow: hidden; z-index: var(--zIndex-wysiwyg); word-break: break-word; white-space: pre-wrap; overflow-wrap: break-word; box-sizing: content-box; width: 10.5px; height: 26.25px; left: 35px; top: 7.5px; transform: translate(0px, 0px) scale(1) rotate(0deg); text-align: center; vertical-align: middle; color: rgb(30, 30, 30); opacity: 1; filter: var(--theme-filter); max-height: 992.5px; font: Emoji 20px 20px; line-height: 1.25; font-family: Excalifont, Xiaolai, Segoe UI Emoji;"
|
||||
tabindex="0"
|
||||
wrap="off"
|
||||
/>
|
||||
|
@ -62,10 +62,10 @@ describe("export", () => {
|
||||
});
|
||||
|
||||
it("test encoding/decoding scene for SVG export", async () => {
|
||||
const encoded = await encodeSvgMetadata({
|
||||
const encoded = encodeSvgMetadata({
|
||||
text: serializeAsJSON(testElements, h.state, {}, "local"),
|
||||
});
|
||||
const decoded = JSON.parse(await decodeSvgMetadata({ svg: encoded }));
|
||||
const decoded = JSON.parse(decodeSvgMetadata({ svg: encoded }));
|
||||
expect(decoded.elements).toEqual([
|
||||
expect.objectContaining({ type: "text", text: "😀" }),
|
||||
]);
|
||||
|
@ -20,7 +20,6 @@ import { LinearElementEditor } from "../element/linearElementEditor";
|
||||
import { act, queryByTestId, queryByText } from "@testing-library/react";
|
||||
import {
|
||||
getBoundTextElementPosition,
|
||||
wrapText,
|
||||
getBoundTextMaxWidth,
|
||||
} from "../element/textElement";
|
||||
import * as textElementUtils from "../element/textElement";
|
||||
@ -29,6 +28,7 @@ import { vi } from "vitest";
|
||||
import { arrayToMap } from "../utils";
|
||||
import type { GlobalPoint } from "../../math";
|
||||
import { pointCenter, pointFrom } from "../../math";
|
||||
import { wrapText } from "../element/textWrapping";
|
||||
|
||||
const renderInteractiveScene = vi.spyOn(
|
||||
InteractiveCanvas,
|
||||
|
@ -106,6 +106,11 @@ export type BinaryFileData = {
|
||||
* Epoch timestamp in milliseconds.
|
||||
*/
|
||||
lastRetrieved?: number;
|
||||
/**
|
||||
* indicates the version of the file. This can be used to determine whether
|
||||
* the file dataURL has changed e.g. as part of restore due to schema update.
|
||||
*/
|
||||
version?: number;
|
||||
};
|
||||
|
||||
export type BinaryFileMetadata = Omit<BinaryFileData, "dataURL">;
|
||||
|
@ -167,10 +167,12 @@ export const exportToSvg = async ({
|
||||
renderEmbeddables,
|
||||
exportingFrame,
|
||||
skipInliningFonts,
|
||||
reuseImages,
|
||||
}: Omit<ExportOpts, "getDimensions"> & {
|
||||
exportPadding?: number;
|
||||
renderEmbeddables?: boolean;
|
||||
skipInliningFonts?: true;
|
||||
reuseImages?: boolean;
|
||||
}): Promise<SVGSVGElement> => {
|
||||
const { elements: restoredElements, appState: restoredAppState } = restore(
|
||||
{ elements, appState },
|
||||
@ -187,6 +189,7 @@ export const exportToSvg = async ({
|
||||
exportingFrame,
|
||||
renderEmbeddables,
|
||||
skipInliningFonts,
|
||||
reuseImages,
|
||||
});
|
||||
};
|
||||
|
||||
|
@ -27,7 +27,7 @@ describe("embedding scene data", () => {
|
||||
|
||||
const svg = svgNode.outerHTML;
|
||||
|
||||
const parsedString = await decodeSvgMetadata({ svg });
|
||||
const parsedString = decodeSvgMetadata({ svg });
|
||||
const importedData: ImportedDataState = JSON.parse(parsedString);
|
||||
|
||||
expect(sourceElements.map((x) => x.id)).toEqual(
|
||||
|
@@ -1,74 +0,0 @@
-{
-  "short_name": "Excalidraw",
-  "name": "Excalidraw",
-  "description": "Excalidraw is a whiteboard tool that lets you easily sketch diagrams that have a hand-drawn feel to them.",
-  "icons": [
-    {
-      "src": "android-chrome-192x192.png",
-      "sizes": "192x192",
-      "type": "image/png"
-    },
-    {
-      "src": "apple-touch-icon.png",
-      "type": "image/png",
-      "sizes": "180x180"
-    }
-  ],
-  "start_url": "/",
-  "display": "standalone",
-  "theme_color": "#000000",
-  "background_color": "#ffffff",
-  "file_handlers": [
-    {
-      "action": "/",
-      "accept": {
-        "application/vnd.excalidraw+json": [".excalidraw"]
-      }
-    }
-  ],
-  "share_target": {
-    "action": "/web-share-target",
-    "method": "POST",
-    "enctype": "multipart/form-data",
-    "params": {
-      "files": [
-        {
-          "name": "file",
-          "accept": ["application/vnd.excalidraw+json", "application/json", ".excalidraw"]
-        }
-      ]
-    }
-  },
-  "screenshots": [
-    {
-      "src": "/screenshots/virtual-whiteboard.png",
-      "type": "image/png",
-      "sizes": "462x945"
-    },
-    {
-      "src": "/screenshots/wireframe.png",
-      "type": "image/png",
-      "sizes": "462x945"
-    },
-    {
-      "src": "/screenshots/illustration.png",
-      "type": "image/png",
-      "sizes": "462x945"
-    },
-    {
-      "src": "/screenshots/shapes.png",
-      "type": "image/png",
-      "sizes": "462x945"
-    },
-    {
-      "src": "/screenshots/collaboration.png",
-      "type": "image/png",
-      "sizes": "462x945"
-    },
-    {
-      "src": "/screenshots/export.png",
-      "type": "image/png",
-      "sizes": "462x945"
-    }
-  ]
-}
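With the static manifest file deleted above, a web manifest is commonly generated instead by vite-plugin-pwa, which appears in the yarn.lock hunks below. A hedged sketch of that approach; whether this repo configures it exactly this way is not shown in the diff:

// vite.config.ts (sketch, not the repo's actual config)
import { defineConfig } from "vite";
import { VitePWA } from "vite-plugin-pwa";

export default defineConfig({
  plugins: [
    VitePWA({
      // The generated manifest can carry the same fields the deleted file had.
      manifest: {
        name: "Excalidraw",
        short_name: "Excalidraw",
        start_url: "/",
        display: "standalone",
        theme_color: "#000000",
        background_color: "#ffffff",
      },
    }),
  ],
});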
@@ -4,6 +4,10 @@ Disallow:
 User-agent: facebookexternalhit
 Disallow:

-user-agent: *
+User-agent: *
 Allow: /$
+Allow: /
+Allow: /sitemap.xml
 Disallow: /
+
+Sitemap: https://excalidraw.com/sitemap.xml
yarn.lock
@@ -4819,9 +4819,9 @@ cross-fetch@3.1.5:
     node-fetch "2.6.7"

 cross-spawn@^7.0.0, cross-spawn@^7.0.1, cross-spawn@^7.0.2, cross-spawn@^7.0.3:
-  version "7.0.3"
-  resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.3.tgz#f73a85b9d5d41d045551c177e2882d4ac85728a6"
-  integrity sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==
+  version "7.0.6"
+  resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.6.tgz#8a58fe78f00dcd70c370451759dfbfaf03e8ee9f"
+  integrity sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==
   dependencies:
     path-key "^3.1.0"
     shebang-command "^2.0.0"
@@ -9638,7 +9638,7 @@ string-natural-compare@^3.0.1:
   resolved "https://registry.yarnpkg.com/string-natural-compare/-/string-natural-compare-3.0.1.tgz#7a42d58474454963759e8e8b7ae63d71c1e7fdf4"
   integrity sha512-n3sPwynL1nwKi3WJ6AIsClwBMa0zTi54fn2oLU6ndfTSIO05xaznjSf15PcBZU6FNWbmN5Q6cxT4V5hGvB4taw==

-"string-width-cjs@npm:string-width@^4.2.0", string-width@^4.2.3:
+"string-width-cjs@npm:string-width@^4.2.0":
   version "4.2.3"
   resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010"
   integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==
@@ -9656,6 +9656,15 @@ string-width@^4.1.0, string-width@^4.2.0:
     is-fullwidth-code-point "^3.0.0"
     strip-ansi "^6.0.0"

+string-width@^4.2.3:
+  version "4.2.3"
+  resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010"
+  integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==
+  dependencies:
+    emoji-regex "^8.0.0"
+    is-fullwidth-code-point "^3.0.0"
+    strip-ansi "^6.0.1"
+
 string-width@^5.0.0, string-width@^5.0.1, string-width@^5.1.2:
   version "5.1.2"
   resolved "https://registry.yarnpkg.com/string-width/-/string-width-5.1.2.tgz#14f8daec6d81e7221d2a357e668cab73bdbca794"
@@ -9727,7 +9736,14 @@ stringify-object@^3.3.0:
     is-obj "^1.0.1"
     is-regexp "^1.0.0"

-"strip-ansi-cjs@npm:strip-ansi@^6.0.1", strip-ansi@6.0.1, strip-ansi@^3.0.0, strip-ansi@^6.0.0, strip-ansi@^6.0.1, strip-ansi@^7.0.1:
+"strip-ansi-cjs@npm:strip-ansi@^6.0.1":
   version "6.0.1"
   resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9"
   integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==
+  dependencies:
+    ansi-regex "^5.0.1"
+
+strip-ansi@6.0.1, strip-ansi@^3.0.0, strip-ansi@^6.0.0, strip-ansi@^6.0.1, strip-ansi@^7.0.1:
+  version "6.0.1"
+  resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9"
+  integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==
@@ -10388,6 +10404,11 @@ vite-plugin-pwa@0.17.4:
     workbox-build "^7.0.0"
     workbox-window "^7.0.0"

+vite-plugin-sitemap@0.7.1:
+  version "0.7.1"
+  resolved "https://registry.yarnpkg.com/vite-plugin-sitemap/-/vite-plugin-sitemap-0.7.1.tgz#3b3cf6d2e067ec835f810e5eaaacf05485c819fe"
+  integrity sha512-4NRTkiWytLuAmcikckrLcLl9iYA20+5v6l8XshcOrzxH1WR8H0O3S6sTQYfjMrE8su/LG6Y0cTodvOdcOIxaLw==
+
 vite-plugin-svgr@4.2.0:
   version "4.2.0"
   resolved "https://registry.yarnpkg.com/vite-plugin-svgr/-/vite-plugin-svgr-4.2.0.tgz#9f3bf5206b0ec510287e56d16f1915e729bb4e6b"
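The vite-plugin-sitemap dependency added above pairs with the robots.txt/sitemap changes earlier in this diff. A sketch of how the plugin is typically registered; the hostname and options are placeholders, since the repo's actual vite config is not part of this diff:

// vite.config.ts (sketch)
import { defineConfig } from "vite";
import Sitemap from "vite-plugin-sitemap";

export default defineConfig({
  plugins: [
    // Generates sitemap.xml at build time; the hostname here is a placeholder.
    Sitemap({ hostname: "https://excalidraw.com" }),
  ],
});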
@@ -10959,7 +10980,7 @@ workbox-window@7.1.0, workbox-window@^7.0.0:
     "@types/trusted-types" "^2.0.2"
     workbox-core "7.1.0"

-"wrap-ansi-cjs@npm:wrap-ansi@^7.0.0", wrap-ansi@^7.0.0:
+"wrap-ansi-cjs@npm:wrap-ansi@^7.0.0":
   version "7.0.0"
   resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43"
   integrity sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==
@@ -10977,6 +10998,15 @@ wrap-ansi@^6.2.0:
     string-width "^4.1.0"
     strip-ansi "^6.0.0"

+wrap-ansi@^7.0.0:
+  version "7.0.0"
+  resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43"
+  integrity sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==
+  dependencies:
+    ansi-styles "^4.0.0"
+    string-width "^4.1.0"
+    strip-ansi "^6.0.0"
+
 wrap-ansi@^8.1.0:
   version "8.1.0"
   resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-8.1.0.tgz#56dc22368ee570face1b49819975d9b9a5ead214"