[web] Refactoring in preparation of introducing a cgroup sync (#3156)
@@ -1,7 +1,7 @@
 import log from "@/base/log";
 import {
     decryptPublicMagicMetadata,
-    getUICreationDate,
+    fileCreationPhotoDate,
     updateRemotePublicMagicMetadata,
     type ParsedMetadataDate,
 } from "@/media/file-metadata";
@@ -352,11 +352,11 @@ const updateEnteFileDate = async (
 
     if (!newDate) return;
 
-    const existingUIDate = getUICreationDate(
+    const existingDate = fileCreationPhotoDate(
         enteFile,
         await decryptPublicMagicMetadata(enteFile),
     );
-    if (newDate.timestamp == existingUIDate.getTime()) return;
+    if (newDate.timestamp == existingDate.getTime()) return;
 
     await updateRemotePublicMagicMetadata(enteFile, {
         dateTime: newDate.dateTime,
@@ -1,4 +1,5 @@
 import { haveWindow } from "@/base/env";
+import { type Location } from "@/base/types";
 import { styled } from "@mui/material";
 import { useEffect, useRef } from "react";
 import { MapButton } from "./MapButton";
@@ -29,7 +30,7 @@ const MapBoxEnableContainer = styled(MapBoxContainer)`
 `;
 
 interface MapBoxProps {
-    location: { latitude: number; longitude: number };
+    location: Location;
     mapEnabled: boolean;
     openUpdateMapConfirmationDialog: () => void;
 }
@@ -3,9 +3,11 @@ import { Titlebar } from "@/base/components/Titlebar";
 import { EllipsizedTypography } from "@/base/components/Typography";
 import { nameAndExtension } from "@/base/file";
 import log from "@/base/log";
+import type { Location } from "@/base/types";
+import type { ParsedMetadata } from "@/media/file-metadata";
 import {
-    getUICreationDate,
+    fileCreationPhotoDate,
+    fileLocation,
     updateRemotePublicMagicMetadata,
     type ParsedMetadataDate,
 } from "@/media/file-metadata";
@@ -97,16 +99,9 @@ export const FileInfo: React.FC<FileInfoProps> = ({
     const [openRawExif, setOpenRawExif] = useState(false);
 
     const location = useMemo(() => {
-        if (file && file.metadata) {
-            if (
-                (file.metadata.latitude || file.metadata.latitude === 0) &&
-                !(file.metadata.longitude === 0 && file.metadata.latitude === 0)
-            ) {
-                return {
-                    latitude: file.metadata.latitude,
-                    longitude: file.metadata.longitude,
-                };
-            }
+        if (file) {
+            const location = fileLocation(file);
+            if (location) return location;
         }
         return exif?.parsed?.location;
     }, [file, exif]);
@@ -181,7 +176,7 @@ export const FileInfo: React.FC<FileInfoProps> = ({
                 !mapEnabled ||
                 publicCollectionGalleryContext.accessedThroughSharedURL ? (
                     <Link
-                        href={getOpenStreetMapLink(location)}
+                        href={openStreetMapLink(location)}
                         target="_blank"
                         rel="noopener"
                        sx={{ fontWeight: "bold" }}
@@ -205,7 +200,7 @@ export const FileInfo: React.FC<FileInfoProps> = ({
                 }
                 customEndButton={
                     <CopyButton
-                        code={getOpenStreetMapLink(location)}
+                        code={openStreetMapLink(location)}
                         color="secondary"
                         size="medium"
                     />
@@ -367,7 +362,7 @@ export const CreationTime: React.FC<CreationTimeProps> = ({
     const closeEditMode = () => setIsInEditMode(false);
 
     const publicMagicMetadata = getPublicMagicMetadataSync(enteFile);
-    const originalDate = getUICreationDate(enteFile, publicMagicMetadata);
+    const originalDate = fileCreationPhotoDate(enteFile, publicMagicMetadata);
 
     const saveEdits = async (pickedTime: ParsedMetadataDate) => {
         try {
@@ -531,11 +526,8 @@ const BasicDeviceCamera: React.FC<{ parsedExif: ExifInfo }> = ({
     );
 };
 
-const getOpenStreetMapLink = (location: {
-    latitude: number;
-    longitude: number;
-}) =>
-    `https://www.openstreetmap.org/?mlat=${location.latitude}&mlon=${location.longitude}#map=15/${location.latitude}/${location.longitude}`;
+const openStreetMapLink = ({ latitude, longitude }: Location) =>
+    `https://www.openstreetmap.org/?mlat=${latitude}&mlon=${longitude}#map=15/${latitude}/${longitude}`;
 
 interface RawExifProps {
     open: boolean;
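A note on the new openStreetMapLink helper above: it is a pure template over the shared Location type, so its output can be checked in isolation. A quick sketch (the coordinates are illustrative):

    import type { Location } from "@/base/types";

    const openStreetMapLink = ({ latitude, longitude }: Location) =>
        `https://www.openstreetmap.org/?mlat=${latitude}&mlon=${longitude}#map=15/${latitude}/${longitude}`;

    // Illustrative coordinates (Eiffel Tower).
    const location: Location = { latitude: 48.8584, longitude: 2.2945 };
    console.log(openStreetMapLink(location));
    // → https://www.openstreetmap.org/?mlat=48.8584&mlon=2.2945#map=15/48.8584/2.2945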
@@ -1,8 +1,8 @@
 import { Collection } from "types/collection";
 import { SearchBarMobile } from "./searchBarMobile";
 
+import { UpdateSearch } from "@/new/photos/services/search/types";
 import { EnteFile } from "@/new/photos/types/file";
-import { UpdateSearch } from "types/search";
 import SearchInput from "./searchInput";
 import { SearchBarWrapper } from "./styledComponents";
@@ -1,10 +1,10 @@
 import { PeopleList } from "@/new/photos/components/PeopleList";
 import { isMLEnabled } from "@/new/photos/services/ml";
+import { Suggestion, SuggestionType } from "@/new/photos/services/search/types";
 import { Row } from "@ente/shared/components/Container";
 import { Box, styled } from "@mui/material";
 import { t } from "i18next";
 import { components } from "react-select";
-import { Suggestion, SuggestionType } from "types/search";
 
 const { Menu } = components;
@@ -1,9 +1,18 @@
 import { FileType } from "@/media/file-type";
 import { isMLEnabled } from "@/new/photos/services/ml";
 import type {
+    City,
     SearchDateComponents,
     SearchPerson,
 } from "@/new/photos/services/search/types";
+import {
+    ClipSearchScores,
+    SearchOption,
+    SearchQuery,
+    SuggestionType,
+    UpdateSearch,
+} from "@/new/photos/services/search/types";
+import type { LocationTag } from "@/new/photos/services/user-entity";
 import { EnteFile } from "@/new/photos/types/file";
 import CloseIcon from "@mui/icons-material/Close";
 import { IconButton } from "@mui/material";
@@ -15,20 +24,11 @@ import { useCallback, useContext, useEffect, useRef, useState } from "react";
 import { components } from "react-select";
 import AsyncSelect from "react-select/async";
 import { InputActionMeta } from "react-select/src/types";
-import { City } from "services/locationSearchService";
 import {
     getAutoCompleteSuggestions,
     getDefaultOptions,
 } from "services/searchService";
 import { Collection } from "types/collection";
-import { LocationTagData } from "types/entity";
-import {
-    ClipSearchScores,
-    Search,
-    SearchOption,
-    SuggestionType,
-    UpdateSearch,
-} from "types/search";
 import { SelectStyles } from "../../../../styles/search";
 import { SearchInputWrapper } from "../styledComponents";
 import MenuWithPeople from "./MenuWithPeople";
@@ -116,7 +116,7 @@ export default function SearchInput(props: Iprops) {
         if (!selectedOption) {
             return;
         }
-        let search: Search;
+        let search: SearchQuery;
         switch (selectedOption.type) {
             case SuggestionType.DATE:
                 search = {
@@ -126,7 +126,7 @@ export default function SearchInput(props: Iprops) {
                 break;
             case SuggestionType.LOCATION:
                 search = {
-                    location: selectedOption.value as LocationTagData,
+                    location: selectedOption.value as LocationTag,
                 };
                 props.setIsOpen(true);
                 break;
@@ -1,3 +1,4 @@
+import { SearchOption } from "@/new/photos/services/search/types";
 import {
     FreeFlowText,
     SpaceBetweenFlex,
@@ -6,7 +7,6 @@ import { Box, Divider, Stack, Typography } from "@mui/material";
 import CollectionCard from "components/Collections/CollectionCard";
 import { ResultPreviewTile } from "components/Collections/styledComponents";
 import { t } from "i18next";
-import { SearchOption } from "types/search";
 
 import { components } from "react-select";
@@ -1,3 +1,7 @@
+import {
+    SearchOption,
+    SuggestionType,
+} from "@/new/photos/services/search/types";
 import { FlexWrapper } from "@ente/shared/components/Container";
 import CalendarIcon from "@mui/icons-material/CalendarMonth";
 import FolderIcon from "@mui/icons-material/Folder";
@@ -7,7 +11,6 @@ import SearchIcon from "@mui/icons-material/SearchOutlined";
 import { Box } from "@mui/material";
 import { components } from "react-select";
 import { SelectComponents } from "react-select/src/components";
-import { SearchOption, SuggestionType } from "types/search";
 
 const { ValueContainer } = components;
@@ -1,8 +1,8 @@
+import { SearchResultSummary } from "@/new/photos/services/search/types";
 import { Typography } from "@mui/material";
 import { CollectionInfo } from "components/Collections/CollectionInfo";
 import { CollectionInfoBarWrapper } from "components/Collections/styledComponents";
 import { t } from "i18next";
-import { SearchResultSummary } from "types/search";
 
 interface Iprops {
     searchResultSummary: SearchResultSummary;
@@ -1,4 +1,5 @@
 import { NavbarBase } from "@/base/components/Navbar";
+import { UpdateSearch } from "@/new/photos/services/search/types";
 import { EnteFile } from "@/new/photos/types/file";
 import { FlexWrapper, HorizontalFlex } from "@ente/shared/components/Container";
 import ArrowBack from "@mui/icons-material/ArrowBack";
@@ -8,7 +9,6 @@ import SearchBar from "components/Search/SearchBar";
 import UploadButton from "components/Upload/UploadButton";
 import { t } from "i18next";
 import { Collection } from "types/collection";
-import { UpdateSearch } from "types/search";
 
 interface Iprops {
     openSidebar: () => void;
@@ -10,6 +10,7 @@ import {
 } from "@/base/log-web";
 import { AppUpdate } from "@/base/types/ipc";
 import DownloadManager from "@/new/photos/services/download";
+import { runMigrations } from "@/new/photos/services/migrations";
 import { initML, isMLSupported } from "@/new/photos/services/ml";
 import { ensure } from "@/utils/ensure";
 import { Overlay } from "@ente/shared/components/Container";
@@ -141,6 +142,7 @@ export default function App({ Component, pageProps }: AppProps) {
         logStartupBanner(user?.id);
         HTTPService.setHeaders({ "X-Client-Package": clientPackageName });
         logUnhandledErrorsAndRejections(true);
+        void runMigrations();
         return () => logUnhandledErrorsAndRejections(false);
     }, []);
@@ -7,6 +7,12 @@ import {
     getLocalTrashedFiles,
 } from "@/new/photos/services/files";
 import { wipHasSwitchedOnceCmpAndSet } from "@/new/photos/services/ml";
+import { search, setSearchableFiles } from "@/new/photos/services/search";
+import {
+    SearchQuery,
+    SearchResultSummary,
+    UpdateSearch,
+} from "@/new/photos/services/search/types";
 import { EnteFile } from "@/new/photos/types/file";
 import { mergeMetadata } from "@/new/photos/utils/file";
 import { CenteredFlex } from "@ente/shared/components/Container";
@@ -31,7 +37,6 @@ import {
     getKey,
 } from "@ente/shared/storage/sessionStorage";
 import type { User } from "@ente/shared/user/types";
-import { isPromise } from "@ente/shared/utils";
 import { Typography, styled } from "@mui/material";
 import AuthenticateUserModal from "components/AuthenticateUserModal";
 import Collections from "components/Collections";
@@ -94,7 +99,6 @@ import {
     getSectionSummaries,
 } from "services/collectionService";
 import { syncFiles } from "services/fileService";
-import locationSearchService from "services/locationSearchService";
 import { sync, triggerPreFileInfoSync } from "services/sync";
 import { syncTrash } from "services/trashService";
 import uploadManager from "services/upload/uploadManager";
@@ -106,7 +110,6 @@ import {
     SetFilesDownloadProgressAttributes,
     SetFilesDownloadProgressAttributesCreator,
 } from "types/gallery";
-import { Search, SearchResultSummary, UpdateSearch } from "types/search";
 import { FamilyData } from "types/user";
 import { checkSubscriptionPurchase } from "utils/billing";
 import {
@@ -119,7 +122,6 @@ import {
     hasNonSystemCollections,
     splitNormalAndHiddenCollections,
 } from "utils/collection";
-import ComlinkSearchWorker from "utils/comlink/ComlinkSearchWorker";
 import {
     FILE_OPS_TYPE,
     constructFileToCollectionMap,
@@ -193,7 +195,7 @@ export default function Gallery() {
     const [collectionNamerAttributes, setCollectionNamerAttributes] =
         useState<CollectionNamerAttributes>(null);
     const [collectionNamerView, setCollectionNamerView] = useState(false);
-    const [search, setSearch] = useState<Search>(null);
+    const [searchQuery, setSearchQuery] = useState<SearchQuery>(null);
     const [shouldDisableDropzone, setShouldDisableDropzone] = useState(false);
     const [isPhotoSwipeOpen, setIsPhotoSwipeOpen] = useState(false);
     // TODO(MR): This is never true currently, this is the WIP ability to show
@@ -384,7 +386,6 @@ export default function Gallery() {
         setIsFirstLoad(false);
         setJustSignedUp(false);
         setIsFirstFetch(false);
-        locationSearchService.loadCities();
         syncInterval.current = setInterval(() => {
             syncWithRemote(false, true);
         }, SYNC_INTERVAL_IN_MICROSECONDS);
@@ -400,13 +401,7 @@ export default function Gallery() {
         };
     }, []);
 
-    useEffectSingleThreaded(
-        async ([files]: [files: EnteFile[]]) => {
-            const searchWorker = await ComlinkSearchWorker.getInstance();
-            await searchWorker.setFiles(files);
-        },
-        [files],
-    );
+    useEffect(() => setSearchableFiles(files), [files]);
 
     useEffect(() => {
         if (!user || !files || !collections || !hiddenFiles || !trashedFiles) {
@@ -523,11 +518,9 @@ export default function Gallery() {
             ]);
         }
 
-        const searchWorker = await ComlinkSearchWorker.getInstance();
-
         let filteredFiles: EnteFile[] = [];
         if (isInSearchMode) {
-            filteredFiles = getUniqueFiles(await searchWorker.search(search));
+            filteredFiles = getUniqueFiles(await search(searchQuery));
         } else {
             filteredFiles = getUniqueFiles(
                 (isInHiddenSection ? hiddenFiles : files).filter((item) => {
@@ -587,9 +580,9 @@ export default function Gallery() {
                 }),
             );
         }
-        if (search?.clip) {
+        if (searchQuery?.clip) {
             return filteredFiles.sort((a, b) => {
-                return search.clip.get(b.id) - search.clip.get(a.id);
+                return searchQuery.clip.get(b.id) - searchQuery.clip.get(a.id);
             });
         }
         const sortAsc = activeCollection?.pubMagicMetadata?.data?.asc ?? false;
@@ -605,7 +598,7 @@ export default function Gallery() {
         tempDeletedFileIds,
         tempHiddenFileIds,
         hiddenFileIds,
-        search,
+        searchQuery,
         activeCollectionID,
         archivedCollections,
     ]);
@@ -975,7 +968,7 @@ export default function Gallery() {
         if (newSearch?.collection) {
             setActiveCollectionID(newSearch?.collection);
         } else {
-            setSearch(newSearch);
+            setSearchQuery(newSearch);
         }
         setIsClipSearchResult(!!newSearch?.clip);
         if (!newSearch?.collection) {
@@ -1243,37 +1236,6 @@ export default function Gallery() {
     );
 }
 
-// useEffectSingleThreaded is a useEffect that will only run one at a time, and will
-// caches the latest deps of requests that come in while it is running, and will
-// run that after the current run is complete.
-function useEffectSingleThreaded(
-    fn: (deps) => void | Promise<void>,
-    deps: any[],
-): void {
-    const updateInProgress = useRef(false);
-    const nextRequestDepsRef = useRef<any[]>(null);
-    useEffect(() => {
-        const main = async (deps) => {
-            if (updateInProgress.current) {
-                nextRequestDepsRef.current = deps;
-                return;
-            }
-            updateInProgress.current = true;
-            const result = fn(deps);
-            if (isPromise(result)) {
-                await result;
-            }
-            updateInProgress.current = false;
-            if (nextRequestDepsRef.current) {
-                const deps = nextRequestDepsRef.current;
-                nextRequestDepsRef.current = null;
-                setTimeout(() => main(deps), 0);
-            }
-        };
-        main(deps);
-    }, deps);
-}
-
 /**
  * Preload all three variants of a responsive image.
  */
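The deleted useEffectSingleThreaded hook above implements a "single flight, latest wins" discipline: at most one invocation of fn runs at a time, and if further dependency updates arrive mid-run only the newest is remembered and replayed once the current run finishes. The same idea as a standalone sketch (an illustrative helper, not part of this commit):

    // "Single flight, latest wins": serialize calls, coalescing bursts so
    // that only the most recent pending argument is replayed.
    const serializeLatest = <T>(fn: (arg: T) => Promise<void>) => {
        let running = false;
        let pending: T | undefined;
        const run = async (arg: T): Promise<void> => {
            if (running) {
                pending = arg; // remember only the latest request
                return;
            }
            running = true;
            try {
                await fn(arg);
            } finally {
                running = false;
            }
            if (pending !== undefined) {
                const next = pending;
                pending = undefined;
                await run(next); // replay the coalesced request
            }
        };
        return run;
    };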
@@ -1,193 +0,0 @@
-import { decryptMetadataJSON, sharedCryptoWorker } from "@/base/crypto";
-import log from "@/base/log";
-import { apiURL } from "@/base/origins";
-import HTTPService from "@ente/shared/network/HTTPService";
-import localForage from "@ente/shared/storage/localForage";
-import { getToken } from "@ente/shared/storage/localStorage/helpers";
-import { getActualKey } from "@ente/shared/user";
-import {
-    EncryptedEntity,
-    EncryptedEntityKey,
-    Entity,
-    EntityKey,
-    EntitySyncDiffResponse,
-    EntityType,
-} from "types/entity";
-import { getLatestVersionEntities } from "utils/entity";
-
-const DIFF_LIMIT = 500;
-
-const ENTITY_TABLES: Record<EntityType, string> = {
-    [EntityType.LOCATION_TAG]: "location_tags",
-};
-
-const ENTITY_KEY_TABLES: Record<EntityType, string> = {
-    [EntityType.LOCATION_TAG]: "location_tags_key",
-};
-
-const ENTITY_SYNC_TIME_TABLES: Record<EntityType, string> = {
-    [EntityType.LOCATION_TAG]: "location_tags_time",
-};
-
-const getLocalEntity = async <T>(type: EntityType) => {
-    const entities: Array<Entity<T>> =
-        (await localForage.getItem<Entity<T>[]>(ENTITY_TABLES[type])) || [];
-    return entities;
-};
-
-const getEntityLastSyncTime = async (type: EntityType) => {
-    return (
-        (await localForage.getItem<number>(ENTITY_SYNC_TIME_TABLES[type])) ?? 0
-    );
-};
-
-const getCachedEntityKey = async (type: EntityType) => {
-    const entityKey: EntityKey =
-        (await localForage.getItem<EntityKey>(ENTITY_KEY_TABLES[type])) || null;
-    return entityKey;
-};
-
-// TODO: unexport
-export const getEntityKey = async (type: EntityType) => {
-    try {
-        const entityKey = await getCachedEntityKey(type);
-        if (entityKey) {
-            return entityKey;
-        }
-        const token = getToken();
-        if (!token) {
-            return;
-        }
-        const resp = await HTTPService.get(
-            await apiURL("/user-entity/key"),
-            {
-                type,
-            },
-            {
-                "X-Auth-Token": token,
-            },
-        );
-        const encryptedEntityKey: EncryptedEntityKey = resp.data;
-        const worker = await sharedCryptoWorker();
-        const masterKey = await getActualKey();
-        const { encryptedKey, header, ...rest } = encryptedEntityKey;
-        const decryptedData = await worker.decryptB64(
-            encryptedKey,
-            header,
-            masterKey,
-        );
-        const decryptedEntityKey: EntityKey = { data: decryptedData, ...rest };
-        localForage.setItem(ENTITY_KEY_TABLES[type], decryptedEntityKey);
-        return decryptedEntityKey;
-    } catch (e) {
-        log.error("Get entity key failed", e);
-        throw e;
-    }
-};
-
-export const getLatestEntities = async <T>(type: EntityType) => {
-    try {
-        await syncEntity<T>(type);
-        return await getLocalEntity<T>(type);
-    } catch (e) {
-        log.error("Sync entities failed", e);
-        throw e;
-    }
-};
-
-export const syncEntities = async () => {
-    try {
-        await syncEntity(EntityType.LOCATION_TAG);
-    } catch (e) {
-        log.error("Sync entities failed", e);
-        throw e;
-    }
-};
-
-const syncEntity = async <T>(type: EntityType): Promise<Entity<T>> => {
-    try {
-        let entities = await getLocalEntity(type);
-        log.info(
-            `Syncing ${type} entities localEntitiesCount: ${entities.length}`,
-        );
-        let syncTime = await getEntityLastSyncTime(type);
-        log.info(`Syncing ${type} entities syncTime: ${syncTime}`);
-        let response: EntitySyncDiffResponse;
-        do {
-            response = await getEntityDiff(type, syncTime);
-            if (!response.diff?.length) {
-                return;
-            }
-
-            const entityKey = await getEntityKey(type);
-            // @ts-expect-error TODO: Need to use zod here.
-            const newDecryptedEntities: Array<Entity<T>> = await Promise.all(
-                response.diff.map(async (entity: EncryptedEntity) => {
-                    if (entity.isDeleted) {
-                        // This entry is deleted, so we don't need to decrypt it, just return it as is
-                        // as unknown as EntityData is a hack to get around the type system
-                        return entity as unknown as Entity<T>;
-                    }
-                    const { encryptedData, header, ...rest } = entity;
-                    const decryptedData = await decryptMetadataJSON({
-                        encryptedDataB64: encryptedData,
-                        decryptionHeaderB64: header,
-                        keyB64: entityKey.data,
-                    });
-                    return {
-                        ...rest,
-                        data: decryptedData,
-                    };
-                }),
-            );
-
-            entities = getLatestVersionEntities([
-                ...entities,
-                ...newDecryptedEntities,
-            ]);
-
-            const nonDeletedEntities = entities.filter(
-                (entity) => !entity.isDeleted,
-            );
-
-            if (response.diff.length) {
-                syncTime = response.diff.slice(-1)[0].updatedAt;
-            }
-            await localForage.setItem(ENTITY_TABLES[type], nonDeletedEntities);
-            await localForage.setItem(ENTITY_SYNC_TIME_TABLES[type], syncTime);
-            log.info(
-                `Syncing ${type} entities syncedEntitiesCount: ${nonDeletedEntities.length}`,
-            );
-        } while (response.diff.length === DIFF_LIMIT);
-    } catch (e) {
-        log.error("Sync entity failed", e);
-    }
-};
-
-const getEntityDiff = async (
-    type: EntityType,
-    time: number,
-): Promise<EntitySyncDiffResponse> => {
-    try {
-        const token = getToken();
-        if (!token) {
-            return;
-        }
-        const resp = await HTTPService.get(
-            await apiURL("/user-entity/entity/diff"),
-            {
-                sinceTime: time,
-                type,
-                limit: DIFF_LIMIT,
-            },
-            {
-                "X-Auth-Token": token,
-            },
-        );
-
-        return resp.data;
-    } catch (e) {
-        log.error("Get entity diff failed", e);
-        throw e;
-    }
-};
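The deleted syncEntity above is an instance of the usual paginated diff-sync loop: fetch changes since the last persisted sync time in pages of DIFF_LIMIT, merge and persist them, advance the sync time to the last entry's updatedAt, and keep going while pages come back full. The skeleton of that loop, with simplified types (illustrative, not part of the codebase):

    interface DiffEntry {
        updatedAt: number;
    }

    const syncViaDiff = async <T extends DiffEntry>(
        fetchDiff: (sinceTime: number, limit: number) => Promise<T[]>,
        apply: (entries: T[]) => Promise<void>,
        savedSyncTime: number,
        limit = 500,
    ) => {
        let syncTime = savedSyncTime;
        let page: T[];
        do {
            page = await fetchDiff(syncTime, limit);
            if (page.length == 0) break;
            await apply(page);
            // Entries arrive in updatedAt order; the last one is the new
            // high watermark to resume from on the next sync.
            syncTime = page[page.length - 1].updatedAt;
        } while (page.length === limit); // a full page may mean more remains
    };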
@@ -1,6 +1,10 @@
 import { ensureElectron } from "@/base/electron";
 import log from "@/base/log";
-import type { Metadata } from "@/media/file-metadata";
+import {
+    fileCreationPhotoDate,
+    fileLocation,
+    type Metadata,
+} from "@/media/file-metadata";
 import { FileType } from "@/media/file-type";
 import { decodeLivePhoto } from "@/media/live-photo";
 import downloadManager from "@/new/photos/services/download";
@@ -17,12 +21,12 @@ import { writeStream } from "@/new/photos/utils/native-stream";
 import { wait } from "@/utils/promise";
 import { CustomError } from "@ente/shared/error";
 import { LS_KEYS, getData, setData } from "@ente/shared/storage/localStorage";
-import { formatDateTimeShort } from "@ente/shared/time/format";
 import type { User } from "@ente/shared/user/types";
 import QueueProcessor, {
     CancellationStatus,
     RequestCanceller,
 } from "@ente/shared/utils/queueProcessor";
+import i18n from "i18next";
 import { Collection } from "types/collection";
 import {
     CollectionExportNames,
@@ -100,6 +104,7 @@ class ExportService {
         success: 0,
         failed: 0,
     };
+    private cachedMetadataDateTimeFormatter: Intl.DateTimeFormat;
 
     getExportSettings(): ExportSettings {
         try {
@@ -1076,12 +1081,38 @@ class ExportService {
         fileExportName: string,
         file: EnteFile,
     ) {
+        const formatter = this.metadataDateTimeFormatter();
         await ensureElectron().fs.writeFile(
             getFileMetadataExportPath(collectionExportPath, fileExportName),
-            getGoogleLikeMetadataFile(fileExportName, file),
+            getGoogleLikeMetadataFile(fileExportName, file, formatter),
         );
     }
 
+    /**
+     * Lazily created, cached instance of the date time formatter that should be
+     * used for formatting the dates added to the metadata file.
+     */
+    private metadataDateTimeFormatter() {
+        if (this.cachedMetadataDateTimeFormatter)
+            return this.cachedMetadataDateTimeFormatter;
+
+        // AFAIK, Google's format is not documented. It also seems to vary with
+        // locale. This is a best attempt at constructing a formatter that
+        // mirrors the format used by the timestamps in the takeout JSON.
+        const formatter = new Intl.DateTimeFormat(i18n.language, {
+            month: "short",
+            day: "numeric",
+            year: "numeric",
+            hour: "numeric",
+            minute: "numeric",
+            second: "numeric",
+            timeZoneName: "short",
+            timeZone: "UTC",
+        });
+        this.cachedMetadataDateTimeFormatter = formatter;
+        return formatter;
+    }
+
     isExportInProgress = () => {
         return this.exportInProgress;
     };
@@ -1376,33 +1407,34 @@ const getCollectionExportedFiles = (
     return collectionExportedFiles;
 };
 
-const getGoogleLikeMetadataFile = (fileExportName: string, file: EnteFile) => {
+const getGoogleLikeMetadataFile = (
+    fileExportName: string,
+    file: EnteFile,
+    dateTimeFormatter: Intl.DateTimeFormat,
+) => {
     const metadata: Metadata = file.metadata;
-    const creationTime = Math.floor(metadata.creationTime / 1000000);
+    const creationTime = Math.floor(metadata.creationTime / 1e6);
     const modificationTime = Math.floor(
-        (metadata.modificationTime ?? metadata.creationTime) / 1000000,
+        (metadata.modificationTime ?? metadata.creationTime) / 1e6,
     );
-    const captionValue: string = file?.pubMagicMetadata?.data?.caption;
-    return JSON.stringify(
-        {
-            title: fileExportName,
-            caption: captionValue,
-            creationTime: {
-                timestamp: creationTime,
-                formatted: formatDateTimeShort(creationTime * 1000),
-            },
-            modificationTime: {
-                timestamp: modificationTime,
-                formatted: formatDateTimeShort(modificationTime * 1000),
-            },
-            geoData: {
-                latitude: metadata.latitude,
-                longitude: metadata.longitude,
-            },
-        },
-        null,
-        2,
-    );
+    const result: Record<string, unknown> = {
+        title: fileExportName,
+        creationTime: {
+            timestamp: creationTime,
+            formatted: dateTimeFormatter.format(
+                fileCreationPhotoDate(file, file.pubMagicMetadata?.data),
+            ),
+        },
+        modificationTime: {
+            timestamp: modificationTime,
+            formatted: dateTimeFormatter.format(modificationTime * 1000),
+        },
+    };
+    const caption = file?.pubMagicMetadata?.data?.caption;
+    if (caption) result.caption = caption;
+    const geoData = fileLocation(file);
+    if (geoData) result.geoData = geoData;
+    return JSON.stringify(result, null, 2);
 };
 
 export const getMetadataFolderExportPath = (collectionExportPath: string) =>
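For a feel of what the new metadata formatter emits: with the options above and the en-US locale, Intl.DateTimeFormat yields strings in the shape of the timestamps found in takeout JSONs (the exact output may vary across runtimes and ICU versions):

    const formatter = new Intl.DateTimeFormat("en-US", {
        month: "short",
        day: "numeric",
        year: "numeric",
        hour: "numeric",
        minute: "numeric",
        second: "numeric",
        timeZoneName: "short",
        timeZone: "UTC",
    });

    // Typically "Feb 17, 2021, 3:22:16 AM UTC" on V8 (may vary by runtime).
    console.log(formatter.format(new Date("2021-02-17T03:22:16Z")));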
@@ -1,97 +0,0 @@
-import log from "@/base/log";
-import type { Location, LocationTagData } from "types/entity";
-
-export interface City {
-    city: string;
-    country: string;
-    lat: number;
-    lng: number;
-}
-
-const DEFAULT_CITY_RADIUS = 10;
-const KMS_PER_DEGREE = 111.16;
-
-class LocationSearchService {
-    private cities: Array<City> = [];
-    private citiesPromise: Promise<void>;
-
-    async loadCities() {
-        try {
-            if (this.citiesPromise) {
-                return;
-            }
-            this.citiesPromise = fetch(
-                "https://static.ente.io/world_cities.json",
-            ).then((response) => {
-                return response.json().then((data) => {
-                    this.cities = data["data"];
-                });
-            });
-            await this.citiesPromise;
-        } catch (e) {
-            log.error("LocationSearchService loadCities failed", e);
-            this.citiesPromise = null;
-        }
-    }
-
-    async searchCities(searchTerm: string) {
-        try {
-            if (!this.citiesPromise) {
-                this.loadCities();
-            }
-            await this.citiesPromise;
-            return this.cities.filter((city) => {
-                return city.city
-                    .toLowerCase()
-                    .startsWith(searchTerm.toLowerCase());
-            });
-        } catch (e) {
-            log.error("LocationSearchService searchCities failed", e);
-            throw e;
-        }
-    }
-}
-
-export default new LocationSearchService();
-
-export function isInsideLocationTag(
-    location: Location,
-    locationTag: LocationTagData,
-) {
-    return isLocationCloseToPoint(
-        location,
-        locationTag.centerPoint,
-        locationTag.radius,
-    );
-}
-
-export function isInsideCity(location: Location, city: City) {
-    return isLocationCloseToPoint(
-        { latitude: city.lat, longitude: city.lng },
-        location,
-        DEFAULT_CITY_RADIUS,
-    );
-}
-
-function isLocationCloseToPoint(
-    centerPoint: Location,
-    location: Location,
-    radius: number,
-) {
-    const a = (radius * _scaleFactor(centerPoint.latitude)) / KMS_PER_DEGREE;
-    const b = radius / KMS_PER_DEGREE;
-    const x = centerPoint.latitude - location.latitude;
-    const y = centerPoint.longitude - location.longitude;
-    if ((x * x) / (a * a) + (y * y) / (b * b) <= 1) {
-        return true;
-    }
-    return false;
-}
-
-///The area bounded by the location tag becomes more elliptical with increase
-///in the magnitude of the latitude on the caritesian plane. When latitude is
-///0 degrees, the ellipse is a circle with a = b = r. When latitude incrases,
-///the major axis (a) has to be scaled by the secant of the latitude.
-function _scaleFactor(lat: number) {
-    return 1 / Math.cos(lat * (Math.PI / 180));
-}
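The deleted isLocationCloseToPoint treats the tag's radius (in km) as the axes of an ellipse in (latitude, longitude) degrees, scaling one semi-axis by the secant of the center's latitude as described in the comment above. The same arithmetic on concrete (illustrative) numbers, where sec(60°) = 2:

    const KMS_PER_DEGREE = 111.16;
    const secant = (latDeg: number) => 1 / Math.cos(latDeg * (Math.PI / 180));

    const center = { latitude: 60, longitude: 10 };
    const radiusKm = 10;

    const a = (radiusKm * secant(center.latitude)) / KMS_PER_DEGREE; // ≈ 0.1799°
    const b = radiusKm / KMS_PER_DEGREE; // ≈ 0.0900°

    // A point ~0.05° north-east of the center.
    const point = { latitude: 60.05, longitude: 10.05 };
    const x = center.latitude - point.latitude;
    const y = center.longitude - point.longitude;
    console.log((x * x) / (a * a) + (y * y) / (b * b) <= 1); // true → inside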
@@ -8,27 +8,27 @@
     mlStatusSnapshot,
     wipSearchPersons,
 } from "@/new/photos/services/ml";
-import { parseDateComponents } from "@/new/photos/services/search";
+import { createSearchQuery, search } from "@/new/photos/services/search";
 import type {
     SearchDateComponents,
     SearchPerson,
 } from "@/new/photos/services/search/types";
+import {
+    City,
+    ClipSearchScores,
+    SearchOption,
+    SearchQuery,
+    Suggestion,
+    SuggestionType,
+} from "@/new/photos/services/search/types";
+import type { LocationTag } from "@/new/photos/services/user-entity";
 import { EnteFile } from "@/new/photos/types/file";
 import { t } from "i18next";
 import { Collection } from "types/collection";
-import { EntityType, LocationTag, LocationTagData } from "types/entity";
-import {
-    ClipSearchScores,
-    Search,
-    SearchOption,
-    Suggestion,
-    SuggestionType,
-} from "types/search";
-import ComlinkSearchWorker from "utils/comlink/ComlinkSearchWorker";
 import { getUniqueFiles } from "utils/file";
-import { getLatestEntities } from "./entityService";
-import locationSearchService, { City } from "./locationSearchService";
 
 // Suggestions shown in the search dropdown's empty state, i.e. when the user
 // selects the search bar but does not provide any input.
 export const getDefaultOptions = async () => {
     return [
         await getMLStatusSuggestion(),
@@ -36,6 +36,7 @@ export const getDefaultOptions = async () => {
     ].filter((t) => !!t);
 };
 
+// Suggestions shown in the search dropdown when the user has typed something.
 export const getAutoCompleteSuggestions =
     (files: EnteFile[], collections: Collection[]) =>
     async (searchPhrase: string): Promise<SearchOption[]> => {
@@ -47,11 +48,12 @@ export const getAutoCompleteSuggestions =
         const suggestions: Suggestion[] = [
             await getClipSuggestion(searchPhrase),
             ...getFileTypeSuggestion(searchPhrase),
-            ...getDateSuggestion(searchPhrase),
+            // The following functionality has moved to createSearchQuery
+            // - getDateSuggestion(searchPhrase),
+            ...(await createSearchQuery(searchPhrase)),
             ...getCollectionSuggestion(searchPhrase, collections),
             getFileNameSuggestion(searchPhrase, files),
             getFileCaptionSuggestion(searchPhrase, files),
-            ...(await getLocationSuggestions(searchPhrase)),
         ].filter((suggestion) => !!suggestion);
 
         return convertSuggestionsToOptions(suggestions);
@@ -64,13 +66,10 @@ export const getAutoCompleteSuggestions =
 async function convertSuggestionsToOptions(
     suggestions: Suggestion[],
 ): Promise<SearchOption[]> {
-    const searchWorker = await ComlinkSearchWorker.getInstance();
     const previewImageAppendedOptions: SearchOption[] = [];
     for (const suggestion of suggestions) {
         const searchQuery = convertSuggestionToSearchQuery(suggestion);
-        const resultFiles = getUniqueFiles(
-            await searchWorker.search(searchQuery),
-        );
+        const resultFiles = getUniqueFiles(await search(searchQuery));
         if (searchQuery?.clip) {
             resultFiles.sort((a, b) => {
                 const aScore = searchQuery.clip.get(a.id);
@@ -159,13 +158,6 @@ export async function getMLStatusSuggestion(): Promise<Suggestion> {
     };
 }
 
-const getDateSuggestion = (searchPhrase: string): Suggestion[] =>
-    parseDateComponents(searchPhrase).map(({ components, label }) => ({
-        type: SuggestionType.DATE,
-        value: components,
-        label,
-    }));
-
 function getCollectionSuggestion(
     searchPhrase: string,
     collections: Collection[],
@@ -206,39 +198,6 @@ function getFileCaptionSuggestion(
     };
 }
 
-async function getLocationSuggestions(searchPhrase: string) {
-    const locationTagResults = await searchLocationTag(searchPhrase);
-    const locationTagSuggestions = locationTagResults.map(
-        (locationTag) =>
-            ({
-                type: SuggestionType.LOCATION,
-                value: locationTag.data,
-                label: locationTag.data.name,
-            }) as Suggestion,
-    );
-    const locationTagNames = new Set(
-        locationTagSuggestions.map((result) => result.label),
-    );
-
-    const citySearchResults =
-        await locationSearchService.searchCities(searchPhrase);
-
-    const nonConflictingCityResult = citySearchResults.filter(
-        (city) => !locationTagNames.has(city.city),
-    );
-
-    const citySearchSuggestions = nonConflictingCityResult.map(
-        (city) =>
-            ({
-                type: SuggestionType.CITY,
-                value: city,
-                label: city.city,
-            }) as Suggestion,
-    );
-
-    return [...locationTagSuggestions, ...citySearchSuggestions];
-}
-
 async function getClipSuggestion(
     searchPhrase: string,
 ): Promise<Suggestion | undefined> {
@@ -280,21 +239,6 @@ function searchFilesByCaption(searchPhrase: string, files: EnteFile[]) {
     );
 }
 
-async function searchLocationTag(searchPhrase: string): Promise<LocationTag[]> {
-    const locationTags = await getLatestEntities<LocationTagData>(
-        EntityType.LOCATION_TAG,
-    );
-    const matchedLocationTags = locationTags.filter((locationTag) =>
-        locationTag.data.name.toLowerCase().includes(searchPhrase),
-    );
-    if (matchedLocationTags.length > 0) {
-        log.info(
-            `Found ${matchedLocationTags.length} location tags for search phrase`,
-        );
-    }
-    return matchedLocationTags;
-}
-
 const searchClip = async (
     searchPhrase: string,
 ): Promise<ClipSearchScores | undefined> => {
@@ -304,7 +248,7 @@ const searchClip = async (
     return matches;
 };
 
-function convertSuggestionToSearchQuery(option: Suggestion): Search {
+function convertSuggestionToSearchQuery(option: Suggestion): SearchQuery {
     switch (option.type) {
         case SuggestionType.DATE:
             return {
@@ -313,7 +257,7 @@ function convertSuggestionToSearchQuery(option: Suggestion): Search {
 
         case SuggestionType.LOCATION:
             return {
-                location: option.value as LocationTagData,
+                location: option.value as LocationTag,
             };
 
         case SuggestionType.CITY:
@@ -4,7 +4,7 @@
     triggerMLStatusSync,
     triggerMLSync,
 } from "@/new/photos/services/ml";
-import { syncEntities } from "services/entityService";
+import { triggerSearchDataSync } from "@/new/photos/services/search";
 import { syncMapEnabled } from "services/userService";
 
 /**
@@ -16,9 +16,14 @@ export const triggerPreFileInfoSync = () => {
 };
 
 /**
- * Perform a soft "refresh" by making various API calls to fetch state from
- * remote, using it to update our local state, and triggering periodic jobs that
- * depend on the local state.
+ * Sync our local state with remote on page load for web and focus for desktop.
+ *
+ * This function makes various API calls to fetch state from remote, using it to
+ * update our local state, and triggering periodic jobs that depend on the local
+ * state.
+ *
+ * This runs on initial page load (on both web and desktop). In addition for
+ * desktop, it also runs each time the desktop app gains focus.
  *
  * TODO: This is called after we've synced the local files DBs with remote. That
  * code belongs here, but currently that state is persisted in the top level
@@ -30,7 +35,7 @@
 * before doing the file sync and thus should run immediately after login.
 */
 export const sync = async () => {
-    await syncEntities();
-    await syncMapEnabled();
+    await Promise.all([syncMapEnabled()]);
+    triggerSearchDataSync();
     if (isMLSupported) triggerMLSync();
 };
@@ -3,8 +3,10 @@
 import { ensureElectron } from "@/base/electron";
 import { nameAndExtension } from "@/base/file";
 import log from "@/base/log";
+import { type Location } from "@/base/types";
 import type { UploadItem } from "@/new/photos/services/upload/types";
 import { readStream } from "@/new/photos/utils/native-stream";
+import { maybeParseInt } from "@/utils/parse";
 
 /**
  * The data we read from the JSON metadata sidecar files.
@@ -16,7 +18,7 @@ import { readStream } from "@/new/photos/utils/native-stream";
 export interface ParsedMetadataJSON {
     creationTime?: number;
     modificationTime?: number;
-    location?: { latitude: number; longitude: number };
+    location?: Location;
 }
 
 export const MAX_FILE_NAME_LENGTH_GOOGLE_EXPORT = 46;
@@ -112,53 +114,64 @@ const parseMetadataJSONText = (text: string) => {
 
     const parsedMetadataJSON: ParsedMetadataJSON = {};
 
-    // The metadata provided by Google does not include the time zone where the
-    // photo was taken, it only has an epoch seconds value.
-    if (
-        metadataJSON["photoTakenTime"] &&
-        metadataJSON["photoTakenTime"]["timestamp"]
-    ) {
-        parsedMetadataJSON.creationTime =
-            metadataJSON["photoTakenTime"]["timestamp"] * 1e6;
-    } else if (
-        metadataJSON["creationTime"] &&
-        metadataJSON["creationTime"]["timestamp"]
-    ) {
-        parsedMetadataJSON.creationTime =
-            metadataJSON["creationTime"]["timestamp"] * 1e6;
-    }
+    parsedMetadataJSON.creationTime =
+        parseGTTimestamp(metadataJSON["photoTakenTime"]) ??
+        parseGTTimestamp(metadataJSON["creationTime"]);
 
-    if (
-        metadataJSON["modificationTime"] &&
-        metadataJSON["modificationTime"]["timestamp"]
-    ) {
-        parsedMetadataJSON.modificationTime =
-            metadataJSON["modificationTime"]["timestamp"] * 1e6;
-    }
+    parsedMetadataJSON.modificationTime = parseGTTimestamp(
        metadataJSON["modificationTime"],
    );
 
-    if (
-        metadataJSON["geoData"] &&
-        (metadataJSON["geoData"]["latitude"] !== 0.0 ||
-            metadataJSON["geoData"]["longitude"] !== 0.0)
-    ) {
-        parsedMetadataJSON.location = {
-            latitude: metadataJSON["geoData"]["latitude"],
-            longitude: metadataJSON["geoData"]["longitude"],
-        };
-    } else if (
-        metadataJSON["geoDataExif"] &&
-        (metadataJSON["geoDataExif"]["latitude"] !== 0.0 ||
-            metadataJSON["geoDataExif"]["longitude"] !== 0.0)
-    ) {
-        parsedMetadataJSON.location = {
-            latitude: metadataJSON["geoDataExif"]["latitude"],
-            longitude: metadataJSON["geoDataExif"]["longitude"],
-        };
-    }
+    parsedMetadataJSON.location =
+        parseGTLocation(metadataJSON["geoData"]) ??
+        parseGTLocation(metadataJSON["geoDataExif"]);
 
     return parsedMetadataJSON;
 };
 
+/**
+ * Parse a nullish epoch seconds timestamp string from a field in a Google
+ * Takeout JSON, converting it into epoch microseconds if it is found.
+ *
+ * Note that the metadata provided by Google does not include the time zone
+ * where the photo was taken, it only has an epoch seconds value. There is an
+ * associated formatted date value (e.g. "17 Feb 2021, 03:22:16 UTC") but that
+ * seems to be in UTC and doesn't have the time zone either.
+ */
+const parseGTTimestamp = (o: unknown): number | undefined => {
+    if (
+        o &&
+        typeof o == "object" &&
+        "timestamp" in o &&
+        typeof o.timestamp == "string"
+    ) {
+        const timestamp = maybeParseInt(o.timestamp);
+        if (timestamp) return timestamp * 1e6;
+    }
+    return undefined;
+};
+
+/**
+ * Parse a (latitude, longitude) location pair field in a Google Takeout JSON.
+ *
+ * Apparently Google puts in (0, 0) to indicate missing data, so this function
+ * only returns a parsed result if both components are present and non-zero.
+ */
+const parseGTLocation = (o: unknown): Location | undefined => {
+    if (
+        o &&
+        typeof o == "object" &&
+        "latitude" in o &&
+        typeof o.latitude == "number" &&
+        "longitude" in o &&
+        typeof o.longitude == "number"
+    ) {
+        const { latitude, longitude } = o;
+        if (latitude !== 0 || longitude !== 0) return { latitude, longitude };
+    }
+    return undefined;
+};
+
 /**
  * Return the matching entry (if any) from {@link parsedMetadataJSONMap} for the
  * {@link fileName} and {@link collectionID} combination.
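The two parsers added above are defensive over unknown JSON, and the (0, 0) convention means a zeroed geoData falls through to geoDataExif. A usage sketch on an illustrative sidecar fragment:

    const metadataJSON: Record<string, unknown> = {
        photoTakenTime: { timestamp: "1613532136" }, // epoch seconds, as a string
        geoData: { latitude: 0, longitude: 0 }, // (0, 0) means "missing"
        geoDataExif: { latitude: 48.8584, longitude: 2.2945 },
    };

    console.log(parseGTTimestamp(metadataJSON["photoTakenTime"]));
    // 1613532136000000 — epoch microseconds

    console.log(
        parseGTLocation(metadataJSON["geoData"]) ??
            parseGTLocation(metadataJSON["geoDataExif"]),
    );
    // { latitude: 48.8584, longitude: 2.2945 } — the zeroed geoData is skipped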
@@ -1,50 +0,0 @@
-export enum EntityType {
-    LOCATION_TAG = "location",
-}
-
-export interface EncryptedEntityKey {
-    userID: number;
-    encryptedKey: string;
-    type: EntityType;
-    header: string;
-    createdAt: number;
-}
-
-export interface EntityKey
-    extends Omit<EncryptedEntityKey, "encryptedKey" | "header"> {
-    data: string;
-}
-
-export interface EncryptedEntity {
-    id: string;
-    encryptedData: string;
-    header: string;
-    isDeleted: boolean;
-    createdAt: number;
-    updatedAt: number;
-    userID: number;
-}
-
-export interface Location {
-    latitude: number | null;
-    longitude: number | null;
-}
-
-export interface LocationTagData {
-    name: string;
-    radius: number;
-    aSquare: number;
-    bSquare: number;
-    centerPoint: Location;
-}
-
-export type LocationTag = Entity<LocationTagData>;
-
-export interface Entity<T>
-    extends Omit<EncryptedEntity, "encryptedData" | "header"> {
-    data: T;
-}
-
-export interface EntitySyncDiffResponse {
-    diff: EncryptedEntity[];
-}
@@ -1,65 +0,0 @@
-import { FileType } from "@/media/file-type";
-import type { MLStatus } from "@/new/photos/services/ml";
-import type {
-    SearchDateComponents,
-    SearchPerson,
-} from "@/new/photos/services/search/types";
-import { EnteFile } from "@/new/photos/types/file";
-import { City } from "services/locationSearchService";
-import { LocationTagData } from "types/entity";
-
-export enum SuggestionType {
-    DATE = "DATE",
-    LOCATION = "LOCATION",
-    COLLECTION = "COLLECTION",
-    FILE_NAME = "FILE_NAME",
-    PERSON = "PERSON",
-    INDEX_STATUS = "INDEX_STATUS",
-    FILE_CAPTION = "FILE_CAPTION",
-    FILE_TYPE = "FILE_TYPE",
-    CLIP = "CLIP",
-    CITY = "CITY",
-}
-
-export interface Suggestion {
-    type: SuggestionType;
-    label: string;
-    value:
-        | SearchDateComponents
-        | number[]
-        | SearchPerson
-        | MLStatus
-        | LocationTagData
-        | City
-        | FileType
-        | ClipSearchScores;
-    hide?: boolean;
-}
-
-export type Search = {
-    date?: SearchDateComponents;
-    location?: LocationTagData;
-    city?: City;
-    collection?: number;
-    files?: number[];
-    person?: SearchPerson;
-    fileType?: FileType;
-    clip?: ClipSearchScores;
-};
-
-export type SearchResultSummary = {
-    optionName: string;
-    fileCount: number;
-};
-
-export interface SearchOption extends Suggestion {
-    fileCount: number;
-    previewFiles: EnteFile[];
-}
-
-export type UpdateSearch = (
-    search: Search,
-    summary: SearchResultSummary,
-) => void;
-
-export type ClipSearchScores = Map<number, number>;
@@ -1,32 +0,0 @@
-import { haveWindow } from "@/base/env";
-import { ComlinkWorker } from "@/base/worker/comlink-worker";
-import type { Remote } from "comlink";
-import { type DedicatedSearchWorker } from "worker/search.worker";
-
-class ComlinkSearchWorker {
-    private comlinkWorkerInstance: Remote<DedicatedSearchWorker>;
-    private comlinkWorker: ComlinkWorker<typeof DedicatedSearchWorker>;
-
-    async getInstance() {
-        if (!this.comlinkWorkerInstance) {
-            if (!this.comlinkWorker)
-                this.comlinkWorker = getDedicatedSearchWorker();
-            this.comlinkWorkerInstance = await this.comlinkWorker.remote;
-        }
-        return this.comlinkWorkerInstance;
-    }
-}
-
-export const getDedicatedSearchWorker = () => {
-    if (haveWindow()) {
-        const cryptoComlinkWorker = new ComlinkWorker<
-            typeof DedicatedSearchWorker
-        >(
-            "ente-search-worker",
-            new Worker(new URL("worker/search.worker.ts", import.meta.url)),
-        );
-        return cryptoComlinkWorker;
-    }
-};
-
-export default new ComlinkSearchWorker();
@@ -1,12 +0,0 @@
-import { Entity } from "types/entity";
-
-export const getLatestVersionEntities = <T>(entities: Entity<T>[]) => {
-    const latestVersionEntities = new Map<string, Entity<T>>();
-    entities.forEach((entity) => {
-        const existingEntity = latestVersionEntities.get(entity.id);
-        if (!existingEntity || existingEntity.updatedAt < entity.updatedAt) {
-            latestVersionEntities.set(entity.id, entity);
-        }
-    });
-    return Array.from(latestVersionEntities.values());
-};
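The deleted getLatestVersionEntities deduplicates by id, keeping the entry with the highest updatedAt, which makes replaying it over a merged local + diff list idempotent. A minimal sketch of the same behavior on illustrative data:

    const entities = [
        { id: "a", updatedAt: 1 },
        { id: "a", updatedAt: 5 },
        { id: "b", updatedAt: 2 },
    ];
    const latest = new Map<string, { id: string; updatedAt: number }>();
    for (const e of entities) {
        const prev = latest.get(e.id);
        if (!prev || prev.updatedAt < e.updatedAt) latest.set(e.id, e);
    }
    console.log([...latest.values()]);
    // [{ id: "a", updatedAt: 5 }, { id: "b", updatedAt: 2 }]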
@@ -1,88 +0,0 @@
-import { getUICreationDate } from "@/media/file-metadata";
-import type { SearchDateComponents } from "@/new/photos/services/search/types";
-import { EnteFile } from "@/new/photos/types/file";
-import { getPublicMagicMetadataSync } from "@ente/shared/file-metadata";
-import * as Comlink from "comlink";
-import {
-    isInsideCity,
-    isInsideLocationTag,
-} from "services/locationSearchService";
-import { Search } from "types/search";
-
-export class DedicatedSearchWorker {
-    private files: EnteFile[] = [];
-
-    setFiles(files: EnteFile[]) {
-        this.files = files;
-    }
-
-    search(search: Search) {
-        return this.files.filter((file) => {
-            return isSearchedFile(file, search);
-        });
-    }
-}
-
-Comlink.expose(DedicatedSearchWorker, self);
-
-function isSearchedFile(file: EnteFile, search: Search) {
-    if (search?.collection) {
-        return search.collection === file.collectionID;
-    }
-
-    if (search?.date) {
-        return isDateComponentsMatch(
-            search.date,
-            getUICreationDate(file, getPublicMagicMetadataSync(file)),
-        );
-    }
-    if (search?.location) {
-        return isInsideLocationTag(
-            {
-                latitude: file.metadata.latitude,
-                longitude: file.metadata.longitude,
-            },
-            search.location,
-        );
-    }
-    if (search?.city) {
-        return isInsideCity(
-            {
-                latitude: file.metadata.latitude,
-                longitude: file.metadata.longitude,
-            },
-            search.city,
-        );
-    }
-    if (search?.files) {
-        return search.files.indexOf(file.id) !== -1;
-    }
-    if (search?.person) {
-        return search.person.files.indexOf(file.id) !== -1;
-    }
-    if (typeof search?.fileType !== "undefined") {
-        return search.fileType === file.metadata.fileType;
-    }
-    if (typeof search?.clip !== "undefined") {
-        return search.clip.has(file.id);
-    }
-    return false;
-}
-
-const isDateComponentsMatch = (
-    { year, month, day, weekday, hour }: SearchDateComponents,
-    date: Date,
-) => {
-    // Components are guaranteed to have at least one attribute present, so
-    // start by assuming true.
-    let match = true;
-
-    if (year) match = date.getFullYear() == year;
-    // JS getMonth is 0-indexed.
-    if (match && month) match = date.getMonth() + 1 == month;
-    if (match && day) match = date.getDate() == day;
-    if (match && weekday) match = date.getDay() == weekday;
-    if (match && hour) match = date.getHours() == hour;
-
-    return match;
-};
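isDateComponentsMatch in the deleted worker ANDs together only the components that are present, so a query like "Feb 2021" matches any time on any day that month. A usage sketch with illustrative dates:

    // 17 Feb 2021, 03:22 local time (JS months are 0-based).
    const date = new Date(2021, 1, 17, 3, 22);

    console.log(date.getFullYear() == 2021 && date.getMonth() + 1 == 2); // true
    console.log(date.getDay() == 3); // true — 17 Feb 2021 was a Wednesday
    console.log(date.getDate() == 18); // false — a day component of 18 would not match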
@@ -1,4 +1,5 @@
 import type { UserVerificationResponse } from "@/accounts/types/user";
+import log from "@/base/log";
 import { ensure } from "@/utils/ensure";
 import { VerticallyCentered } from "@ente/shared/components/Container";
 import EnteSpinner from "@ente/shared/components/EnteSpinner";
@@ -159,6 +160,7 @@ const Page: React.FC<PageProps> = ({ appContext }) => {
                 setFieldError(t("EXPIRED_CODE"));
             }
         } else {
+            log.error("OTT verification failed", e);
             setFieldError(`${t("UNKNOWN_ERROR")} ${JSON.stringify(e)}`);
         }
     }
@@ -237,9 +237,8 @@ export const clearBlobCaches = async () => {
     return isElectron() ? clearOPFSCaches() : clearWebCaches();
 };
 
-const clearWebCaches = async () => {
-    await Promise.allSettled(blobCacheNames.map((name) => caches.delete(name)));
-};
+const clearWebCaches = () =>
+    Promise.all(blobCacheNames.map((name) => caches.delete(name)));
 
 const clearOPFSCaches = async () => {
     const root = await navigator.storage.getDirectory();
@@ -89,10 +89,16 @@ const assertInWorker = <T>(x: T): T => {
 };
 
 /**
- * Generate a new randomly generated 256-bit key suitable for use with the *Box
+ * Return a new randomly generated 256-bit key suitable for use with the *Box
  * encryption functions.
  */
-export const generateBoxKey = libsodium.generateBoxKey;
+export const generateNewBoxKey = libsodium.generateNewBoxKey;
+
+/**
+ * Return a new randomly generated 256-bit key suitable for use with the *Blob
+ * or *Stream encryption functions.
+ */
+export const generateNewBlobOrStreamKey = libsodium.generateNewBlobOrStreamKey;
 
 /**
  * Encrypt the given data, returning a box containing the encrypted data and a
@@ -187,7 +193,8 @@ export const encryptMetadataJSON = async (r: {
     : sharedCryptoWorker().then((w) => w.encryptMetadataJSON(r));
 
 /**
- * Decrypt a box encrypted using {@link encryptBoxB64}.
+ * Decrypt a box encrypted using {@link encryptBoxB64} and returns the decrypted
+ * bytes.
 */
 export const decryptBox = (box: EncryptedBox, key: BytesOrB64) =>
     inWorker()
@@ -134,11 +134,22 @@ const bytes = async (bob: BytesOrB64) =>
 * This returns a new randomly generated 256-bit key suitable for being used
 * with libsodium's secretbox APIs.
 */
-export const generateBoxKey = async () => {
+export const generateNewBoxKey = async () => {
     await sodium.ready;
     return toB64(sodium.crypto_secretbox_keygen());
 };
 
+/**
+ * Generate a key for use with the *Blob or *Stream encryption functions.
+ *
+ * This returns a new randomly generated 256-bit key suitable for being used
+ * with libsodium's secretstream APIs.
+ */
+export const generateNewBlobOrStreamKey = async () => {
+    await sodium.ready;
+    return toB64(sodium.crypto_secretstream_xchacha20poly1305_keygen());
+};
+
 /**
  * Encrypt the given data using libsodium's secretbox APIs, using a randomly
  * generated nonce.
@@ -379,7 +390,7 @@ export async function encryptFileChunk(
 }
 
 /**
- * Decrypt the result of {@link encryptBoxB64}.
+ * Decrypt the result of {@link encryptBoxB64} and return the decrypted bytes.
 */
 export const decryptBox = async (
     { encryptedData, nonce }: EncryptedBox,
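The renames above make the two key flavors explicit: secretbox keys for the *Box helpers and secretstream keys for the *Blob/*Stream helpers, both resolving to a base64-encoded 256-bit key. A usage sketch (inside an async context):

    const boxKey = await generateNewBoxKey(); // for the *Box functions
    const blobKey = await generateNewBlobOrStreamKey(); // for *Blob / *Stream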
@@ -5,7 +5,6 @@ import { getUserLocales } from "get-user-locale";
import i18n from "i18next";
import resourcesToBackend from "i18next-resources-to-backend";
import { initReactI18next } from "react-i18next";
import { object, string } from "yup";

/**
 * List of all {@link SupportedLocale}s.
@@ -60,7 +59,7 @@ const defaultLocale: SupportedLocale = "en-US";
 * produce a string like "July 19, 2024".
 */
export const setupI18n = async () => {
    const localeString = savedLocaleStringMigratingIfNeeded();
    const localeString = localStorage.getItem("locale") ?? undefined;
    const locale = closestSupportedLocale(localeString);

    // https://www.i18next.com/overview/api
@@ -136,71 +135,6 @@ export const setupI18n = async () => {
    });
};

/**
 * Read and return the locale (if any) that we'd previously saved in local
 * storage.
 *
 * If it finds a locale stored in the old format, it also updates the saved
 * value and returns it in the new format.
 */
const savedLocaleStringMigratingIfNeeded = (): SupportedLocale | undefined => {
    const ls = localStorage.getItem("locale");

    // An older version of our code had stored only the language code, not the
    // full locale. Migrate these to the new locale format. Luckily, all such
    // languages can be unambiguously mapped to locales in our current set.
    //
    // This migration is dated Feb 2024. And it can be removed after a few
    // months, because by then either customers would've opened the app and
    // their setting migrated to the new format, or the browser would've cleared
    // the older local storage entry anyway (tag: Migration).

    if (!ls) {
        // Nothing found
        return undefined;
    }

    if (includes(supportedLocales, ls)) {
        // Already in the new format
        return ls;
    }

    let value: string | undefined;
    try {
        const oldFormatData = object({ value: string() }).json().cast(ls);
        value = oldFormatData.value;
    } catch (e) {
        // Not valid JSON, or not in the format we expected. This shouldn't
        // have happened; we're the only one setting it.
        log.error("Failed to parse locale obtained from local storage", e);
        // Also remove the old key, it is not parseable by us anymore.
        localStorage.removeItem("locale");
        return undefined;
    }

    const newValue = mapOldValue(value);
    if (newValue) localStorage.setItem("locale", newValue);

    return newValue;
};

const mapOldValue = (value: string | undefined) => {
    switch (value) {
        case "en":
            return "en-US";
        case "fr":
            return "fr-FR";
        case "zh":
            return "zh-CN";
        case "nl":
            return "nl-NL";
        case "es":
            return "es-ES";
        default:
            return undefined;
    }
};
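
For instance, a pre-Feb-2024 install that had stored just the language code would have been migrated once on startup:

    mapOldValue("fr"); // "fr-FR" - saved back to local storage in full form
    mapOldValue("de"); // undefined - falls through the switch, nothing saved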

/**
 * Return the closest / best matching {@link SupportedLocale}.
 *
@@ -1,5 +1,4 @@
import { getKV, setKV } from "@/base/kv";
import { inWorker } from "./env";
import { getKV } from "@/base/kv";

/**
 * Return the origin (scheme, host, port triple) that should be used for making
@@ -35,21 +34,10 @@ export const apiURL = async (path: string) => (await apiOrigin()) + path;
 *
 * Otherwise return undefined.
 */
export const customAPIOrigin = async () => {
    let origin = await getKV("apiOrigin");
    if (!origin && !inWorker()) {
        // TODO: Migration of apiOrigin from local storage to indexed DB. Added
        // 27 June 2024, 1.7.2-rc. Remove me after a bit (tag: Migration).
        const legacyOrigin = localStorage.getItem("apiOrigin");
        if (legacyOrigin !== null) {
            origin = legacyOrigin;
            if (origin) await setKV("apiOrigin", origin);
            localStorage.removeItem("apiOrigin");
        }
    }

    return origin ?? process.env.NEXT_PUBLIC_ENTE_ENDPOINT ?? undefined;
};
export const customAPIOrigin = async () =>
    (await getKV("apiOrigin")) ??
    process.env.NEXT_PUBLIC_ENTE_ENDPOINT ??
    undefined;
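
The net effect is a simple precedence chain: a KV-saved origin (e.g. one set by a self-hoster from the settings form further below) wins over the build-time environment variable, which wins over undefined. A hypothetical sketch (the origin and path are made up):

    await setKV("apiOrigin", "https://my-ente.example.org");
    // Assuming apiOrigin() prefers the custom origin, as the surrounding
    // code suggests, subsequent calls resolve against it:
    await apiURL("/files/diff"); // "https://my-ente.example.org/files/diff"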

/**
 * A convenience wrapper over {@link customAPIOrigin} that returns only the
@@ -3,7 +3,7 @@ import { decryptBox } from "./crypto";
import { toB64 } from "./crypto/libsodium";

/**
 * Return the user's master key (as a base64 string) from session storage.
 * Return the user's master key from session storage.
 *
 * Precondition: The user should be logged in.
 */
web/packages/base/types/index.ts (new file, 7 lines)
@@ -0,0 +1,7 @@
/**
 * A location, represented as a (latitude, longitude) pair.
 */
export interface Location {
    latitude: number;
    longitude: number;
}
@@ -1,12 +1,14 @@
import { decryptMetadataJSON, encryptMetadataJSON } from "@/base/crypto";
import { authenticatedRequestHeaders, ensureOk } from "@/base/http";
import { apiURL } from "@/base/origins";
import { type Location } from "@/base/types";
import {
    type EnteFile,
    type FilePublicMagicMetadata,
} from "@/new/photos/types/file";
import { mergeMetadata1 } from "@/new/photos/utils/file";
import { ensure } from "@/utils/ensure";
import { nullToUndefined } from "@/utils/transform";
import { z } from "zod";
import { FileType } from "./file-type";

@@ -327,15 +329,16 @@ const withoutNullAndUndefinedValues = (o: object) =>
    );

/**
 * Return the file's creation date in a form suitable for using in the UI.
 * Return the file's creation date as a Date in the hypothetical "timezone of
 * the photo".
 *
 * For all the details and nuance, see {@link toUIDate}.
 * For all the details and nuance, see {@link createPhotoDate}.
 */
export const getUICreationDate = (
export const fileCreationPhotoDate = (
    enteFile: EnteFile,
    publicMagicMetadata: PublicMagicMetadata | undefined,
) =>
    toUIDate(
    createPhotoDate(
        publicMagicMetadata?.dateTime ??
            publicMagicMetadata?.editedTime ??
            enteFile.metadata.creationTime,
@@ -559,7 +562,7 @@ export interface ParsedMetadata {
 */
    creationDate?: ParsedMetadataDate;
    /** The GPS coordinates where the photo was taken. */
    location?: { latitude: number; longitude: number };
    location?: Location;
}

/**
@@ -724,9 +727,9 @@ export const parseMetadataDate = (
const dropLast = (s: string) => (s ? s.substring(0, s.length - 1) : s);

/**
 * Return a date that can be used on the UI by constructing it from a
 * {@link ParsedMetadataDate}, or its {@link dateTime} component, or a UTC epoch
 * timestamp.
 * Return a date that can be used to represent a photo on the UI, by
 * constructing it from a {@link ParsedMetadataDate}, or its {@link dateTime}
 * component, or a UTC epoch timestamp.
 *
 * These dates are all hypothetically in the timezone of the place where the
 * photo was taken. Different photos might've been taken in different timezones,
@@ -745,7 +748,9 @@ const dropLast = (s: string) => (s ? s.substring(0, s.length - 1) : s);
 *
 * See also: [Note: Photos are always in local date/time].
 */
export const toUIDate = (dateLike: ParsedMetadataDate | string | number) => {
export const createPhotoDate = (
    dateLike: ParsedMetadataDate | string | number,
) => {
    switch (typeof dateLike) {
        case "object":
            // An ISO 8601 string without a timezone. The Date constructor will
@@ -760,3 +765,22 @@ export const toUIDate = (dateLike: ParsedMetadataDate | string | number) => {
            return new Date(dateLike / 1000);
    }
};
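
To make the dispatch concrete, two hedged examples (the resulting Date depends on the runtime's local timezone, which is exactly the "photo timezone" behaviour described above):

    // Epoch timestamps arrive in microseconds, and are scaled down to
    // milliseconds before being handed to the Date constructor:
    createPhotoDate(1721397600000000); // same instant as new Date(1721397600000)

    // A naive ISO 8601 string (no timezone suffix) is interpreted by the
    // Date constructor in the runtime's local timezone:
    createPhotoDate("2024-07-19T14:30:00");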

/**
 * Return the GPS coordinates (if any) present in the given {@link EnteFile}.
 */
export const fileLocation = (enteFile: EnteFile): Location | undefined => {
    // TODO: EnteFile types. Need to verify that metadata itself, and
    // metadata.lat/lng, cannot be null (I think they likely can; if so we need
    // to update the types). Need to suppress the linter meanwhile.

    // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition
    if (!enteFile.metadata) return undefined;

    const latitude = nullToUndefined(enteFile.metadata.latitude);
    const longitude = nullToUndefined(enteFile.metadata.longitude);

    if (latitude === undefined || longitude === undefined) return undefined;

    return { latitude, longitude };
};

@@ -69,17 +69,7 @@ const Contents: React.FC<ContentsProps> = (props) => {
    >();

    useEffect(
        () =>
            void getKV("apiOrigin").then((o) =>
                setInitialAPIOrigin(
                    // Migrate apiOrigin from local storage to indexed DB.
                    //
                    // This code was added 27 June 2024. Note that the legacy
                    // value was never in production builds, only nightlies, so
                    // this code can be removed soon (tag: Migration).
                    o ?? localStorage.getItem("apiOrigin") ?? "",
                ),
            ),
        () => void getKV("apiOrigin").then((o) => setInitialAPIOrigin(o ?? "")),
        [],
    );

@@ -219,12 +209,6 @@ const Form: React.FC<FormProps> = ({ initialAPIOrigin, onClose }) => {
    const updateAPIOrigin = async (origin: string) => {
        if (!origin) {
            await removeKV("apiOrigin");
            // Migrate apiOrigin from local storage to indexed DB.
            //
            // This code was added 27 June 2024. Note that the legacy value was
            // never in production builds, only nightlies, so this code can be
            // removed at some point soon (tag: Migration).
            localStorage.removeItem("apiOrigin");
            return;
        }
web/packages/new/photos/services/migrations.ts (new file, 78 lines)
@@ -0,0 +1,78 @@
import { isDesktop } from "@/base/app";
import { getKVN, removeKV, setKV } from "@/base/kv";
import log from "@/base/log";
import localForage from "@ente/shared/storage/localForage";
import { deleteDB } from "idb";

/**
 * App specific migrations.
 *
 * The app stores data in multiple places: local storage, IndexedDB, OPFS, and
 * not all of these support DB migrations. And even when they do, those are
 * rather heavyweight and complicated (e.g. IndexedDB).
 *
 * Further, there are various app level migrations, e.g. resetting the diff
 * fetch times, that don't correspond to DB migrations; these are just changes
 * we need to make to our locally persisted values and not the schemas
 * themselves.
 *
 * Thus we introduce the concept of app level migrations. This is some code
 * which runs early in the page load, and runs arbitrary blocks of code until it
 * reaches the last migration number.
 *
 * We can put all sorts of changes here: cleanup of legacy keys, re-triggers for
 * various fetches etc.
 *
 * This code usually runs fairly early on page load, but if you need specific
 * guarantees or have dependencies in the order of operations (beyond what is
 * captured by the sequential flow here), then this might not be appropriate.
 */
export const runMigrations = async () => {
    const m = (await getKVN("migrationLevel")) ?? 0;
    const latest = 2;
    if (m < latest) {
        log.info(`Running migrations ${m} => ${latest}`);
        if (m < 1 && isDesktop) await m0();
        if (m < 2) await m1();
        await setKV("migrationLevel", latest);
    }
};
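
Adding a later migration is mechanical; a hypothetical sketch of a third step (the step number and the key it clears are made up for illustration):

    // In runMigrations, bump `latest` to 3 and add:
    //     if (m < 3) await m2();

    // Added: <month year>. Prunable.
    const m2 = () =>
        Promise.all([
            // e.g. drop a stale sync time so that the data gets refetched.
            removeKV("someStaleSyncTime"),
        ]);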

// Some of these (indicated by "Prunable") can be no-oped in the future when
// almost all clients would've migrated over.

// Added: Aug 2024 (v1.7.3). Prunable.
const m0 = () =>
    Promise.all([
        // Delete the legacy face DB v1.
        deleteDB("mldata"),

        // Delete the legacy CLIP (mostly) related keys from LocalForage.
        localForage.removeItem("embeddings"),
        localForage.removeItem("embedding_sync_time"),
        localForage.removeItem("embeddings_v2"),
        localForage.removeItem("file_embeddings"),
        localForage.removeItem("onnx-clip-embedding_sync_time"),
        localForage.removeItem("file-ml-clip-face-embedding_sync_time"),

        // Delete keys for the legacy diff based sync.
        removeKV("embeddingSyncTime:onnx-clip"),
        removeKV("embeddingSyncTime:file-ml-clip-face"),

        // Delete the legacy face DB v2.
        deleteDB("face"),
    ]).then(() => {
        // Delete legacy ML keys.
        localStorage.removeItem("faceIndexingEnabled");
    });

// Added: Sep 2024 (v1.7.5-beta). Prunable.
const m1 = () =>
    // Older versions of the user-entities code kept the diff related state
    // in a different place. These entries are not needed anymore (the tags
    // themselves will get resynced).
    Promise.all([
        localForage.removeItem("location_tags"),
        localForage.removeItem("location_tags_key"),
        localForage.removeItem("location_tags_time"),
    ]);
web/packages/new/photos/services/ml/cgroups.ts (new file, 133 lines)
@@ -0,0 +1,133 @@
/**
 * A cgroup ("cluster group") is a group of clusters (possibly containing a
 * single cluster) that the user has interacted with.
 *
 * Interactions include hiding, merging and giving a name and/or a cover photo.
 *
 * The most frequent interaction is naming a {@link FaceCluster}, which promotes
 * it to become a {@link CGroup}. The promotion comes with the ability to be
 * synced with remote (as a "cgroup" user entity).
 *
 * Thereafter, the user may attach more clusters to the same {@link CGroup}.
 *
 * > A named cluster group can be thought of as a "person", though this is not
 * > necessarily an accurate characterization. e.g. there can be a named cluster
 * > group that contains face clusters of pets.
 *
 * The other form of interaction is hiding. The user may hide a single (unnamed)
 * cluster, or they may hide a named {@link CGroup}. In both cases, we promote
 * the cluster to a CGroup if needed so that their request to hide gets synced.
 *
 * While in our local representation we separately maintain clusters and link to
 * them from within CGroups by their clusterID, in the remote representation
 * clusters themselves don't get synced. Instead, the "cgroup" entities synced
 * with remote contain the clusters within themselves. So a group that gets
 * synced with remote looks something like:
 *
 *     { id, name, clusters: [{ clusterID, faceIDs }] }
 *
 */
export interface CGroup {
    /**
     * A nanoid for this cluster group.
     *
     * This is the ID of the "cgroup" user entity (the envelope), and it is not
     * contained as part of the group entity payload itself.
     */
    id: string;
    /**
     * A name assigned by the user to this cluster group.
     *
     * The client should handle both empty strings and undefined as indicating a
     * cgroup without a name. When the client needs to set this to an "empty"
     * value, which happens when hiding an unnamed cluster, it should set it to
     * an empty string. That is, expect `"" | undefined`, but set `""`.
     */
    name: string | undefined;
    /**
     * An unordered set of ids of the clusters that belong to this group.
     *
     * For ergonomics of transportation and persistence this is an array, but it
     * should conceptually be thought of as a set.
     */
    clusterIDs: string[];
    /**
     * True if this cluster group should be hidden.
     *
     * The user can hide both named cluster groups and single unnamed clusters.
     * If the user hides a single cluster that was offered as a suggestion to
     * them on a client, the client will create a new unnamed cgroup containing
     * it, and set its hidden flag to sync it with remote (so that other clients
     * can also stop showing this cluster).
     */
    isHidden: boolean;
    /**
     * The ID of the face that should be used as the cover photo for this
     * cluster group (if the user has set one).
     *
     * This is similar to the {@link displayFaceID}, the difference being:
     *
     * - {@link avatarFaceID} is the face selected by the user.
     *
     * - {@link displayFaceID} is the automatic placeholder, and only comes
     *   into effect if the user has not explicitly selected a face.
     */
    avatarFaceID: string | undefined;
    /**
     * Locally determined ID of the "best" face that should be used as the
     * display face, to represent this cluster group in the UI.
     *
     * This property is not synced with remote. For more details, see
     * {@link avatarFaceID}.
     */
    displayFaceID: string | undefined;
}
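
For example, hiding a so-far unnamed cluster would locally construct (and then sync) a cgroup along these lines; the ids are placeholders:

    const hiddenCGroup: CGroup = {
        id: "cgroup-id-placeholder",
        // Hiding an unnamed cluster: set (don't just expect) the empty string.
        name: "",
        clusterIDs: ["cluster-id-placeholder"],
        isHidden: true,
        avatarFaceID: undefined,
        displayFaceID: undefined,
    };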

/**
 * Synchronize the user's cluster groups with remote, running local clustering
 * if needed.
 *
 * A cgroup (cluster group) consists of clusters, each of which itself is a set
 * of faces.
 *
 *     cgroup << cluster << face
 *
 * CGroups are synced with remote, while clusters are local only (though the
 * clusters that are part of a cgroup do get synced with remote).
 *
 * Clusters are generated locally using the {@link clusterFaces} function. These
 * generated clusters are then mapped to cgroups based on various user actions:
 *
 * - The user can provide a name for a cluster ("name a person"). This
 *   upgrades a cluster into a cgroup, and it then gets synced via remote to
 *   the user's other clients.
 *
 * - They can attach more clusters to a cgroup ("merge clusters").
 *
 * - They can remove a cluster from a cgroup ("break clusters").
 *
 * - They can hide a cluster. This creates an unnamed cgroup so that the
 *   user's other clients know not to show it.
 */
export const syncCGroups = () => {
    // 1. Fetch existing cgroups for the user from remote.
    // 2. Save them to DB.
    // 3. Prune stale faceIDs from the clusters in the DB.
    // 4. Rerun clustering using the cgroups and clusters in DB.
    // 5. Save the generated clusters to DB.
    //
    // The user can see both the cgroups and clusters in the UI, but only the
    // cgroups are synced.
    // const syncCGroupsWithRemote()
    /*
     * After clustering, we also do some routine cleanup. Faces belonging to
     * files that have been deleted (including those in Trash) should be pruned
     * off.
     *
     * We should not make strict assumptions about the clusters we get from
     * remote. In particular, the same face ID can be in different clusters. In
     * such cases we should arbitrarily assign it to the last cluster we find it
     * in. Such leeway is intentionally provided to allow clients some slack in
     * how they implement the sync without needing to make a blocking API
     * request for every user interaction.
     */
};
@@ -3,6 +3,7 @@ import { newNonSecureID } from "@/base/id-worker";
import log from "@/base/log";
import { ensure } from "@/utils/ensure";
import type { EnteFile } from "../../types/file";
import type { CGroup } from "./cgroups";
import { faceDirection, type Face, type FaceIndex } from "./face";
import { dotProduct } from "./math";

@@ -28,91 +29,6 @@ export interface FaceCluster {
    faceIDs: string[];
}

/**
 * A cgroup ("cluster group") is a group of clusters (possibly containing a
 * single cluster) that the user has interacted with.
 *
 * Interactions include hiding, merging and giving a name and/or a cover photo.
 *
 * The most frequent interaction is naming a {@link FaceCluster}, which promotes
 * it to become a {@link CGroup}. The promotion comes with the ability to be
 * synced with remote (as a "cgroup" user entity).
 *
 * Thereafter, the user may attach more clusters to the same {@link CGroup}.
 *
 * > A named cluster group can be thought of as a "person", though this is not
 * > necessarily an accurate characterization. e.g. there can be a named cluster
 * > group that contains face clusters of pets.
 *
 * The other form of interaction is hiding. The user may hide a single (unnamed)
 * cluster, or they may hide a named {@link CGroup}. In both cases, we promote
 * the cluster to a CGroup if needed so that their request to hide gets synced.
 *
 * While in our local representation we separately maintain clusters and link to
 * them from within CGroups by their clusterID, in the remote representation
 * clusters themselves don't get synced. Instead, the "cgroup" entities synced
 * with remote contain the clusters within themselves. So a group that gets
 * synced with remote looks something like:
 *
 *     { id, name, clusters: [{ clusterID, faceIDs }] }
 *
 */
export interface CGroup {
    /**
     * A nanoid for this cluster group.
     *
     * This is the ID of the "cgroup" user entity (the envelope), and it is not
     * contained as part of the group entity payload itself.
     */
    id: string;
    /**
     * A name assigned by the user to this cluster group.
     *
     * The client should handle both empty strings and undefined as indicating a
     * cgroup without a name. When the client needs to set this to an "empty"
     * value, which happens when hiding an unnamed cluster, it should set it to
     * an empty string. That is, expect `"" | undefined`, but set `""`.
     */
    name: string | undefined;
    /**
     * An unordered set of ids of the clusters that belong to this group.
     *
     * For ergonomics of transportation and persistence this is an array, but it
     * should conceptually be thought of as a set.
     */
    clusterIDs: string[];
    /**
     * True if this cluster group should be hidden.
     *
     * The user can hide both named cluster groups and single unnamed clusters.
     * If the user hides a single cluster that was offered as a suggestion to
     * them on a client, the client will create a new unnamed cgroup containing
     * it, and set its hidden flag to sync it with remote (so that other clients
     * can also stop showing this cluster).
     */
    isHidden: boolean;
    /**
     * The ID of the face that should be used as the cover photo for this
     * cluster group (if the user has set one).
     *
     * This is similar to the {@link displayFaceID}, the difference being:
     *
     * - {@link avatarFaceID} is the face selected by the user.
     *
     * - {@link displayFaceID} is the automatic placeholder, and only comes
     *   into effect if the user has not explicitly selected a face.
     */
    avatarFaceID: string | undefined;
    /**
     * Locally determined ID of the "best" face that should be used as the
     * display face, to represent this cluster group in the UI.
     *
     * This property is not synced with remote. For more details, see
     * {@link avatarFaceID}.
     */
    displayFaceID: string | undefined;
}

export interface ClusteringOpts {
    minBlur: number;
    minScore: number;
@@ -149,37 +65,9 @@ export interface ClusterPreviewFace {
}

/**
 * Cluster faces into groups.
 *
 * A cgroup (cluster group) consists of clusters, each of which itself is a set
 * of faces.
 *
 *     cgroup << cluster << face
 *
 * This function generates clusters locally using a batched form of linear
 * Generates clusters from the given faces using a batched form of linear
 * clustering, with a bit of lookback (and a dollop of heuristics) to get the
 * clusters to merge across batches.
 *
 * The user can later tweak these clusters by performing the following actions
 * to the list of clusters that they can see:
 *
 * - They can provide a name for a cluster ("name a person"). This upgrades a
 *   cluster into a "cgroup", which is an entity that gets synced via remote
 *   to the user's other clients.
 *
 * - They can attach more clusters to a cgroup ("merge clusters")
 *
 * - They can remove a cluster from a cgroup ("break clusters").
 *
 * After clustering, we also do some routine cleanup. Faces belonging to files
 * that have been deleted (including those in Trash) should be pruned off.
 *
 * We should not make strict assumptions about the clusters we get from remote.
 * In particular, the same face ID can be in different clusters. In such cases
 * we should arbitrarily assign it to the last cluster we find it in. Such
 * leeway is intentionally provided to allow clients some slack in how they
 * implement the sync without needing to make a blocking API request for every
 * user interaction.
 */
export const clusterFaces = (
    faceIndexes: FaceIndex[],
@@ -1,9 +1,8 @@
import { removeKV } from "@/base/kv";
import log from "@/base/log";
import localForage from "@ente/shared/storage/localForage";
import { deleteDB, openDB, type DBSchema } from "idb";
import type { CGroup } from "./cgroups";
import type { LocalCLIPIndex } from "./clip";
import type { CGroup, FaceCluster } from "./cluster";
import type { FaceCluster } from "./cluster";
import type { LocalFaceIndex } from "./face";

/**
@@ -107,8 +106,6 @@ interface FileStatus {
let _mlDB: ReturnType<typeof openMLDB> | undefined;

const openMLDB = async () => {
    deleteLegacyDB();

    const db = await openDB<MLDBSchema>("ml", 1, {
        upgrade(db, oldVersion, newVersion) {
            log.info(`Upgrading ML DB ${oldVersion} => ${newVersion}`);
@@ -142,44 +139,6 @@ const openMLDB = async () => {
    return db;
};

const deleteLegacyDB = () => {
    // Delete the legacy face DB v1.
    //
    // This code was added June 2024 (v1.7.1-rc) and can be removed at some
    // point when most clients have migrated (tag: Migration).
    void deleteDB("mldata");

    // Delete the legacy CLIP (mostly) related keys from LocalForage.
    //
    // This code was added July 2024 (v1.7.2-rc) and can be removed at some
    // point when most clients have migrated (tag: Migration).
    void Promise.all([
        localForage.removeItem("embeddings"),
        localForage.removeItem("embedding_sync_time"),
        localForage.removeItem("embeddings_v2"),
        localForage.removeItem("file_embeddings"),
        localForage.removeItem("onnx-clip-embedding_sync_time"),
        localForage.removeItem("file-ml-clip-face-embedding_sync_time"),
    ]);

    // Delete keys for the legacy diff based sync.
    //
    // This code was added July 2024 (v1.7.3-beta). These keys were never
    // enabled outside of the nightly builds, so this cleanup is not a hard
    // need. Either way, it can be removed at some point when most clients have
    // migrated (tag: Migration).
    void Promise.all([
        removeKV("embeddingSyncTime:onnx-clip"),
        removeKV("embeddingSyncTime:file-ml-clip-face"),
    ]);

    // Delete the legacy face DB v2.
    //
    // This code was added Aug 2024 (v1.7.3-beta) and can be removed at some
    // point when most clients have migrated (tag: Migration).
    void deleteDB("face");
};

/**
 * @returns a lazily created, cached connection to the ML DB.
 */
@@ -191,8 +150,6 @@ const mlDB = () => (_mlDB ??= openMLDB());
 * This is meant to be called during logout on the main thread.
 */
export const clearMLDB = async () => {
    deleteLegacyDB();

    try {
        if (_mlDB) (await _mlDB).close();
    } catch (e) {

@@ -229,11 +229,6 @@ const mlLocalKey = "mlEnabled";
 * that is synced with remote.
 */
const isMLEnabledLocal = () => {
    // Delete legacy ML keys.
    //
    // This code was added August 2024 (v1.7.3-beta) and can be removed at some
    // point when most clients have migrated (tag: Migration).
    localStorage.removeItem("faceIndexingEnabled");
    return localStorage.getItem(mlLocalKey) == "1";
};
@@ -1,86 +1,71 @@
import { nullToUndefined } from "@/utils/transform";
import type { Component } from "chrono-node";
import * as chrono from "chrono-node";
import { masterKeyFromSession } from "@/base/session-store";
import { ComlinkWorker } from "@/base/worker/comlink-worker";
import i18n, { t } from "i18next";
import type { SearchDateComponents } from "./types";

interface DateSearchResult {
    components: SearchDateComponents;
    label: string;
}
import type { EnteFile } from "../../types/file";
import type { DateSearchResult, SearchQuery } from "./types";
import type { SearchWorker } from "./worker";

/**
 * Try to parse an arbitrary search string into sets of date components.
 *
 * e.g. "December 2022" will be parsed into a
 *
 *     [(year 2022, month 12, day undefined)]
 *
 * while "22 December 2022" will be parsed into
 *
 *     [(year 2022, month 12, day 22)]
 *
 * In addition, also return a formatted representation of the "best" guess at
 * the date that was intended by the search string.
 * Cached instance of the {@link ComlinkWorker} that wraps our web worker.
 */
export const parseDateComponents = (s: string): DateSearchResult[] =>
    parseChrono(s)
        .concat(parseYearComponents(s))
        .concat(parseHolidayComponents(s));
let _comlinkWorker: ComlinkWorker<typeof SearchWorker> | undefined;

export const parseChrono = (s: string): DateSearchResult[] =>
    chrono
        .parse(s)
        .map((result) => {
            const p = result.start;
            const component = (s: Component) =>
                p.isCertain(s) ? nullToUndefined(p.get(s)) : undefined;
/**
 * Lazily created, cached, instance of {@link SearchWorker}.
 */
const worker = () => (_comlinkWorker ??= createComlinkWorker()).remote;

            const year = component("year");
            const month = component("month");
            const day = component("day");
            const weekday = component("weekday");
            const hour = component("hour");
/**
 * Create a new instance of a comlink worker that wraps a {@link SearchWorker}
 * web worker.
 */
const createComlinkWorker = () =>
    new ComlinkWorker<typeof SearchWorker>(
        "search",
        new Worker(new URL("worker.ts", import.meta.url)),
    );

            if (!year && !month && !day && !weekday && !hour) return undefined;
            const components = { year, month, day, weekday, hour };
/**
 * Fetch any data that would be needed if the user were to search.
 */
export const triggerSearchDataSync = () =>
    void worker().then((w) => masterKeyFromSession().then((k) => w.sync(k)));

            const format: Intl.DateTimeFormatOptions = {};
            if (year) format.year = "numeric";
            if (month) format.month = "long";
            if (day) format.day = "numeric";
            if (weekday) format.weekday = "long";
            if (hour) {
                format.hour = "numeric";
                format.dayPeriod = "short";
            }
/**
 * Set the files over which we will search.
 */
export const setSearchableFiles = (enteFiles: EnteFile[]) =>
    void worker().then((w) => w.setEnteFiles(enteFiles));

            const formatter = new Intl.DateTimeFormat(i18n.language, format);
            const label = formatter.format(p.date());
            return { components, label };
        })
        .filter((x) => x !== undefined);
/**
 * Convert a search string into a reusable "search query" that can be passed on
 * to the {@link search} function.
 *
 * @param searchString The string we want to search for.
 */
export const createSearchQuery = (searchString: string) =>
    worker().then((w) =>
        w.createSearchQuery(searchString, i18n.language, holidays()),
    );

/** chrono does not parse years like "2024", so do it manually. */
const parseYearComponents = (s: string): DateSearchResult[] => {
    // s is already trimmed.
    if (s.length == 4) {
        const year = parseInt(s);
        if (year && year <= 9999) {
            const components = { year };
            return [{ components, label: s }];
        }
    }
    return [];
};
/**
 * Search for and return the list of {@link EnteFile}s that match the given
 * {@link search} query.
 */
export const search = async (search: SearchQuery) =>
    worker().then((w) => w.search(search));

// This cannot be a const, it needs to be evaluated lazily for the t() to work.
/**
 * A list of holidays - their yearly dates and localized names.
 *
 * We need to keep this on the main thread since it uses the t() function for
 * localization (although I haven't tried that in a web worker, it might work
 * there too). Also, it cannot be a const since it needs to be evaluated lazily
 * for the t() to work.
 */
const holidays = (): DateSearchResult[] => [
    { components: { month: 12, day: 25 }, label: t("CHRISTMAS") },
    { components: { month: 12, day: 24 }, label: t("CHRISTMAS_EVE") },
    { components: { month: 1, day: 1 }, label: t("NEW_YEAR") },
    { components: { month: 12, day: 31 }, label: t("NEW_YEAR_EVE") },
];

const parseHolidayComponents = (s: string) =>
    holidays().filter(({ label }) => label.toLowerCase().includes(s));
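
For a sense of how these pieces compose, an illustrative main-thread flow (UI wiring is elided, and `collectionFiles` is assumed to be in scope):

    triggerSearchDataSync(); // worker prefetches location tags and cities
    setSearchableFiles(collectionFiles);

    const [suggestion] = await createSearchQuery("december 2022");
    // For a DATE suggestion, `value` holds the parsed SearchDateComponents,
    // which can be fed back in as the query:
    const matches = await search({
        date: suggestion.value as SearchDateComponents,
    });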

@@ -1,9 +1,18 @@
/**
 * @file types shared between the main thread interface to search (`index.ts`)
 * and the search worker (`worker.ts`)
 * and the search worker that does the actual searching (`worker.ts`).
 */

import type { EnteFile } from "../../types/file";
import type { Location } from "@/base/types";
import { FileType } from "@/media/file-type";
import type { MLStatus } from "@/new/photos/services/ml";
import type { EnteFile } from "@/new/photos/types/file";
import type { LocationTag } from "../user-entity";

export interface DateSearchResult {
    components: SearchDateComponents;
    label: string;
}

/**
 * A parsed version of a potential natural language date time string.
@@ -48,3 +57,72 @@ export interface SearchPerson {
    displayFaceID: string;
    displayFaceFile: EnteFile;
}

/**
 * A city as identified by a static dataset.
 *
 * Each city is represented by its latitude and longitude. The dataset does not
 * have information about the city's estimated radius.
 */
export type City = Location & {
    /** Name of the city. */
    name: string;
};

// TODO-cgroup: Audit below

export enum SuggestionType {
    DATE = "DATE",
    LOCATION = "LOCATION",
    COLLECTION = "COLLECTION",
    FILE_NAME = "FILE_NAME",
    PERSON = "PERSON",
    INDEX_STATUS = "INDEX_STATUS",
    FILE_CAPTION = "FILE_CAPTION",
    FILE_TYPE = "FILE_TYPE",
    CLIP = "CLIP",
    CITY = "CITY",
}

export interface Suggestion {
    type: SuggestionType;
    label: string;
    value:
        | SearchDateComponents
        | number[]
        | SearchPerson
        | MLStatus
        | LocationTag
        | City
        | FileType
        | ClipSearchScores;
    hide?: boolean;
}

export interface SearchQuery {
    date?: SearchDateComponents;
    location?: LocationTag;
    city?: City;
    collection?: number;
    files?: number[];
    person?: SearchPerson;
    fileType?: FileType;
    clip?: ClipSearchScores;
}

export interface SearchResultSummary {
    optionName: string;
    fileCount: number;
}

export interface SearchOption extends Suggestion {
    fileCount: number;
    previewFiles: EnteFile[];
}

export type UpdateSearch = (
    search: SearchQuery,
    summary: SearchResultSummary,
) => void;

export type ClipSearchScores = Map<number, number>;
web/packages/new/photos/services/search/worker.ts (new file, 364 lines)
@@ -0,0 +1,364 @@
import { HTTPError } from "@/base/http";
import type { Location } from "@/base/types";
import { fileCreationPhotoDate, fileLocation } from "@/media/file-metadata";
import type { EnteFile } from "@/new/photos/types/file";
import { nullToUndefined } from "@/utils/transform";
import { getPublicMagicMetadataSync } from "@ente/shared/file-metadata";
import type { Component } from "chrono-node";
import * as chrono from "chrono-node";
import { expose } from "comlink";
import { z } from "zod";
import {
    savedLocationTags,
    syncLocationTags,
    type LocationTag,
} from "../user-entity";
import type {
    City,
    DateSearchResult,
    SearchDateComponents,
    SearchQuery,
    Suggestion,
} from "./types";
import { SuggestionType } from "./types";

type SearchableCity = City & {
    /**
     * Name of the city, lowercased. Precomputed to save an op during search.
     */
    lowercasedName: string;
};

type SearchableLocationTag = LocationTag & {
    /**
     * Name of the location tag, lowercased. Precomputed to save an op during
     * search.
     */
    lowercasedName: string;
};

/**
 * A web worker that runs the search asynchronously so that the main thread
 * remains responsive.
 */
export class SearchWorker {
    private enteFiles: EnteFile[] = [];
    private locationTags: SearchableLocationTag[] = [];
    private cities: SearchableCity[] = [];

    /**
     * Fetch any state we might need when the actual search happens.
     *
     * @param masterKey The user's master key. Web workers do not have access to
     * session storage so this key needs to be passed to us explicitly.
     */
    async sync(masterKey: Uint8Array) {
        return Promise.all([
            syncLocationTags(masterKey)
                .then(() => savedLocationTags())
                .then((ts) => {
                    this.locationTags = ts.map((t) => ({
                        ...t,
                        lowercasedName: t.name.toLowerCase(),
                    }));
                }),
            fetchCities().then((cs) => {
                this.cities = cs.map((c) => ({
                    ...c,
                    lowercasedName: c.name.toLowerCase(),
                }));
            }),
        ]);
    }

    /**
     * Set the files that we should search across.
     */
    setEnteFiles(enteFiles: EnteFile[]) {
        this.enteFiles = enteFiles;
    }

    /**
     * Convert a search string into a reusable query.
     */
    createSearchQuery(
        searchString: string,
        locale: string,
        holidays: DateSearchResult[],
    ) {
        return createSearchQuery(
            searchString,
            locale,
            holidays,
            this.locationTags,
            this.cities,
        );
    }

    /**
     * Return {@link EnteFile}s that satisfy the given {@link searchQuery}.
     */
    search(searchQuery: SearchQuery) {
        return this.enteFiles.filter((f) => isMatch(f, searchQuery));
    }
}

expose(SearchWorker);

const createSearchQuery = (
    searchString: string,
    locale: string,
    holidays: DateSearchResult[],
    locationTags: SearchableLocationTag[],
    cities: SearchableCity[],
): Suggestion[] => {
    // Normalize it by trimming whitespace and converting to lowercase.
    const s = searchString.trim().toLowerCase();
    if (s.length == 0) return [];

    return [
        dateSuggestions(s, locale, holidays),
        locationSuggestions(s, locationTags, cities),
    ].flat();
};

const dateSuggestions = (
    s: string,
    locale: string,
    holidays: DateSearchResult[],
) =>
    parseDateComponents(s, locale, holidays).map(({ components, label }) => ({
        type: SuggestionType.DATE,
        value: components,
        label,
    }));

/**
 * Try to parse an arbitrary search string into sets of date components.
 *
 * e.g. "December 2022" will be parsed into a
 *
 *     [(year 2022, month 12, day undefined)]
 *
 * while "22 December 2022" will be parsed into
 *
 *     [(year 2022, month 12, day 22)]
 *
 * In addition, also return a formatted representation of the "best" guess at
 * the date that was intended by the search string.
 */
const parseDateComponents = (
    s: string,
    locale: string,
    holidays: DateSearchResult[],
): DateSearchResult[] =>
    [
        parseChrono(s, locale),
        parseYearComponents(s),
        parseHolidayComponents(s, holidays),
    ].flat();

const parseChrono = (s: string, locale: string): DateSearchResult[] =>
    chrono
        .parse(s)
        .map((result) => {
            const p = result.start;
            const component = (s: Component) =>
                p.isCertain(s) ? nullToUndefined(p.get(s)) : undefined;

            const year = component("year");
            const month = component("month");
            const day = component("day");
            const weekday = component("weekday");
            const hour = component("hour");

            if (!year && !month && !day && !weekday && !hour) return undefined;
            const components = { year, month, day, weekday, hour };

            const format: Intl.DateTimeFormatOptions = {};
            if (year) format.year = "numeric";
            if (month) format.month = "long";
            if (day) format.day = "numeric";
            if (weekday) format.weekday = "long";
            if (hour) {
                format.hour = "numeric";
                format.dayPeriod = "short";
            }

            const formatter = new Intl.DateTimeFormat(locale, format);
            const label = formatter.format(p.date());
            return { components, label };
        })
        .filter((x) => x !== undefined);

/** chrono does not parse years like "2024", so do it manually. */
const parseYearComponents = (s: string): DateSearchResult[] => {
    // s is already trimmed.
    if (s.length == 4) {
        const year = parseInt(s);
        if (year && year <= 9999) {
            const components = { year };
            return [{ components, label: s }];
        }
    }
    return [];
};

const parseHolidayComponents = (s: string, holidays: DateSearchResult[]) =>
    holidays.filter(({ label }) => label.toLowerCase().includes(s));

/**
 * Zod schema describing world_cities.json.
 *
 * The entries also have a country field which we don't currently use.
 */
const RemoteWorldCities = z.object({
    data: z.array(
        z.object({
            city: z.string(),
            lat: z.number(),
            lng: z.number(),
        }),
    ),
});

const fetchCities = async () => {
    const res = await fetch("https://static.ente.io/world_cities.json");
    if (!res.ok) throw new HTTPError(res);
    return RemoteWorldCities.parse(await res.json()).data.map(
        ({ city, lat, lng }) => ({ name: city, latitude: lat, longitude: lng }),
    );
};

const locationSuggestions = (
    s: string,
    locationTags: SearchableLocationTag[],
    cities: SearchableCity[],
) => {
    const matchingLocationTags = locationTags.filter((t) =>
        t.lowercasedName.includes(s),
    );

    const matchingLocationTagLNames = new Set(
        matchingLocationTags.map((t) => t.lowercasedName),
    );

    const matchingCities = cities.filter(
        (c) =>
            c.lowercasedName.startsWith(s) &&
            !matchingLocationTagLNames.has(c.lowercasedName),
    );

    return [
        matchingLocationTags.map((t) => ({
            type: SuggestionType.LOCATION,
            value: t,
            label: t.name,
        })),
        matchingCities.map((c) => ({
            type: SuggestionType.CITY,
            value: c,
            label: c.name,
        })),
    ].flat();
};

const isMatch = (file: EnteFile, query: SearchQuery) => {
    if (query.collection) {
        return query.collection === file.collectionID;
    }

    if (query.date) {
        return isDateComponentsMatch(
            query.date,
            fileCreationPhotoDate(file, getPublicMagicMetadataSync(file)),
        );
    }

    if (query.location) {
        const location = fileLocation(file);
        if (!location) return false;

        return isInsideLocationTag(location, query.location);
    }

    if (query.city) {
        const location = fileLocation(file);
        if (!location) return false;

        return isInsideCity(location, query.city);
    }

    if (query.files) {
        return query.files.includes(file.id);
    }

    if (query.person) {
        return query.person.files.includes(file.id);
    }

    if (typeof query.fileType !== "undefined") {
        return query.fileType === file.metadata.fileType;
    }

    if (typeof query.clip !== "undefined") {
        return query.clip.has(file.id);
    }

    return false;
};

const isDateComponentsMatch = (
    { year, month, day, weekday, hour }: SearchDateComponents,
    date: Date,
) => {
    // Components are guaranteed to have at least one attribute present, so
    // start by assuming true.
    let match = true;

    if (year) match = date.getFullYear() == year;
    // JS getMonth is 0-indexed.
    if (match && month) match = date.getMonth() + 1 == month;
    if (match && day) match = date.getDate() == day;
    if (match && weekday) match = date.getDay() == weekday;
    if (match && hour) match = date.getHours() == hour;

    return match;
};
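
A quick illustration of the matching semantics (assuming, as the usage in this file suggests, that every field of SearchDateComponents is optional):

    // { year: 2022, month: 12 } matches any date in December 2022.
    // (Note that the JS Date constructor takes a 0-indexed month.)
    isDateComponentsMatch({ year: 2022, month: 12 }, new Date(2022, 11, 25)); // true

    // Unspecified components are simply not constrained:
    isDateComponentsMatch({ day: 25 }, new Date(2024, 6, 25)); // true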

const defaultCityRadius = 10;
const kmsPerDegree = 111.16;

const isInsideLocationTag = (location: Location, locationTag: LocationTag) =>
    // This code is included in the photos app which currently doesn't have
    // strict mode, and causes a spurious linter warning (but only when included
    // in photos!), so we need to ts-ignore.
    // eslint-disable-next-line @typescript-eslint/ban-ts-comment, @typescript-eslint/prefer-ts-expect-error
    // @ts-ignore
    isWithinRadius(location, locationTag.centerPoint, locationTag.radius);

const isInsideCity = (location: Location, city: City) =>
    isWithinRadius(location, city, defaultCityRadius);

const isWithinRadius = (
    location: Location,
    center: Location,
    radius: number,
) => {
    const a = (radius * radiusScaleFactor(center.latitude)) / kmsPerDegree;
    const b = radius / kmsPerDegree;
    const x = center.latitude - location.latitude;
    const y = center.longitude - location.longitude;
    return (x * x) / (a * a) + (y * y) / (b * b) <= 1;
};

/**
 * A latitude specific scaling factor to apply to the radius of a location
 * search.
 *
 * The area bounded by the location tag becomes more elliptical with increase in
 * the magnitude of the latitude on the cartesian plane. When latitude is 0
 * degrees, the ellipse is a circle with a = b = r. When latitude increases, the
 * major axis (a) has to be scaled by the secant of the latitude.
 */
const radiusScaleFactor = (lat: number) => 1 / Math.cos(lat * (Math.PI / 180));
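
For intuition, a worked example (values rounded): sec(0°) = 1, so at the equator the bound is a circle with a = b = r / 111.16; at 60° latitude sec(60°) = 2, so a 10 km radius gives a ≈ 20 / 111.16 ≈ 0.18 degrees on one axis against b ≈ 10 / 111.16 ≈ 0.09 degrees on the other:

    radiusScaleFactor(0); // 1
    radiusScaleFactor(60); // 2 (1 / cos 60°)
    // A point ~5.6 km (0.05° of latitude) from a center at 60°N falls well
    // inside a 10 km radius:
    isWithinRadius(
        { latitude: 60.05, longitude: 30 },
        { latitude: 60, longitude: 30 },
        10,
    ); // true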

@@ -2,17 +2,16 @@ import {
    decryptBlob,
    decryptBoxB64,
    encryptBoxB64,
    generateBoxKey,
    generateNewBlobOrStreamKey,
} from "@/base/crypto";
import { authenticatedRequestHeaders, ensureOk, HTTPError } from "@/base/http";
import { getKV, getKVN, setKV } from "@/base/kv";
import { apiURL } from "@/base/origins";
import { masterKeyFromSession } from "@/base/session-store";
import { ensure } from "@/utils/ensure";
import { nullToUndefined } from "@/utils/transform";
import { z } from "zod";
import { gunzip } from "./gzip";
import type { CGroup } from "./ml/cluster";
import type { CGroup } from "./ml/cgroups";
import { applyCGroupDiff } from "./ml/db";

/**
@@ -22,13 +21,129 @@ import { applyCGroupDiff } from "./ml/db";
 * e.g. location tags, cluster groups.
 */
export type EntityType =
    /**
     * A location tag.
     *
     * The entity data is base64(encrypt(json))
     */
    | "location"
    /**
     * A cluster group.
     *
     * Format: An encrypted string containing a gzipped JSON string representing
     * the cgroup data.
     * The entity data is base64(encrypt(gzip(json)))
     */
    "cgroup";
    | "cgroup";

/**
 * Sync our local location tags with those on remote.
 *
 * This function fetches all the location tag user entities from remote and
 * updates our local database. It uses local state to remember the last time it
 * synced, so each subsequent sync is a lightweight diff.
 *
 * @param masterKey The user's master key. This is used to encrypt and decrypt
 * the location tags specific entity key.
 */
export const syncLocationTags = async (masterKey: Uint8Array) => {
    const decoder = new TextDecoder();
    const parse = (id: string, data: Uint8Array): LocationTag => ({
        id,
        ...RemoteLocationTag.parse(JSON.parse(decoder.decode(data))),
    });

    const processBatch = async (entities: UserEntityChange[]) => {
        const existingTagsByID = new Map(
            (await savedLocationTags()).map((t) => [t.id, t]),
        );
        entities.forEach(({ id, data }) =>
            data
                ? existingTagsByID.set(id, parse(id, data))
                : existingTagsByID.delete(id),
        );
        return saveLocationTags([...existingTagsByID.values()]);
    };

    return syncUserEntity("location", masterKey, processBatch);
};

/** Zod schema for the tag that we get from or put to remote. */
const RemoteLocationTag = z.object({
    name: z.string(),
    radius: z.number(),
    centerPoint: z.object({
        latitude: z.number(),
        longitude: z.number(),
    }),
});

/** Zod schema for the tag that we persist locally. */
const LocalLocationTag = RemoteLocationTag.extend({
    id: z.string(),
});

export type LocationTag = z.infer<typeof LocalLocationTag>;
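
For reference, a location tag as it exists locally after a sync, with made-up values; `id` comes from the enclosing user entity envelope:

    const tag: LocationTag = {
        id: "entity-id-placeholder",
        name: "Home",
        radius: 0.5,
        centerPoint: { latitude: 48.8566, longitude: 2.3522 },
    };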
|
||||
|
||||
const saveLocationTags = (tags: LocationTag[]) =>
|
||||
setKV("locationTags", JSON.stringify(tags));
|
||||
|
||||
/**
|
||||
* Return all the location tags that are present locally.
|
||||
*
|
||||
* Use {@link syncLocationTags} to sync this list with remote.
|
||||
*/
|
||||
export const savedLocationTags = async () =>
|
||||
LocalLocationTag.array().parse(
|
||||
JSON.parse((await getKV("locationTags")) ?? "[]"),
|
||||
);
|
||||
|
||||
/**
|
||||
* Sync the {@link CGroup} entities that we have locally with remote.
|
||||
*
|
||||
* This fetches all the user entities corresponding to the "cgroup" entity type
|
||||
* from remote that have been created, updated or deleted since the last time we
|
||||
* checked.
|
||||
*
|
||||
* This diff is then applied to the data we have persisted locally.
|
||||
*
|
||||
* @param masterKey The user's master key. This is used to encrypt and decrypt
|
||||
* the cgroup specific entity key.
|
||||
*/
|
||||
export const syncCGroups = (masterKey: Uint8Array) => {
|
||||
const parse = async (id: string, data: Uint8Array): Promise<CGroup> => {
|
||||
const rp = RemoteCGroup.parse(JSON.parse(await gunzip(data)));
|
||||
return {
|
||||
id,
|
||||
name: rp.name,
|
||||
clusterIDs: rp.assigned.map(({ id }) => id),
|
||||
isHidden: rp.isHidden,
|
||||
avatarFaceID: rp.avatarFaceID,
|
||||
displayFaceID: undefined,
|
||||
};
|
||||
};
|
||||
|
||||
const processBatch = async (entities: UserEntityChange[]) =>
|
||||
await applyCGroupDiff(
|
||||
await Promise.all(
|
||||
entities.map(async ({ id, data }) =>
|
||||
data ? await parse(id, data) : id,
|
||||
),
|
||||
),
|
||||
);
|
||||
|
||||
return syncUserEntity("cgroup", masterKey, processBatch);
|
||||
};
|
||||
|
||||
const RemoteCGroup = z.object({
|
||||
name: z.string().nullish().transform(nullToUndefined),
|
||||
assigned: z.array(
|
||||
z.object({
|
||||
id: z.string(),
|
||||
faces: z.string().array(),
|
||||
}),
|
||||
),
|
||||
isHidden: z.boolean(),
|
||||
avatarFaceID: z.string().nullish().transform(nullToUndefined),
|
||||
});
|
||||
|
||||
/**
|
||||
* The maximum number of items to fetch in a single diff
|
||||
@@ -82,6 +197,41 @@ interface UserEntityChange {
|
||||
updatedAt: number;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sync of the given {@link type} entities that we have locally with remote.
|
||||
*
|
||||
* This fetches all the user entities of {@link type} from remote that have been
|
||||
* created, updated or deleted since the last time we checked.
|
||||
*
|
||||
* For each diff response, the {@link processBatch} is invoked to give a chance
|
||||
* to caller to apply the updates to the data we have persisted locally.
|
||||
*
|
||||
* The user's {@link masterKey} is used to decrypt (or encrypt, when generating
|
||||
* a new one) the entity key.
|
||||
*/
|
||||
const syncUserEntity = async (
|
||||
type: EntityType,
|
||||
masterKey: Uint8Array,
|
||||
processBatch: (entities: UserEntityChange[]) => Promise<void>,
|
||||
) => {
|
||||
const entityKeyB64 = await getOrCreateEntityKeyB64(type, masterKey);
|
||||
|
||||
let sinceTime = (await savedLatestUpdatedAt(type)) ?? 0;
|
||||
// eslint-disable-next-line @typescript-eslint/no-unnecessary-condition, no-constant-condition
|
||||
while (true) {
|
||||
const entities = await userEntityDiff(type, sinceTime, entityKeyB64);
|
||||
if (entities.length == 0) break;
|
||||
|
||||
await processBatch(entities);
|
||||
|
||||
sinceTime = entities.reduce(
|
||||
(max, entity) => Math.max(max, entity.updatedAt),
|
||||
sinceTime,
|
||||
);
|
||||
await saveLatestUpdatedAt(type, sinceTime);
|
||||
}
|
||||
};

/**
 * Zod schema for an item in the user entity diff.
 */
@@ -185,32 +335,34 @@ const userEntityDiff = async (
 *
 * See also, [Note: User entity keys].
 */
const getOrCreateEntityKeyB64 = async (type: EntityType) => {
const getOrCreateEntityKeyB64 = async (
    type: EntityType,
    masterKey: Uint8Array,
) => {
    // See if we already have it locally.
    const saved = await savedRemoteUserEntityKey(type);
    if (saved) return decryptEntityKey(saved);
    if (saved) return decryptEntityKey(saved, masterKey);

    // See if remote already has it.
    const existing = await getUserEntityKey(type);
    if (existing) {
        // Only save it if we can decrypt it to avoid corrupting our local state
        // in unforeseen circumstances.
        const result = await decryptEntityKey(existing);
        const result = await decryptEntityKey(existing, masterKey);
        await saveRemoteUserEntityKey(type, existing);
        return result;
    }

    // Nada. Create a new one, put it to remote, save it locally, and return.
    // TODO-Cluster Keep this read only, only add the writeable bits after other
    // stuff has been tested.
    throw new Error("Not implemented");
    // const generatedKeyB64 = await worker.generateEncryptionKey();
    // const encryptedNewKey = await worker.encryptToB64(
    //     generatedKeyB64,
    //     encryptionKeyB64,
    // );
    // await postUserEntityKey(type, newKey);
    // return decrypt(newKey);

    // As a sanity check, generate the key but immediately encrypt it as if it
    // were fetched from remote and then try to decrypt it before doing anything
    // with it.
    const generated = await generateNewEncryptedEntityKey(masterKey);
    const result = decryptEntityKey(generated, masterKey);
    await postUserEntityKey(type, generated);
    await saveRemoteUserEntityKey(type, generated);
    return result;
};
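
To make the local → remote → create resolution above concrete, a hedged sketch of the observable behaviour on a fresh client (assuming a `masterKey` in scope):

```ts
// Sketch only. On first use neither the local KV store nor remote has a key,
// so one is generated, posted to remote, cached locally, and returned.
const k1 = await getOrCreateEntityKeyB64("cgroup", masterKey);
// A second call short-circuits on the locally saved copy: no network round
// trips, same decrypted entity key.
const k2 = await getOrCreateEntityKeyB64("cgroup", masterKey);
// k1 === k2 (both are the entity key decrypted with the master key).
```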

const entityKeyKey = (type: EntityType) => `entityKey/${type}`;

@@ -235,24 +387,32 @@ const saveRemoteUserEntityKey = (
) => setKV(entityKeyKey(type), JSON.stringify(entityKey));

/**
 * Generate a new entity key and return it after encrypting it using the user's
 * master key.
 * Generate a new entity key and return it in the shape of an
 * {@link RemoteUserEntityKey} after encrypting it using the user's master key.
 */
// TODO: Temporary export to silence lint
export const generateEncryptedEntityKey = async () =>
    encryptBoxB64(await generateBoxKey(), await masterKeyFromSession());
const generateNewEncryptedEntityKey = async (masterKey: Uint8Array) => {
    const { encryptedData, nonce } = await encryptBoxB64(
        await generateNewBlobOrStreamKey(),
        masterKey,
    );
    // Remote calls it the header, but it really is the nonce.
    return { encryptedKey: encryptedData, header: nonce };
};

/**
 * Decrypt an encrypted entity key using the user's master key.
 */
const decryptEntityKey = async (remote: RemoteUserEntityKey) =>
const decryptEntityKey = async (
    remote: RemoteUserEntityKey,
    masterKey: Uint8Array,
) =>
    decryptBoxB64(
        {
            encryptedData: remote.encryptedKey,
            // Remote calls it the header, but it really is the nonce.
            nonce: remote.header,
        },
        await masterKeyFromSession(),
        masterKey,
    );
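
Since these two functions are exact inverses (modulo the header/nonce renaming), a short hedged round-trip sketch:

```ts
// Round-trip sketch using only functions from this diff: a freshly generated
// key, encrypted into the remote shape, must decrypt back under the same
// master key. The `header` field on the remote shape carries the nonce.
const remoteShaped = await generateNewEncryptedEntityKey(masterKey);
const entityKeyB64 = await decryptEntityKey(remoteShaped, masterKey);
```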

/**
@@ -283,7 +443,9 @@ const getUserEntityKey = async (
};

const RemoteUserEntityKey = z.object({
    /** Base64 encoded entity key, encrypted with the user's master key. */
    encryptedKey: z.string(),
    /** Base64 encoded nonce used during encryption of this entity key. */
    header: z.string(),
});

@@ -294,8 +456,7 @@ type RemoteUserEntityKey = z.infer<typeof RemoteUserEntityKey>;
 *
 * See: [Note: User entity keys]
 */
// TODO-Cluster remove export
export const postUserEntityKey = async (
const postUserEntityKey = async (
    type: EntityType,
    entityKey: RemoteUserEntityKey,
) => {
@@ -325,69 +486,3 @@ const savedLatestUpdatedAt = (type: EntityType) =>
 */
const saveLatestUpdatedAt = (type: EntityType, value: number) =>
    setKV(latestUpdatedAtKey(type), value);

/**
 * Sync the {@link CGroup} entities that we have locally with remote.
 *
 * This fetches all the user entities corresponding to the "cgroup" entity type
 * from remote that have been created, updated or deleted since the last time we
 * checked.
 *
 * This diff is then applied to the data we have persisted locally.
 */
export const syncCGroups = async () => {
    const type: EntityType = "cgroup";

    const entityKeyB64 = await getOrCreateEntityKeyB64(type);

    const parse = async (id: string, data: Uint8Array): Promise<CGroup> => {
        const rp = RemoteCGroup.parse(JSON.parse(await gunzip(data)));
        return {
            id,
            name: rp.name,
            clusterIDs: rp.assigned.map(({ id }) => id),
            isHidden: rp.isHidden,
            avatarFaceID: rp.avatarFaceID,
            displayFaceID: undefined,
        };
    };

    let sinceTime = (await savedLatestUpdatedAt(type)) ?? 0;
    // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition, no-constant-condition
    while (true) {
        const entities = await userEntityDiff(type, sinceTime, entityKeyB64);
        if (entities.length == 0) break;

        await applyCGroupDiff(
            await Promise.all(
                entities.map(async ({ id, data }) =>
                    data ? await parse(id, data) : id,
                ),
            ),
        );

        sinceTime = entities.reduce(
            (max, entity) => Math.max(max, entity.updatedAt),
            sinceTime,
        );
        await saveLatestUpdatedAt(type, sinceTime);
    }
};
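
The Promise.all mapping hands applyCGroupDiff a mixed array; a hedged illustration of its shape (the values are invented):

```ts
// Parsed CGroup objects stand for creations/updates; bare string ids stand
// for entities that were deleted on remote (their diff entry had no data).
const batch: (CGroup | string)[] = [
    {
        id: "entity-1",
        name: "Alice",
        clusterIDs: ["cluster-1", "cluster-2"],
        isHidden: false,
        avatarFaceID: undefined,
        displayFaceID: undefined,
    },
    "entity-2", // deleted on remote
];
await applyCGroupDiff(batch);
```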

/** Zod schema for the {@link RemoteCGroup} type. */
const RemoteCGroup = z.object({
    name: z.string().nullish().transform(nullToUndefined),
    assigned: z.array(
        z.object({
            id: z.string(),
            faces: z.string().array(),
        }),
    ),
    isHidden: z.boolean(),
    avatarFaceID: z.string().nullish().transform(nullToUndefined),
});

/**
 * Contents of a "cgroup" user entity, as synced via remote.
 */
type RemoteCGroup = z.infer<typeof RemoteCGroup>;
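
To illustrate the schema's nullish handling, a small hedged example of what RemoteCGroup.parse would accept and produce (the payload values are made up):

```ts
// `name` and `avatarFaceID` may arrive as null, undefined, or missing; the
// nullToUndefined transform normalizes all three to undefined.
const rp = RemoteCGroup.parse({
    name: null, // -> undefined after the transform
    assigned: [{ id: "cluster-1", faces: ["face-1", "face-2"] }],
    isHidden: false,
    // avatarFaceID omitted entirely -> undefined
});
console.log(rp.name, rp.avatarFaceID); // undefined undefined
```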

@@ -3,7 +3,7 @@ import {
    decryptPublicMagicMetadata,
    type PublicMagicMetadata,
} from "@/media/file-metadata";
import { EnteFile } from "@/new/photos/types/file";
import type { EnteFile } from "@/new/photos/types/file";
import { fileLogID } from "@/new/photos/utils/file";

/**

@@ -1,4 +1,4 @@
import { removeKV, setKV } from "@/base/kv";
import { getKV, removeKV, setKV } from "@/base/kv";
import log from "@/base/log";

export enum LS_KEYS {
@@ -68,6 +68,17 @@ export const setLSUser = async (user: object) => {
 * inlined into `setLSUser` (tag: Migration).
 */
export const migrateKVToken = async (user: unknown) => {
    // Throw an error if the data is in local storage but not in IndexedDB.
    // This is a precursor to inlining this code.
    // TODO(REL): Remove this sanity check after a few days.
    const oldLSUser = getData(LS_KEYS.USER);
    const wasMissing =
        oldLSUser &&
        typeof oldLSUser == "object" &&
        "token" in oldLSUser &&
        typeof oldLSUser.token == "string" &&
        !(await getKV("token"));

    user &&
    typeof user == "object" &&
    "id" in user &&
@@ -81,4 +92,9 @@ export const migrateKVToken = async (user: unknown) => {
    typeof user.token == "string"
        ? await setKV("token", user.token)
        : await removeKV("token");

    if (wasMissing)
        throw new Error(
            "The user's token was present in local storage but not in IndexedDB",
        );
};
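
For clarity, a hedged sketch of the inconsistent state this sanity check guards against (keys and helpers are the ones used in this diff):

```ts
// The migration copies user.token from local storage into the KV store. The
// check throws only when the old local-storage record still has a string
// token but the KV store has no "token" entry — i.e. an earlier migration
// that should already have run apparently didn't take.
const oldLSUser = getData(LS_KEYS.USER); // e.g. { id: 1, token: "abc..." }
const kvToken = await getKV("token"); // undefined on a broken migration
// `wasMissing` above is exactly: LS has a string token && !kvToken.
```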

@@ -9,13 +9,6 @@ const dateTimeFullFormatter1 = new Intl.DateTimeFormat(i18n.language, {
const dateTimeFullFormatter2 = new Intl.DateTimeFormat(i18n.language, {
    year: "numeric",
});
const dateTimeShortFormatter = new Intl.DateTimeFormat(i18n.language, {
    month: "short",
    day: "numeric",
    year: "numeric",
    hour: "2-digit",
    minute: "2-digit",
});

const timeFormatter = new Intl.DateTimeFormat(i18n.language, {
    timeStyle: "short",
@@ -37,10 +30,6 @@ export function formatDate(date: number | Date) {
        .join(" ");
}

export function formatDateTimeShort(date: number | Date) {
    return dateTimeShortFormatter.format(date);
}

export function formatTime(date: number | Date) {
    return timeFormatter.format(date).toUpperCase();
}
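
For reference, a hedged illustration of what the removed short formatter produced next to the retained time formatter (en-US output shown; other locales differ):

```ts
const d = new Date("2024-02-03T14:05:00");
// month: "short", day/year numeric, 2-digit hour and minute:
console.log(formatDateTimeShort(d)); // ~ "Feb 3, 2024, 02:05 PM"
// timeStyle: "short", then upper-cased:
console.log(formatTime(d)); // ~ "2:05 PM"
```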
web/packages/utils/parse.ts (new file, 19 lines)
@@ -0,0 +1,19 @@
/**
 * A wrapper over parseInt that deals with its sheNaNigans.
 *
 * This function nominally takes a string as input (though the implementation
 * is meant to work for arbitrary JavaScript values). It parses it into a base
 * 10 integer. If the result is NaN, it returns undefined, otherwise it returns
 * the parsed integer.
 *
 * From MDN:
 *
 * > To be sure that you are working with numbers, coerce the value to a number
 * > and use Number.isNaN() to test the result.
 * >
 * > https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/isNaN
 */
export const maybeParseInt = (s: string) => {
    const n = parseInt(s, 10);
    return Number.isNaN(n) ? undefined : n;
};
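
A few examples of the behaviour this wrapper pins down, parseInt's usual quirks included:

```ts
maybeParseInt("42"); // 42
maybeParseInt("012"); // 12 — the explicit radix 10 avoids octal surprises
maybeParseInt("17px"); // 17 — parseInt still stops at the first non-digit
maybeParseInt("px17"); // undefined — parseInt returned NaN
maybeParseInt(""); // undefined
```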