forked from admin/french-vocab
fix: harden backup import typing
This commit is contained in:
@@ -1,4 +1,5 @@
|
|||||||
import Dexie, { type Table } from 'dexie';
|
import Dexie, { type Table } from 'dexie';
|
||||||
|
import type { TtsSettings } from '../types/settings';
|
||||||
import type { Word, StudyProgress, StudyStats } from '../types/vocabulary';
|
import type { Word, StudyProgress, StudyStats } from '../types/vocabulary';
|
||||||
|
|
||||||
export interface WordEntry extends Word {
|
export interface WordEntry extends Word {
|
||||||
@@ -9,11 +10,32 @@ export interface ProgressEntry extends StudyProgress {
|
|||||||
syncedAt?: Date;
|
syncedAt?: Date;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/** Key/value row persisted in the `settings` object store (see `settings!: Table<SettingsEntry>`). */
interface SettingsEntry {
  /** Primary key identifying the settings record. */
  key: string;
  /** Persisted TTS (text-to-speech) configuration payload. */
  value: TtsSettings;
}
|
||||||
|
|
||||||
|
/**
 * Word record as it appears in a backup file: `addedAt` may be a
 * serialized string (e.g. after a JSON round-trip) or missing entirely;
 * `normalizeWordEntry` revives it into a `Date` on import.
 */
type ImportedWordEntry = Omit<Word, 'addedAt'> & {
  addedAt?: Date | string;
};
|
||||||
|
|
||||||
|
/**
 * Progress record as it appears in a backup file: the date fields may
 * arrive as serialized strings; `normalizeProgressEntry` converts them
 * back to `Date` objects before insertion.
 */
interface ImportedProgressEntry extends Omit<StudyProgress, 'nextReviewDate' | 'lastStudiedDate'> {
  nextReviewDate: Date | string;
  lastStudiedDate: Date | string;
  syncedAt?: Date | string;
}
|
||||||
|
|
||||||
|
/** Top-level shape of an exported backup; every section is optional and skipped when absent. */
interface BackupPayload {
  words?: ImportedWordEntry[];
  progress?: ImportedProgressEntry[];
  stats?: StudyStats;
}
|
||||||
|
|
||||||
export class FrenchVocabDB extends Dexie {
|
export class FrenchVocabDB extends Dexie {
|
||||||
words!: Table<WordEntry>;
|
words!: Table<WordEntry>;
|
||||||
progress!: Table<ProgressEntry>;
|
progress!: Table<ProgressEntry>;
|
||||||
stats!: Table<StudyStats & { id: string }>;
|
stats!: Table<StudyStats & { id: string }>;
|
||||||
settings!: Table<{ key: string; value: any }>;
|
settings!: Table<SettingsEntry>;
|
||||||
|
|
||||||
constructor() {
|
constructor() {
|
||||||
super('FrenchVocabDB');
|
super('FrenchVocabDB');
|
||||||
@@ -564,16 +586,36 @@ export async function exportData() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Import data from a backup payload
|
||||||
export async function importData(data: any) {
|
const normalizeWordEntry = (word: ImportedWordEntry): WordEntry => ({
|
||||||
if (data.words) {
|
...word,
|
||||||
|
addedAt: word.addedAt ? new Date(word.addedAt) : new Date(),
|
||||||
|
});
|
||||||
|
|
||||||
|
const normalizeProgressEntry = (progress: ImportedProgressEntry): ProgressEntry => ({
|
||||||
|
...progress,
|
||||||
|
nextReviewDate: new Date(progress.nextReviewDate),
|
||||||
|
lastStudiedDate: new Date(progress.lastStudiedDate),
|
||||||
|
syncedAt: progress.syncedAt ? new Date(progress.syncedAt) : undefined,
|
||||||
|
});
|
||||||
|
|
||||||
|
export async function importData(data: unknown) {
|
||||||
|
if (!data || typeof data !== 'object') {
|
||||||
|
throw new Error('Invalid backup data.');
|
||||||
|
}
|
||||||
|
|
||||||
|
const backup = data as BackupPayload;
|
||||||
|
|
||||||
|
if (Array.isArray(backup.words)) {
|
||||||
await db.words.clear();
|
await db.words.clear();
|
||||||
await db.words.bulkAdd(data.words);
|
await db.words.bulkAdd(backup.words.map(normalizeWordEntry));
|
||||||
}
|
}
|
||||||
if (data.progress) {
|
|
||||||
|
if (Array.isArray(backup.progress)) {
|
||||||
await db.progress.clear();
|
await db.progress.clear();
|
||||||
await db.progress.bulkAdd(data.progress);
|
await db.progress.bulkAdd(backup.progress.map(normalizeProgressEntry));
|
||||||
}
|
}
|
||||||
if (data.stats) {
|
|
||||||
await db.stats.put({ ...data.stats, id: 'main' });
|
if (backup.stats) {
|
||||||
|
await db.stats.put({ ...backup.stats, id: 'main' });
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
Reference in New Issue
Block a user