diff --git a/app.vue b/app.vue
index 112f8ef..226d6e1 100644
--- a/app.vue
+++ b/app.vue
@@ -35,7 +35,10 @@ onMounted(async () => {
   await settingsStore.loadConfig()
   await settingsStore.loadDevnetManifest()
   nodesStore.init()
-  filesStore.loadHistory()
+  // Await: any persistHistory triggered before this resolves would otherwise
+  // run against an empty `files` array, and the fail-closed guard in the
+  // store only kicks in once loadHistory has flipped its failure flag.
+  await filesStore.loadHistory()
   filesStore.setupProgressListeners()
   updaterStore.checkForUpdate()
   settingsStore.reconnectIndelible()
diff --git a/src-tauri/src/config.rs b/src-tauri/src/config.rs
index 4755cc8..b7619da 100644
--- a/src-tauri/src/config.rs
+++ b/src-tauri/src/config.rs
@@ -193,7 +193,14 @@ impl UploadHistory {
         let dir = config_path();
         std::fs::create_dir_all(&dir)?;
         let content = serde_json::to_string_pretty(self)?;
-        std::fs::write(Self::history_file(), content)?;
+        // Write to a tempfile alongside the target then rename. A crash or
+        // short write that hits the tempfile leaves the live history file
+        // intact — `std::fs::write` would truncate it before failing, and
+        // every prior upload's datamap on disk would lose its index entry.
+        let final_path = Self::history_file();
+        let tmp_path = dir.join("upload_history.json.tmp");
+        std::fs::write(&tmp_path, content)?;
+        std::fs::rename(&tmp_path, &final_path)?;
         Ok(())
     }
 }
diff --git a/stores/files.ts b/stores/files.ts
index 9a0c5c9..2b6185f 100644
--- a/stores/files.ts
+++ b/stores/files.ts
@@ -170,6 +170,11 @@ export const useFilesStore = defineStore('files', {
     files: [] as FileEntry[],
     nextId: 1,
     historyLoaded: false,
+    /** True if loadHistory threw and we never populated `files` from disk.
+     * persistHistory must refuse to write while this is set — otherwise the
+     * next upload-complete/remove/clear would overwrite the on-disk file
+     * with an empty array, orphaning every prior datamap entry. */
+    historyLoadFailed: false,
     /** True once the Rust progress event listeners have been wired up.
      * Idempotent — safe to call setupProgressListeners() multiple times. */
     _progressListenersStarted: false,
@@ -257,13 +262,23 @@ export const useFilesStore = defineStore('files', {
           })
         }
         this.historyLoaded = true
+        this.historyLoadFailed = false
       } catch (e) {
         console.error('Failed to load upload history:', e)
         this.historyLoaded = true
+        this.historyLoadFailed = true
       }
     },
 
     async persistHistory() {
+      // Fail-closed: if we never successfully loaded the history, refuse to
+      // write. Otherwise a load error would set `files = []` and the next
+      // upload-complete/remove/clear would clobber upload_history.json,
+      // orphaning every prior datamap on disk.
+      if (this.historyLoadFailed) {
+        console.warn('Skipping upload history save — initial load failed; not overwriting on-disk file.')
+        return
+      }
       // Only uploads are persisted — downloads are intentionally in-memory
       // so the table starts fresh each session.
       const entries: UploadHistoryEntry[] = this.files
diff --git a/tests/stores/files.test.ts b/tests/stores/files.test.ts
new file mode 100644
index 0000000..df34daa
--- /dev/null
+++ b/tests/stores/files.test.ts
@@ -0,0 +1,103 @@
+import { describe, it, expect, beforeEach, vi } from 'vitest'
+import { mockInvoke, resetTauriMocks, setMockInvokeHandler } from '../mocks/tauri'
+import { useFilesStore } from '~/stores/files'
+
+describe('files store — upload history persistence', () => {
+  let store: ReturnType<typeof useFilesStore>
+
+  beforeEach(() => {
+    resetTauriMocks()
+    store = useFilesStore()
+    store.$reset()
+  })
+
+  describe('loadHistory + persistHistory', () => {
+    it('marks load successful and allows persistHistory to write', async () => {
+      const saveCalls: any[] = []
+      setMockInvokeHandler((cmd, args) => {
+        if (cmd === 'load_upload_history') {
+          return [
+            {
+              name: 'foo.pdf',
+              size_bytes: 1024,
+              address: '0xabc',
+              cost: '0.01',
+              uploaded_at: '2026-05-01T00:00:00Z',
+              data_map_file: '/cfg/foo.datamap',
+              gas_cost: null,
+              public_address: null,
+            },
+          ]
+        }
+        if (cmd === 'save_upload_history') {
+          saveCalls.push(args)
+        }
+      })
+
+      await store.loadHistory()
+      expect(store.historyLoaded).toBe(true)
+      expect(store.historyLoadFailed).toBe(false)
+      expect(store.files).toHaveLength(1)
+
+      await store.persistHistory()
+      expect(saveCalls).toHaveLength(1)
+      expect(saveCalls[0].entries).toHaveLength(1)
+      expect(saveCalls[0].entries[0].address).toBe('0xabc')
+    })
+
+    it('flags failure and refuses to overwrite on-disk file when load throws', async () => {
+      const warnSpy = vi.spyOn(console, 'warn').mockImplementation(() => {})
+      vi.spyOn(console, 'error').mockImplementation(() => {})
+
+      let saveInvoked = false
+      setMockInvokeHandler((cmd) => {
+        if (cmd === 'load_upload_history') {
+          throw new Error('parse error: unexpected token')
+        }
+        if (cmd === 'save_upload_history') {
+          saveInvoked = true
+        }
+      })
+
+      await store.loadHistory()
+      expect(store.historyLoaded).toBe(true)
+      expect(store.historyLoadFailed).toBe(true)
+      expect(store.files).toHaveLength(0)
+
+      // Even after a "settled" upload row is appended, persistHistory must be
+      // a no-op — writing now would clobber upload_history.json with an array
+      // containing only this new entry and orphan every prior datamap.
+      store.files.push({
+        id: 999,
+        kind: 'upload',
+        name: 'new.pdf',
+        size_bytes: 1,
+        address: '0xnew',
+        status: 'complete',
+        date: '2026-05-12T00:00:00Z',
+      } as any)
+
+      await store.persistHistory()
+      expect(saveInvoked).toBe(false)
+      expect(warnSpy).toHaveBeenCalled()
+
+      warnSpy.mockRestore()
+    })
+
+    it('clears the failure flag on a subsequent successful load', async () => {
+      vi.spyOn(console, 'error').mockImplementation(() => {})
+
+      // First call: fail.
+      mockInvoke.mockImplementationOnce(() => {
+        throw new Error('transient')
+      })
+      await store.loadHistory()
+      expect(store.historyLoadFailed).toBe(true)
+
+      // Second call: succeed.
+      mockInvoke.mockImplementationOnce(() => [])
+      await store.loadHistory()
+      expect(store.historyLoadFailed).toBe(false)
+    })
+  })
+})