fix(bulk-upload): handle network errors and partial upload success

save() now wraps each chunk fetch in try/catch — a thrown network error
marks all files in that chunk as errored. Also handles HTTP 200 responses
with a non-empty errors array (partial success): only the named filenames
are marked as errored rather than all files in the chunk. Navigation is
suppressed whenever any file fails.

Tests added:
- network error marks all chunk files as errored, no navigation
- HTTP 200 with errors array marks only affected files

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
Marcel
2026-04-25 11:09:49 +02:00
parent 1299f191e2
commit 48c82aa07b
2 changed files with 72 additions and 7 deletions

View File

@@ -111,19 +111,29 @@ async function save() {
// Raw fetch is intentional: SvelteKit form actions can't stream chunked
// FormData with per-chunk progress. Session cookie is sent automatically
// by the browser for same-origin requests.
const res = await fetch('/api/documents/quick-upload', { method: 'POST', body: formData });
if (!res.ok) {
hadErrors = true;
try {
const res = await fetch('/api/documents/quick-upload', { method: 'POST', body: formData });
const body = await res.json().catch(() => ({ errors: [] }));
const errorFilenames = new Set<string>(
(body.errors ?? []).map((err: { filename: string }) => err.filename)
);
for (const entry of chunk) {
if (errorFilenames.has(entry.file.name)) {
const e = files.get(entry.id);
if (e) files.set(entry.id, { ...e, status: 'error' });
if (!res.ok || errorFilenames.size > 0) {
hadErrors = true;
for (const entry of chunk) {
// When backend names specific files, mark only those; otherwise mark all.
const isError = errorFilenames.size > 0 ? errorFilenames.has(entry.file.name) : true;
if (isError) {
const e = files.get(entry.id);
if (e) files.set(entry.id, { ...e, status: 'error' });
}
}
}
} catch {
hadErrors = true;
for (const entry of chunk) {
const e = files.get(entry.id);
if (e) files.set(entry.id, { ...e, status: 'error' });
}
}
chunkProgress = { done: i + 1, total: chunks.length };
}

View File

@@ -206,6 +206,61 @@ describe('BulkDocumentEditLayout', () => {
);
});
it('save() marks only the failed file when server returns HTTP 200 with a partial errors array', async () => {
  // The bulk-upload endpoint can answer 200 OK while still reporting
  // per-file failures in body.errors — only those named files should be
  // flipped to status 'error'; the rest of the chunk stays untouched.
  const mockFetch = vi.fn().mockResolvedValue({
    ok: true,
    json: async () => ({
      created: [{ id: '1' }],
      updated: [],
      errors: [{ filename: 'b.pdf', code: 'FILE_UPLOAD_FAILED' }]
    })
  });
  vi.stubGlobal('fetch', mockFetch);
  const { container } = render(BulkDocumentEditLayout, {});
  await addFilesViaInput(container, [makeFile('a.pdf'), makeFile('b.pdf'), makeFile('c.pdf')]);
  const saveBtn = container.querySelector(
    'button[data-testid="bulk-save-btn"]'
  ) as HTMLButtonElement;
  saveBtn.click();
  await vi.waitFor(() => expect(mockFetch).toHaveBeenCalledTimes(1), { timeout: 3000 });
  await vi.waitFor(
    () => {
      const errorChips = container.querySelectorAll('[data-chip-id][data-status="error"]');
      expect(errorChips.length).toBe(1);
      // Assert the full filename, not just 'b': a bare substring match
      // would also pass for any other chip whose text contains the letter.
      expect(errorChips[0].textContent).toContain('b.pdf');
    },
    { timeout: 1000 }
  );
  // Navigation should be suppressed because hadErrors is true
  expect(goto).not.toHaveBeenCalled();
});
it('save() marks all chunk files as errored when fetch throws a network error', async () => {
  // A rejected fetch (e.g. the user went offline) carries no per-file
  // detail, so every file in the chunk must be flagged as errored.
  const failingFetch = vi.fn().mockRejectedValue(new Error('network error'));
  vi.stubGlobal('fetch', failingFetch);
  const { container } = render(BulkDocumentEditLayout, {});
  await addFilesViaInput(container, [makeFile('a.pdf'), makeFile('b.pdf')]);
  const saveBtn = container.querySelector(
    'button[data-testid="bulk-save-btn"]'
  ) as HTMLButtonElement;
  saveBtn.click();
  await vi.waitFor(
    () => {
      expect(
        container.querySelectorAll('[data-chip-id][data-status="error"]').length
      ).toBe(2);
    },
    { timeout: 3000 }
  );
  // With failures recorded, save() must not navigate away.
  expect(goto).not.toHaveBeenCalled();
});
it('save() does not call fetch a second time when already saving', async () => {
let resolveFirst: (() => void) | undefined;
const mockFetch = vi.fn().mockImplementation(