fix: ajuste de lógica para erro 413 "payload grande demais"

This commit is contained in:
NANDO9322 2026-02-02 19:53:42 -03:00
parent cea13a379c
commit 8e95828f85

View file

@ -521,6 +521,9 @@ export const ImportData: React.FC = () => {
const handleImport = async () => { const handleImport = async () => {
if (!token) return; if (!token) return;
setIsLoading(true); setIsLoading(true);
// Reset result before start
setResult(null);
try { try {
let endpoint = ""; let endpoint = "";
if (activeTab === 'fot') endpoint = '/api/import/fot'; if (activeTab === 'fot') endpoint = '/api/import/fot';
@ -528,38 +531,83 @@ export const ImportData: React.FC = () => {
else if (activeTab === 'profissionais') endpoint = '/api/profissionais/import'; else if (activeTab === 'profissionais') endpoint = '/api/profissionais/import';
else if (activeTab === 'financeiro') endpoint = '/api/finance/import'; else if (activeTab === 'financeiro') endpoint = '/api/finance/import';
const response = await fetch(`${API_BASE_URL}${endpoint}`, { const CHUNK_SIZE = 1000;
method: "POST", let successes = 0;
headers: { let errorsList: string[] = [];
"Content-Type": "application/json",
"Authorization": `Bearer ${token}`
},
body: JSON.stringify(data)
});
if (!response.ok) {
throw new Error(`Erro na importação: ${response.statusText}`);
}
const resData = await response.json();
if (activeTab === 'profissionais') { // Batch loop
// { created, updated, errors_count, errors: [] } for (let i = 0; i < data.length; i += CHUNK_SIZE) {
setResult({ const chunk = data.slice(i, i + CHUNK_SIZE);
success: (resData.created || 0) + (resData.updated || 0), console.log(`Importing chunk ${(i/CHUNK_SIZE)+1}/${Math.ceil(data.length/CHUNK_SIZE)} (${chunk.length} records)`);
errors: resData.errors || []
}); const response = await fetch(`${API_BASE_URL}${endpoint}`, {
} else if (resData.message) { method: "POST",
setResult({ success: data.length, errors: [] }); headers: {
} else { "Content-Type": "application/json",
setResult({ "Authorization": `Bearer ${token}`
success: resData.SuccessCount, },
errors: resData.Errors || [] body: JSON.stringify(chunk)
}); });
if (!response.ok) {
const errBody = await response.text();
// Try parse JSON
let errMsg = response.statusText;
try {
const jsonErr = JSON.parse(errBody);
errMsg = jsonErr.error || errMsg;
} catch (e) {}
errorsList.push(`Chunk ${i}: ${errMsg}`);
console.error(`Chunk failed: ${errMsg}`);
// We continue processing other chunks?
// Or stop?
// Usually valid records in other chunks should be saved.
// But if user expects all or nothing, this is tricky.
// For now, continue best effort.
} else {
const resData = await response.json();
if (activeTab === 'profissionais') {
successes += (resData.created || 0) + (resData.updated || 0);
if (resData.errors && resData.errors.length > 0) {
errorsList.push(...resData.errors);
}
} else if (resData.message) {
// Some endpoints return just { message: "xxx" } implying all success?
// Check handler.
// If success, assume chunk length inserted if no detailed count?
// Or parse message "Imported X records"?
// Assuming chunk success:
successes += chunk.length;
} else {
// Standard { SuccessCount: X, Errors: [] }
successes += (resData.SuccessCount || 0) + (resData.imported || 0); // Handle variations
if (resData.Errors && resData.Errors.length > 0) errorsList.push(...resData.Errors);
}
}
} }
} catch (error) {
console.error("Import error:", error); // Final Result
alert("Erro ao importar dados. Verifique o console."); if (errorsList.length > 0) {
console.warn("Import finished with errors", errorsList);
// Show summary
setResult({
success: successes,
errors: errorsList
});
alert(`Importação concluída com parcialidade.\nSucesso: ${successes}\nErros: ${errorsList.length}. Verifique o resultado.`);
} else {
setResult({
success: successes,
errors: []
});
alert(`Importação concluída com sucesso! ${successes} registros.`);
}
} catch (error: any) {
console.error("Import error description:", error);
alert("Erro crítico ao importar: " + (error.message || "Desconhecido"));
} finally { } finally {
setIsLoading(false); setIsLoading(false);
} }