fix: ajuste de lógica para erro 413 "payload grande demais"

This commit is contained in:
NANDO9322 2026-02-02 19:53:42 -03:00
parent cea13a379c
commit 8e95828f85

View file

@@ -521,6 +521,9 @@ export const ImportData: React.FC = () => {
const handleImport = async () => {
if (!token) return;
setIsLoading(true);
// Reset result before start
setResult(null);
try {
let endpoint = "";
if (activeTab === 'fot') endpoint = '/api/import/fot';
@@ -528,38 +531,83 @@
else if (activeTab === 'profissionais') endpoint = '/api/profissionais/import';
else if (activeTab === 'financeiro') endpoint = '/api/finance/import';
const response = await fetch(`${API_BASE_URL}${endpoint}`, {
method: "POST",
headers: {
"Content-Type": "application/json",
"Authorization": `Bearer ${token}`
},
body: JSON.stringify(data)
});
if (!response.ok) {
throw new Error(`Erro na importação: ${response.statusText}`);
}
const resData = await response.json();
const CHUNK_SIZE = 1000;
let successes = 0;
let errorsList: string[] = [];
if (activeTab === 'profissionais') {
// { created, updated, errors_count, errors: [] }
setResult({
success: (resData.created || 0) + (resData.updated || 0),
errors: resData.errors || []
});
} else if (resData.message) {
setResult({ success: data.length, errors: [] });
} else {
setResult({
success: resData.SuccessCount,
errors: resData.Errors || []
});
// Batch loop
for (let i = 0; i < data.length; i += CHUNK_SIZE) {
const chunk = data.slice(i, i + CHUNK_SIZE);
console.log(`Importing chunk ${(i/CHUNK_SIZE)+1}/${Math.ceil(data.length/CHUNK_SIZE)} (${chunk.length} records)`);
const response = await fetch(`${API_BASE_URL}${endpoint}`, {
method: "POST",
headers: {
"Content-Type": "application/json",
"Authorization": `Bearer ${token}`
},
body: JSON.stringify(chunk)
});
if (!response.ok) {
const errBody = await response.text();
// Try parse JSON
let errMsg = response.statusText;
try {
const jsonErr = JSON.parse(errBody);
errMsg = jsonErr.error || errMsg;
} catch (e) {}
errorsList.push(`Chunk ${i}: ${errMsg}`);
console.error(`Chunk failed: ${errMsg}`);
// We continue processing other chunks?
// Or stop?
// Usually valid records in other chunks should be saved.
// But if user expects all or nothing, this is tricky.
// For now, continue best effort.
} else {
const resData = await response.json();
if (activeTab === 'profissionais') {
successes += (resData.created || 0) + (resData.updated || 0);
if (resData.errors && resData.errors.length > 0) {
errorsList.push(...resData.errors);
}
} else if (resData.message) {
// Some endpoints return just { message: "xxx" } implying all success?
// Check handler.
// If success, assume chunk length inserted if no detailed count?
// Or parse message "Imported X records"?
// Assuming chunk success:
successes += chunk.length;
} else {
// Standard { SuccessCount: X, Errors: [] }
successes += (resData.SuccessCount || 0) + (resData.imported || 0); // Handle variations
if (resData.Errors && resData.Errors.length > 0) errorsList.push(...resData.Errors);
}
}
}
} catch (error) {
console.error("Import error:", error);
alert("Erro ao importar dados. Verifique o console.");
// Final Result
if (errorsList.length > 0) {
console.warn("Import finished with errors", errorsList);
// Show summary
setResult({
success: successes,
errors: errorsList
});
alert(`Importação concluída com parcialidade.\nSucesso: ${successes}\nErros: ${errorsList.length}. Verifique o resultado.`);
} else {
setResult({
success: successes,
errors: []
});
alert(`Importação concluída com sucesso! ${successes} registros.`);
}
} catch (error: any) {
console.error("Import error description:", error);
alert("Erro crítico ao importar: " + (error.message || "Desconhecido"));
} finally {
setIsLoading(false);
}