diff --git a/apps/sim/lib/workflows/persistence/utils.ts b/apps/sim/lib/workflows/persistence/utils.ts
index b9d70021d9c..070bb90d769 100644
--- a/apps/sim/lib/workflows/persistence/utils.ts
+++ b/apps/sim/lib/workflows/persistence/utils.ts
@@ -407,9 +407,9 @@ export async function loadWorkflowFromNormalizedTables(
     if (subflow.type === SUBFLOW_TYPES.LOOP) {
       const loopType =
         (config as Loop).loopType === 'for' ||
-        (config as Loop).loopType === 'forEach' ||
-        (config as Loop).loopType === 'while' ||
-        (config as Loop).loopType === 'doWhile'
+          (config as Loop).loopType === 'forEach' ||
+          (config as Loop).loopType === 'while' ||
+          (config as Loop).loopType === 'doWhile'
           ? (config as Loop).loopType
           : 'for'
@@ -446,7 +446,7 @@ export async function loadWorkflowFromNormalizedTables(
         distribution: (config as Parallel).distribution ?? '',
         parallelType:
           (config as Parallel).parallelType === 'count' ||
-          (config as Parallel).parallelType === 'collection'
+            (config as Parallel).parallelType === 'collection'
             ? (config as Parallel).parallelType
             : 'count',
         enabled: credMigratedBlocks[subflow.id]?.enabled ?? true,
@@ -490,6 +490,8 @@ export async function saveWorkflowToNormalizedTables(
       tx.delete(workflowSubflows).where(eq(workflowSubflows.workflowId, workflowId)),
     ])
 
+    const CHUNK_SIZE = 50
+
     // Insert blocks
     if (Object.keys(state.blocks).length > 0) {
      const blockInserts = Object.values(state.blocks).map((block) => ({
@@ -512,7 +514,12 @@ export async function saveWorkflowToNormalizedTables(
         locked: block.locked ?? false,
       }))
 
-      await tx.insert(workflowBlocks).values(blockInserts)
+      // SQLite limits bound parameters to 999 per statement.
+      // workflowBlocks has 17 fields -> max safe chunk = floor(999/17) = 58.
+      // Using 50 for a conservative margin.
+      for (let i = 0; i < blockInserts.length; i += CHUNK_SIZE) {
+        await tx.insert(workflowBlocks).values(blockInserts.slice(i, i + CHUNK_SIZE))
+      }
     }
 
     // Insert edges
@@ -526,7 +533,9 @@ export async function saveWorkflowToNormalizedTables(
         targetHandle: edge.targetHandle || null,
       }))
 
-      await tx.insert(workflowEdges).values(edgeInserts)
+      for (let i = 0; i < edgeInserts.length; i += CHUNK_SIZE) {
+        await tx.insert(workflowEdges).values(edgeInserts.slice(i, i + CHUNK_SIZE))
+      }
     }
 
     // Insert subflows (loops and parallels)
@@ -553,7 +562,9 @@ export async function saveWorkflowToNormalizedTables(
     })
 
     if (subflowInserts.length > 0) {
-      await tx.insert(workflowSubflows).values(subflowInserts)
+      for (let i = 0; i < subflowInserts.length; i += CHUNK_SIZE) {
+        await tx.insert(workflowSubflows).values(subflowInserts.slice(i, i + CHUNK_SIZE))
+      }
     }
   })
@@ -749,11 +760,11 @@ export function regenerateWorkflowStateIds(state: RegenerateStateInput): Regener
     blockIdMapping.set(oldId, crypto.randomUUID())
   })
 
-  // Map edge IDs
-  ;(state.edges || []).forEach((edge: Edge) => {
-    edgeIdMapping.set(edge.id, crypto.randomUUID())
-  })
+  // Map edge IDs
+  ; (state.edges || []).forEach((edge: Edge) => {
+      edgeIdMapping.set(edge.id, crypto.randomUUID())
+    })
 
   // Map loop IDs
   Object.keys(state.loops || {}).forEach((oldId) => {
@@ -807,20 +818,20 @@ export function regenerateWorkflowStateIds(state: RegenerateStateInput): Regener
     newBlocks[newId] = newBlock
   })
 
-  // Regenerate edges with updated source/target references
-  ;(state.edges || []).forEach((edge: Edge) => {
-    const newId = edgeIdMapping.get(edge.id)!
-    const newSource = blockIdMapping.get(edge.source) || edge.source
-    const newTarget = blockIdMapping.get(edge.target) || edge.target
+  // Regenerate edges with updated source/target references
+  ; (state.edges || []).forEach((edge: Edge) => {
+      const newId = edgeIdMapping.get(edge.id)!
+      const newSource = blockIdMapping.get(edge.source) || edge.source
+      const newTarget = blockIdMapping.get(edge.target) || edge.target
 
-    newEdges.push({
-      ...edge,
-      id: newId,
-      source: newSource,
-      target: newTarget,
+      newEdges.push({
+        ...edge,
+        id: newId,
+        source: newSource,
+        target: newTarget,
+      })
     })
-  })
 
   // Regenerate loops with updated node references
   Object.entries(state.loops || {}).forEach(([oldId, loop]) => {
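
Note: the three chunked inserts above share one pattern: slice the rows into fixed-size batches and issue one insert per batch, so a single statement never binds more than the 999-parameter budget (17 columns per block row gives floor(999/17) = 58 rows; 50 leaves headroom). A minimal standalone sketch of that pattern follows, assuming only the Drizzle-style tx.insert(table).values(rows) call already used in the diff; the insertInChunks helper and its callback shape are illustrative, not part of this change.

// Chunked-insert sketch: batches rows so no single statement exceeds the
// bound-parameter budget. The helper name and the insert callback are
// assumptions for illustration, not part of the diff.
async function insertInChunks<T>(
  rows: T[],
  insert: (chunk: T[]) => Promise<unknown>,
  chunkSize = 50
): Promise<void> {
  for (let i = 0; i < rows.length; i += chunkSize) {
    // Each batch binds at most chunkSize * columns parameters.
    await insert(rows.slice(i, i + chunkSize))
  }
}

// Hypothetical usage mirroring the block insert above:
// await insertInChunks(blockInserts, (chunk) => tx.insert(workflowBlocks).values(chunk))

Keeping the loop inline, as the diff does, avoids introducing a new abstraction; a shared helper like the sketch is only worth it if more tables gain the same treatment.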