From d84cba6d19170c8fa39328d500f6ffbdcc6629b7 Mon Sep 17 00:00:00 2001 From: PlaneInABottle Date: Fri, 13 Mar 2026 12:10:39 +0300 Subject: [PATCH 1/6] chore(self-hosting): add health check endpoint (#3562) Add a simple API health route for deployment platforms and container probes, with focused route coverage. Co-authored-by: test --- apps/sim/app/api/health/route.test.ts | 17 +++++++++++++++++ apps/sim/app/api/health/route.ts | 12 ++++++++++++ 2 files changed, 29 insertions(+) create mode 100644 apps/sim/app/api/health/route.test.ts create mode 100644 apps/sim/app/api/health/route.ts diff --git a/apps/sim/app/api/health/route.test.ts b/apps/sim/app/api/health/route.test.ts new file mode 100644 index 0000000000..6abca82757 --- /dev/null +++ b/apps/sim/app/api/health/route.test.ts @@ -0,0 +1,17 @@ +/** + * @vitest-environment node + */ +import { describe, expect, it } from 'vitest' +import { GET } from '@/app/api/health/route' + +describe('GET /api/health', () => { + it('returns an ok status payload', async () => { + const response = await GET() + + expect(response.status).toBe(200) + await expect(response.json()).resolves.toEqual({ + status: 'ok', + timestamp: expect.any(String), + }) + }) +}) diff --git a/apps/sim/app/api/health/route.ts b/apps/sim/app/api/health/route.ts new file mode 100644 index 0000000000..5486272998 --- /dev/null +++ b/apps/sim/app/api/health/route.ts @@ -0,0 +1,12 @@ +/** + * Health check endpoint for deployment platforms and container probes. 
+ */ +export async function GET(): Promise { + return Response.json( + { + status: 'ok', + timestamp: new Date().toISOString(), + }, + { status: 200 } + ) +} From 92290029f09b05638850dfaccfec14c825b43376 Mon Sep 17 00:00:00 2001 From: PlaneInABottle Date: Fri, 13 Mar 2026 12:55:58 +0300 Subject: [PATCH 2/6] fix(execution): queued execution finalization and async correlation (#3535) * fix(execution): finalize runs before wrapper recovery * fix(async): preserve execution correlation across queued runs * fix(webhooks): pass correlation into preprocessing * style(webhooks): normalize webhook executor formatting * fix(async): avoid pre-starting queued execution logs Let executeWorkflowCore own normal-path logging start so queued workflow and schedule executions persist the richer deployment and environment metadata instead of an earlier placeholder start record. * fix(async): harden execution finalization guards Prevent leaked core finalization markers from accumulating while keeping outer recovery paths idempotent. Preserve best-effort logging completion by reusing settled completion promises instead of reopening duplicate terminal writes. * fix(async): preserve outcomes during cleanup Keep execution finalization cleanup best-effort so cancellation cleanup failures do not overwrite successful or failed outcomes. Restore webhook processor formatting to the repository Biome style to avoid noisy formatter churn. * fix(async): keep execution finalization state consistent Retry minimal logging for early failures, only mark core finalization after a log row actually completes, and let paused completions fall back cleanly. * fix(async): clean stale finalization guards Scan all finalized execution ids during TTL cleanup so refreshed keys cannot keep expired guards alive, and cover the reused-id ordering regression. 
* fix(async): retry failed error finalization Allow error finalization to retry after a non-error completion and fallback both fail, and always persist failed/error semantics for completeWithError. * fix(webhooks): reuse preprocessing execution ids Thread preprocessing execution identity into queued webhook execution so both phases share the same correlation and logs. --------- Co-authored-by: test --- .../app/api/schedules/execute/route.test.ts | 46 +- apps/sim/app/api/schedules/execute/route.ts | 16 +- .../api/webhooks/trigger/[path]/route.test.ts | 95 ++- .../app/api/webhooks/trigger/[path]/route.ts | 2 + .../[id]/execute/route.async.test.ts | 160 +++++ .../app/api/workflows/[id]/execute/route.ts | 12 +- .../async-execution-correlation.test.ts | 71 +++ .../async-preprocessing-correlation.test.ts | 272 ++++++++ apps/sim/background/schedule-execution.ts | 44 +- apps/sim/background/webhook-execution.ts | 48 +- apps/sim/background/workflow-execution.ts | 42 +- apps/sim/executor/execution/types.ts | 2 + .../core/async-jobs/backends/trigger-dev.ts | 4 + apps/sim/lib/core/async-jobs/types.ts | 16 + apps/sim/lib/execution/preprocessing.test.ts | 92 +++ apps/sim/lib/execution/preprocessing.ts | 15 +- .../preprocessing.webhook-correlation.test.ts | 93 +++ apps/sim/lib/logs/execution/logger.test.ts | 53 +- apps/sim/lib/logs/execution/logger.ts | 57 +- .../logs/execution/logging-session.test.ts | 199 ++++++ .../sim/lib/logs/execution/logging-session.ts | 68 +- apps/sim/lib/logs/types.ts | 6 +- apps/sim/lib/webhooks/processor.test.ts | 219 +++++++ apps/sim/lib/webhooks/processor.ts | 171 ++++- .../workflows/executor/execution-core.test.ts | 582 ++++++++++++++++++ .../lib/workflows/executor/execution-core.ts | 234 +++++-- 26 files changed, 2453 insertions(+), 166 deletions(-) create mode 100644 apps/sim/app/api/workflows/[id]/execute/route.async.test.ts create mode 100644 apps/sim/background/async-execution-correlation.test.ts create mode 100644 
apps/sim/background/async-preprocessing-correlation.test.ts create mode 100644 apps/sim/lib/execution/preprocessing.test.ts create mode 100644 apps/sim/lib/execution/preprocessing.webhook-correlation.test.ts create mode 100644 apps/sim/lib/logs/execution/logging-session.test.ts create mode 100644 apps/sim/lib/webhooks/processor.test.ts create mode 100644 apps/sim/lib/workflows/executor/execution-core.test.ts diff --git a/apps/sim/app/api/schedules/execute/route.test.ts b/apps/sim/app/api/schedules/execute/route.test.ts index 66dc9fd6f3..c4bb541533 100644 --- a/apps/sim/app/api/schedules/execute/route.test.ts +++ b/apps/sim/app/api/schedules/execute/route.test.ts @@ -107,7 +107,11 @@ vi.mock('@sim/db', () => ({ }, })) -import { GET } from '@/app/api/schedules/execute/route' +vi.mock('uuid', () => ({ + v4: vi.fn().mockReturnValue('schedule-execution-1'), +})) + +import { GET } from './route' const SINGLE_SCHEDULE = [ { @@ -204,4 +208,44 @@ describe('Scheduled Workflow Execution API Route', () => { const data = await response.json() expect(data).toHaveProperty('executedCount', 2) }) + + it('should enqueue preassigned correlation metadata for schedules', async () => { + mockDbReturning.mockReturnValue(SINGLE_SCHEDULE) + + const response = await GET(createMockRequest()) + + expect(response.status).toBe(200) + expect(mockEnqueue).toHaveBeenCalledWith( + 'schedule-execution', + expect.objectContaining({ + scheduleId: 'schedule-1', + workflowId: 'workflow-1', + executionId: 'schedule-execution-1', + requestId: 'test-request-id', + correlation: { + executionId: 'schedule-execution-1', + requestId: 'test-request-id', + source: 'schedule', + workflowId: 'workflow-1', + scheduleId: 'schedule-1', + triggerType: 'schedule', + scheduledFor: '2025-01-01T00:00:00.000Z', + }, + }), + { + metadata: { + workflowId: 'workflow-1', + correlation: { + executionId: 'schedule-execution-1', + requestId: 'test-request-id', + source: 'schedule', + workflowId: 'workflow-1', + scheduleId: 
'schedule-1', + triggerType: 'schedule', + scheduledFor: '2025-01-01T00:00:00.000Z', + }, + }, + } + ) + }) }) diff --git a/apps/sim/app/api/schedules/execute/route.ts b/apps/sim/app/api/schedules/execute/route.ts index c928b17143..fc87b07833 100644 --- a/apps/sim/app/api/schedules/execute/route.ts +++ b/apps/sim/app/api/schedules/execute/route.ts @@ -2,6 +2,7 @@ import { db, workflowDeploymentVersion, workflowSchedule } from '@sim/db' import { createLogger } from '@sim/logger' import { and, eq, isNull, lt, lte, not, or, sql } from 'drizzle-orm' import { type NextRequest, NextResponse } from 'next/server' +import { v4 as uuidv4 } from 'uuid' import { verifyCronAuth } from '@/lib/auth/internal' import { getJobQueue, shouldExecuteInline } from '@/lib/core/async-jobs' import { generateRequestId } from '@/lib/core/utils/request' @@ -57,10 +58,23 @@ export async function GET(request: NextRequest) { const queuePromises = dueSchedules.map(async (schedule) => { const queueTime = schedule.lastQueuedAt ?? 
queuedAt + const executionId = uuidv4() + const correlation = { + executionId, + requestId, + source: 'schedule' as const, + workflowId: schedule.workflowId, + scheduleId: schedule.id, + triggerType: 'schedule', + scheduledFor: schedule.nextRunAt?.toISOString(), + } const payload = { scheduleId: schedule.id, workflowId: schedule.workflowId, + executionId, + requestId, + correlation, blockId: schedule.blockId || undefined, cronExpression: schedule.cronExpression || undefined, lastRanAt: schedule.lastRanAt?.toISOString(), @@ -71,7 +85,7 @@ export async function GET(request: NextRequest) { try { const jobId = await jobQueue.enqueue('schedule-execution', payload, { - metadata: { workflowId: schedule.workflowId }, + metadata: { workflowId: schedule.workflowId, correlation }, }) logger.info( `[${requestId}] Queued schedule execution task ${jobId} for workflow ${schedule.workflowId}` diff --git a/apps/sim/app/api/webhooks/trigger/[path]/route.test.ts b/apps/sim/app/api/webhooks/trigger/[path]/route.test.ts index 195572f8cb..7938123e1d 100644 --- a/apps/sim/app/api/webhooks/trigger/[path]/route.test.ts +++ b/apps/sim/app/api/webhooks/trigger/[path]/route.test.ts @@ -101,6 +101,7 @@ const { processWebhookMock, executeMock, getWorkspaceBilledAccountUserIdMock, + queueWebhookExecutionMock, } = vi.hoisted(() => ({ generateRequestHashMock: vi.fn().mockResolvedValue('test-hash-123'), validateSlackSignatureMock: vi.fn().mockResolvedValue(true), @@ -125,6 +126,10 @@ const { .mockImplementation(async (workspaceId: string | null | undefined) => workspaceId ? 
'test-user-id' : null ), + queueWebhookExecutionMock: vi.fn().mockImplementation(async () => { + const { NextResponse } = await import('next/server') + return NextResponse.json({ message: 'Webhook processed' }) + }), })) vi.mock('@trigger.dev/sdk', () => ({ @@ -350,21 +355,28 @@ vi.mock('@/lib/webhooks/processor', () => ({ return null } ), - checkWebhookPreprocessing: vi - .fn() - .mockResolvedValue({ error: null, actorUserId: 'test-user-id' }), + checkWebhookPreprocessing: vi.fn().mockResolvedValue({ + error: null, + actorUserId: 'test-user-id', + executionId: 'preprocess-execution-id', + correlation: { + executionId: 'preprocess-execution-id', + requestId: 'mock-request-id', + source: 'webhook', + workflowId: 'test-workflow-id', + webhookId: 'generic-webhook-id', + path: 'test-path', + provider: 'generic', + triggerType: 'webhook', + }, + }), formatProviderErrorResponse: vi.fn().mockImplementation((_webhook, error, status) => { const { NextResponse } = require('next/server') return NextResponse.json({ error }, { status }) }), shouldSkipWebhookEvent: vi.fn().mockReturnValue(false), handlePreDeploymentVerification: vi.fn().mockReturnValue(null), - queueWebhookExecution: vi.fn().mockImplementation(async () => { - // Call processWebhookMock so tests can verify it was called - processWebhookMock() - const { NextResponse } = await import('next/server') - return NextResponse.json({ message: 'Webhook processed' }) - }), + queueWebhookExecution: queueWebhookExecutionMock, })) vi.mock('drizzle-orm/postgres-js', () => ({ @@ -419,7 +431,7 @@ describe('Webhook Trigger API Route', () => { const params = Promise.resolve({ path: 'non-existent-path' }) - const response = await POST(req, { params }) + const response = await POST(req as any, { params }) expect(response.status).toBe(404) @@ -494,6 +506,47 @@ describe('Webhook Trigger API Route', () => { }) describe('Generic Webhook Authentication', () => { + it('passes correlation-bearing request context into webhook queueing', 
async () => { + testData.webhooks.push({ + id: 'generic-webhook-id', + provider: 'generic', + path: 'test-path', + isActive: true, + providerConfig: { requireAuth: false }, + workflowId: 'test-workflow-id', + }) + + const req = createMockRequest('POST', { event: 'test', id: 'test-123' }) + const params = Promise.resolve({ path: 'test-path' }) + + const response = await POST(req as any, { params }) + + expect(response.status).toBe(200) + expect(queueWebhookExecutionMock).toHaveBeenCalledOnce() + const call = queueWebhookExecutionMock.mock.calls[0] + expect(call[0]).toEqual(expect.objectContaining({ id: 'generic-webhook-id' })) + expect(call[1]).toEqual(expect.objectContaining({ id: 'test-workflow-id' })) + expect(call[2]).toEqual(expect.objectContaining({ event: 'test', id: 'test-123' })) + expect(call[4]).toEqual( + expect.objectContaining({ + requestId: 'mock-request-id', + path: 'test-path', + actorUserId: 'test-user-id', + executionId: 'preprocess-execution-id', + correlation: { + executionId: 'preprocess-execution-id', + requestId: 'mock-request-id', + source: 'webhook', + workflowId: 'test-workflow-id', + webhookId: 'generic-webhook-id', + path: 'test-path', + provider: 'generic', + triggerType: 'webhook', + }, + }) + ) + }) + it('should process generic webhook without authentication', async () => { testData.webhooks.push({ id: 'generic-webhook-id', @@ -514,7 +567,7 @@ describe('Webhook Trigger API Route', () => { const req = createMockRequest('POST', { event: 'test', id: 'test-123' }) const params = Promise.resolve({ path: 'test-path' }) - const response = await POST(req, { params }) + const response = await POST(req as any, { params }) expect(response.status).toBe(200) @@ -544,7 +597,7 @@ describe('Webhook Trigger API Route', () => { const req = createMockRequest('POST', { event: 'bearer.test' }, headers) const params = Promise.resolve({ path: 'test-path' }) - const response = await POST(req, { params }) + const response = await POST(req as any, { params }) 
expect(response.status).toBe(200) }) @@ -575,7 +628,7 @@ describe('Webhook Trigger API Route', () => { const req = createMockRequest('POST', { event: 'custom.header.test' }, headers) const params = Promise.resolve({ path: 'test-path' }) - const response = await POST(req, { params }) + const response = await POST(req as any, { params }) expect(response.status).toBe(200) }) @@ -610,7 +663,7 @@ describe('Webhook Trigger API Route', () => { const req = createMockRequest('POST', { event: 'case.test' }, headers) const params = Promise.resolve({ path: 'test-path' }) - const response = await POST(req, { params }) + const response = await POST(req as any, { params }) expect(response.status).toBe(200) } @@ -645,7 +698,7 @@ describe('Webhook Trigger API Route', () => { const req = createMockRequest('POST', { event: 'custom.case.test' }, headers) const params = Promise.resolve({ path: 'test-path' }) - const response = await POST(req, { params }) + const response = await POST(req as any, { params }) expect(response.status).toBe(200) } @@ -668,7 +721,7 @@ describe('Webhook Trigger API Route', () => { const req = createMockRequest('POST', { event: 'wrong.token.test' }, headers) const params = Promise.resolve({ path: 'test-path' }) - const response = await POST(req, { params }) + const response = await POST(req as any, { params }) expect(response.status).toBe(401) expect(await response.text()).toContain('Unauthorized - Invalid authentication token') @@ -696,7 +749,7 @@ describe('Webhook Trigger API Route', () => { const req = createMockRequest('POST', { event: 'wrong.custom.test' }, headers) const params = Promise.resolve({ path: 'test-path' }) - const response = await POST(req, { params }) + const response = await POST(req as any, { params }) expect(response.status).toBe(401) expect(await response.text()).toContain('Unauthorized - Invalid authentication token') @@ -716,7 +769,7 @@ describe('Webhook Trigger API Route', () => { const req = createMockRequest('POST', { event: 
'no.auth.test' }) const params = Promise.resolve({ path: 'test-path' }) - const response = await POST(req, { params }) + const response = await POST(req as any, { params }) expect(response.status).toBe(401) expect(await response.text()).toContain('Unauthorized - Invalid authentication token') @@ -744,7 +797,7 @@ describe('Webhook Trigger API Route', () => { const req = createMockRequest('POST', { event: 'exclusivity.test' }, headers) const params = Promise.resolve({ path: 'test-path' }) - const response = await POST(req, { params }) + const response = await POST(req as any, { params }) expect(response.status).toBe(401) expect(await response.text()).toContain('Unauthorized - Invalid authentication token') @@ -772,7 +825,7 @@ describe('Webhook Trigger API Route', () => { const req = createMockRequest('POST', { event: 'wrong.header.name.test' }, headers) const params = Promise.resolve({ path: 'test-path' }) - const response = await POST(req, { params }) + const response = await POST(req as any, { params }) expect(response.status).toBe(401) expect(await response.text()).toContain('Unauthorized - Invalid authentication token') @@ -797,7 +850,7 @@ describe('Webhook Trigger API Route', () => { const req = createMockRequest('POST', { event: 'no.token.config.test' }, headers) const params = Promise.resolve({ path: 'test-path' }) - const response = await POST(req, { params }) + const response = await POST(req as any, { params }) expect(response.status).toBe(401) expect(await response.text()).toContain( diff --git a/apps/sim/app/api/webhooks/trigger/[path]/route.ts b/apps/sim/app/api/webhooks/trigger/[path]/route.ts index f8d0d8fb1b..56304c3e85 100644 --- a/apps/sim/app/api/webhooks/trigger/[path]/route.ts +++ b/apps/sim/app/api/webhooks/trigger/[path]/route.ts @@ -144,6 +144,8 @@ export async function POST( requestId, path, actorUserId: preprocessResult.actorUserId, + executionId: preprocessResult.executionId, + correlation: preprocessResult.correlation, }) 
responses.push(response) } diff --git a/apps/sim/app/api/workflows/[id]/execute/route.async.test.ts b/apps/sim/app/api/workflows/[id]/execute/route.async.test.ts new file mode 100644 index 0000000000..0a9aa008ba --- /dev/null +++ b/apps/sim/app/api/workflows/[id]/execute/route.async.test.ts @@ -0,0 +1,160 @@ +/** + * @vitest-environment node + */ + +import { createMockRequest } from '@sim/testing' +import { beforeEach, describe, expect, it, vi } from 'vitest' + +const { + mockCheckHybridAuth, + mockAuthorizeWorkflowByWorkspacePermission, + mockPreprocessExecution, + mockEnqueue, +} = vi.hoisted(() => ({ + mockCheckHybridAuth: vi.fn(), + mockAuthorizeWorkflowByWorkspacePermission: vi.fn(), + mockPreprocessExecution: vi.fn(), + mockEnqueue: vi.fn().mockResolvedValue('job-123'), +})) + +vi.mock('@/lib/auth/hybrid', () => ({ + checkHybridAuth: mockCheckHybridAuth, +})) + +vi.mock('@/lib/workflows/utils', () => ({ + authorizeWorkflowByWorkspacePermission: mockAuthorizeWorkflowByWorkspacePermission, + createHttpResponseFromBlock: vi.fn(), + workflowHasResponseBlock: vi.fn().mockReturnValue(false), +})) + +vi.mock('@/lib/execution/preprocessing', () => ({ + preprocessExecution: mockPreprocessExecution, +})) + +vi.mock('@/lib/core/async-jobs', () => ({ + getJobQueue: vi.fn().mockResolvedValue({ + enqueue: mockEnqueue, + startJob: vi.fn(), + completeJob: vi.fn(), + markJobFailed: vi.fn(), + }), + shouldExecuteInline: vi.fn().mockReturnValue(false), +})) + +vi.mock('@/lib/core/utils/request', () => ({ + generateRequestId: vi.fn().mockReturnValue('req-12345678'), +})) + +vi.mock('@/lib/core/utils/urls', () => ({ + getBaseUrl: vi.fn().mockReturnValue('http://localhost:3000'), +})) + +vi.mock('@/lib/execution/call-chain', () => ({ + SIM_VIA_HEADER: 'x-sim-via', + parseCallChain: vi.fn().mockReturnValue([]), + validateCallChain: vi.fn().mockReturnValue(null), + buildNextCallChain: vi.fn().mockReturnValue(['workflow-1']), +})) + +vi.mock('@/lib/logs/execution/logging-session', () 
=> ({ + LoggingSession: vi.fn().mockImplementation(() => ({})), +})) + +vi.mock('@/background/workflow-execution', () => ({ + executeWorkflowJob: vi.fn(), +})) + +vi.mock('@sim/logger', () => ({ + createLogger: vi.fn().mockReturnValue({ + info: vi.fn(), + warn: vi.fn(), + error: vi.fn(), + debug: vi.fn(), + }), +})) + +vi.mock('uuid', () => ({ + validate: vi.fn().mockReturnValue(true), + v4: vi.fn().mockReturnValue('execution-123'), +})) + +import { POST } from './route' + +describe('workflow execute async route', () => { + beforeEach(() => { + vi.clearAllMocks() + + mockCheckHybridAuth.mockResolvedValue({ + success: true, + userId: 'session-user-1', + authType: 'session', + }) + + mockAuthorizeWorkflowByWorkspacePermission.mockResolvedValue({ + allowed: true, + workflow: { + id: 'workflow-1', + userId: 'owner-1', + workspaceId: 'workspace-1', + }, + }) + + mockPreprocessExecution.mockResolvedValue({ + success: true, + actorUserId: 'actor-1', + workflowRecord: { + id: 'workflow-1', + userId: 'owner-1', + workspaceId: 'workspace-1', + }, + }) + }) + + it('queues async execution with matching correlation metadata', async () => { + const req = createMockRequest( + 'POST', + { input: { hello: 'world' } }, + { + 'Content-Type': 'application/json', + 'X-Execution-Mode': 'async', + } + ) + const params = Promise.resolve({ id: 'workflow-1' }) + + const response = await POST(req as any, { params }) + const body = await response.json() + + expect(response.status).toBe(202) + expect(body.executionId).toBe('execution-123') + expect(body.jobId).toBe('job-123') + expect(mockEnqueue).toHaveBeenCalledWith( + 'workflow-execution', + expect.objectContaining({ + workflowId: 'workflow-1', + userId: 'actor-1', + executionId: 'execution-123', + requestId: 'req-12345678', + correlation: { + executionId: 'execution-123', + requestId: 'req-12345678', + source: 'workflow', + workflowId: 'workflow-1', + triggerType: 'manual', + }, + }), + { + metadata: { + workflowId: 'workflow-1', + userId: 
'actor-1', + correlation: { + executionId: 'execution-123', + requestId: 'req-12345678', + source: 'workflow', + workflowId: 'workflow-1', + triggerType: 'manual', + }, + }, + } + ) + }) +}) diff --git a/apps/sim/app/api/workflows/[id]/execute/route.ts b/apps/sim/app/api/workflows/[id]/execute/route.ts index 6f469e2486..1200debd41 100644 --- a/apps/sim/app/api/workflows/[id]/execute/route.ts +++ b/apps/sim/app/api/workflows/[id]/execute/route.ts @@ -166,19 +166,29 @@ type AsyncExecutionParams = { async function handleAsyncExecution(params: AsyncExecutionParams): Promise { const { requestId, workflowId, userId, input, triggerType, executionId, callChain } = params + const correlation = { + executionId, + requestId, + source: 'workflow' as const, + workflowId, + triggerType, + } + const payload: WorkflowExecutionPayload = { workflowId, userId, input, triggerType, executionId, + requestId, + correlation, callChain, } try { const jobQueue = await getJobQueue() const jobId = await jobQueue.enqueue('workflow-execution', payload, { - metadata: { workflowId, userId }, + metadata: { workflowId, userId, correlation }, }) logger.info(`[${requestId}] Queued async workflow execution`, { diff --git a/apps/sim/background/async-execution-correlation.test.ts b/apps/sim/background/async-execution-correlation.test.ts new file mode 100644 index 0000000000..4b2f8fb097 --- /dev/null +++ b/apps/sim/background/async-execution-correlation.test.ts @@ -0,0 +1,71 @@ +/** + * @vitest-environment node + */ + +import { describe, expect, it } from 'vitest' +import { buildScheduleCorrelation } from './schedule-execution' +import { buildWebhookCorrelation } from './webhook-execution' +import { buildWorkflowCorrelation } from './workflow-execution' + +describe('async execution correlation fallbacks', () => { + it('falls back for legacy workflow payloads missing correlation fields', () => { + const correlation = buildWorkflowCorrelation({ + workflowId: 'workflow-1', + userId: 'user-1', + triggerType: 
'api', + executionId: 'execution-legacy', + }) + + expect(correlation).toEqual({ + executionId: 'execution-legacy', + requestId: 'executio', + source: 'workflow', + workflowId: 'workflow-1', + triggerType: 'api', + }) + }) + + it('falls back for legacy schedule payloads missing preassigned request id', () => { + const correlation = buildScheduleCorrelation({ + scheduleId: 'schedule-1', + workflowId: 'workflow-1', + executionId: 'schedule-exec-1', + now: '2025-01-01T00:00:00.000Z', + scheduledFor: '2025-01-01T00:00:00.000Z', + }) + + expect(correlation).toEqual({ + executionId: 'schedule-exec-1', + requestId: 'schedule', + source: 'schedule', + workflowId: 'workflow-1', + scheduleId: 'schedule-1', + triggerType: 'schedule', + scheduledFor: '2025-01-01T00:00:00.000Z', + }) + }) + + it('falls back for legacy webhook payloads missing preassigned fields', () => { + const correlation = buildWebhookCorrelation({ + webhookId: 'webhook-1', + workflowId: 'workflow-1', + userId: 'user-1', + executionId: 'webhook-exec-1', + provider: 'slack', + body: {}, + headers: {}, + path: 'incoming/slack', + }) + + expect(correlation).toEqual({ + executionId: 'webhook-exec-1', + requestId: 'webhook-', + source: 'webhook', + workflowId: 'workflow-1', + webhookId: 'webhook-1', + path: 'incoming/slack', + provider: 'slack', + triggerType: 'webhook', + }) + }) +}) diff --git a/apps/sim/background/async-preprocessing-correlation.test.ts b/apps/sim/background/async-preprocessing-correlation.test.ts new file mode 100644 index 0000000000..c2844150c6 --- /dev/null +++ b/apps/sim/background/async-preprocessing-correlation.test.ts @@ -0,0 +1,272 @@ +/** + * @vitest-environment node + */ + +import { beforeEach, describe, expect, it, vi } from 'vitest' + +const { + mockPreprocessExecution, + mockTask, + mockDbUpdate, + mockExecuteWorkflowCore, + mockLoggingSession, + mockBlockExistsInDeployment, + mockLoadDeployedWorkflowState, + mockGetScheduleTimeValues, + mockGetSubBlockValue, +} = vi.hoisted(() => 
({ + mockPreprocessExecution: vi.fn(), + mockTask: vi.fn((config) => config), + mockDbUpdate: vi.fn(() => ({ + set: vi.fn(() => ({ where: vi.fn().mockResolvedValue(undefined) })), + })), + mockExecuteWorkflowCore: vi.fn(), + mockLoggingSession: vi.fn(), + mockBlockExistsInDeployment: vi.fn(), + mockLoadDeployedWorkflowState: vi.fn(), + mockGetScheduleTimeValues: vi.fn(), + mockGetSubBlockValue: vi.fn(), +})) + +vi.mock('@trigger.dev/sdk', () => ({ task: mockTask })) + +vi.mock('@sim/db', () => ({ + db: { + update: mockDbUpdate, + select: vi.fn(), + }, + workflow: {}, + workflowSchedule: {}, +})) + +vi.mock('drizzle-orm', () => ({ eq: vi.fn() })) + +vi.mock('@/lib/execution/preprocessing', () => ({ + preprocessExecution: mockPreprocessExecution, +})) + +vi.mock('@/lib/logs/execution/logging-session', () => ({ + LoggingSession: vi.fn().mockImplementation(() => { + const instance = { + safeStart: vi.fn().mockResolvedValue(true), + safeCompleteWithError: vi.fn().mockResolvedValue(undefined), + markAsFailed: vi.fn().mockResolvedValue(undefined), + } + mockLoggingSession(instance) + return instance + }), +})) + +vi.mock('@/lib/core/execution-limits', () => ({ + createTimeoutAbortController: vi.fn(() => ({ + signal: undefined, + cleanup: vi.fn(), + isTimedOut: vi.fn().mockReturnValue(false), + timeoutMs: undefined, + })), + getTimeoutErrorMessage: vi.fn(), +})) + +vi.mock('@/lib/logs/execution/trace-spans/trace-spans', () => ({ + buildTraceSpans: vi.fn(() => ({ traceSpans: [] })), +})) + +vi.mock('@/lib/workflows/executor/execution-core', () => ({ + executeWorkflowCore: mockExecuteWorkflowCore, + wasExecutionFinalizedByCore: vi.fn().mockReturnValue(false), +})) + +vi.mock('@/lib/workflows/executor/human-in-the-loop-manager', () => ({ + PauseResumeManager: { + persistPauseResult: vi.fn(), + processQueuedResumes: vi.fn(), + }, +})) + +vi.mock('@/lib/workflows/persistence/utils', () => ({ + blockExistsInDeployment: mockBlockExistsInDeployment, + loadDeployedWorkflowState: 
mockLoadDeployedWorkflowState, +})) + +vi.mock('@/lib/workflows/schedules/utils', () => ({ + calculateNextRunTime: vi.fn(), + getScheduleTimeValues: mockGetScheduleTimeValues, + getSubBlockValue: mockGetSubBlockValue, +})) + +vi.mock('@/executor/execution/snapshot', () => ({ + ExecutionSnapshot: vi.fn(), +})) + +vi.mock('@/executor/utils/errors', () => ({ + hasExecutionResult: vi.fn().mockReturnValue(false), +})) + +vi.mock('@sim/logger', () => ({ + createLogger: vi.fn().mockReturnValue({ + info: vi.fn(), + warn: vi.fn(), + error: vi.fn(), + debug: vi.fn(), + }), +})) + +import { executeScheduleJob } from './schedule-execution' +import { executeWorkflowJob } from './workflow-execution' + +describe('async preprocessing correlation threading', () => { + beforeEach(() => { + vi.clearAllMocks() + mockLoadDeployedWorkflowState.mockResolvedValue({ + blocks: { + 'schedule-block': { + type: 'schedule', + }, + }, + edges: [], + loops: {}, + parallels: {}, + deploymentVersionId: 'deployment-1', + }) + mockGetSubBlockValue.mockReturnValue('daily') + mockGetScheduleTimeValues.mockReturnValue({ timezone: 'UTC' }) + }) + + it('does not pre-start workflow logging before core execution', async () => { + mockPreprocessExecution.mockResolvedValueOnce({ + success: true, + actorUserId: 'actor-1', + workflowRecord: { + id: 'workflow-1', + userId: 'owner-1', + workspaceId: 'workspace-1', + variables: {}, + }, + executionTimeout: {}, + }) + mockExecuteWorkflowCore.mockResolvedValueOnce({ + success: true, + status: 'success', + output: { ok: true }, + metadata: { duration: 10, userId: 'actor-1' }, + }) + + await executeWorkflowJob({ + workflowId: 'workflow-1', + userId: 'user-1', + triggerType: 'api', + executionId: 'execution-1', + requestId: 'request-1', + }) + + const loggingSession = mockLoggingSession.mock.calls[0]?.[0] + expect(loggingSession).toBeDefined() + expect(loggingSession.safeStart).not.toHaveBeenCalled() + expect(mockExecuteWorkflowCore).toHaveBeenCalledWith( + 
expect.objectContaining({ + loggingSession, + }) + ) + }) + + it('does not pre-start schedule logging before core execution', async () => { + mockPreprocessExecution.mockResolvedValueOnce({ + success: true, + actorUserId: 'actor-2', + workflowRecord: { + id: 'workflow-1', + userId: 'owner-1', + workspaceId: 'workspace-1', + variables: {}, + }, + executionTimeout: {}, + }) + mockExecuteWorkflowCore.mockResolvedValueOnce({ + success: true, + status: 'success', + output: { ok: true }, + metadata: { duration: 12, userId: 'actor-2' }, + }) + + await executeScheduleJob({ + scheduleId: 'schedule-1', + workflowId: 'workflow-1', + executionId: 'execution-2', + requestId: 'request-2', + now: '2025-01-01T00:00:00.000Z', + scheduledFor: '2025-01-01T00:00:00.000Z', + }) + + const loggingSession = mockLoggingSession.mock.calls[0]?.[0] + expect(loggingSession).toBeDefined() + expect(loggingSession.safeStart).not.toHaveBeenCalled() + expect(mockExecuteWorkflowCore).toHaveBeenCalledWith( + expect.objectContaining({ + loggingSession, + }) + ) + }) + + it('passes workflow correlation into preprocessing', async () => { + mockPreprocessExecution.mockResolvedValueOnce({ + success: false, + error: { message: 'preprocessing failed', statusCode: 500, logCreated: true }, + }) + + await expect( + executeWorkflowJob({ + workflowId: 'workflow-1', + userId: 'user-1', + triggerType: 'api', + executionId: 'execution-1', + requestId: 'request-1', + }) + ).rejects.toThrow('preprocessing failed') + + expect(mockPreprocessExecution).toHaveBeenCalledWith( + expect.objectContaining({ + triggerData: { + correlation: { + executionId: 'execution-1', + requestId: 'request-1', + source: 'workflow', + workflowId: 'workflow-1', + triggerType: 'api', + }, + }, + }) + ) + }) + + it('passes schedule correlation into preprocessing', async () => { + mockPreprocessExecution.mockResolvedValueOnce({ + success: false, + error: { message: 'auth failed', statusCode: 401, logCreated: true }, + }) + + await 
executeScheduleJob({ + scheduleId: 'schedule-1', + workflowId: 'workflow-1', + executionId: 'execution-2', + requestId: 'request-2', + now: '2025-01-01T00:00:00.000Z', + scheduledFor: '2025-01-01T00:00:00.000Z', + }) + + expect(mockPreprocessExecution).toHaveBeenCalledWith( + expect.objectContaining({ + triggerData: { + correlation: { + executionId: 'execution-2', + requestId: 'request-2', + source: 'schedule', + workflowId: 'workflow-1', + scheduleId: 'schedule-1', + triggerType: 'schedule', + scheduledFor: '2025-01-01T00:00:00.000Z', + }, + }, + }) + ) + }) +}) diff --git a/apps/sim/background/schedule-execution.ts b/apps/sim/background/schedule-execution.ts index 7b6fd62576..6d1cbd5939 100644 --- a/apps/sim/background/schedule-execution.ts +++ b/apps/sim/background/schedule-execution.ts @@ -4,11 +4,15 @@ import { task } from '@trigger.dev/sdk' import { Cron } from 'croner' import { eq } from 'drizzle-orm' import { v4 as uuidv4 } from 'uuid' +import type { AsyncExecutionCorrelation } from '@/lib/core/async-jobs/types' import { createTimeoutAbortController, getTimeoutErrorMessage } from '@/lib/core/execution-limits' import { preprocessExecution } from '@/lib/execution/preprocessing' import { LoggingSession } from '@/lib/logs/execution/logging-session' import { buildTraceSpans } from '@/lib/logs/execution/trace-spans/trace-spans' -import { executeWorkflowCore } from '@/lib/workflows/executor/execution-core' +import { + executeWorkflowCore, + wasExecutionFinalizedByCore, +} from '@/lib/workflows/executor/execution-core' import { PauseResumeManager } from '@/lib/workflows/executor/human-in-the-loop-manager' import { blockExistsInDeployment, @@ -36,6 +40,23 @@ type RunWorkflowResult = | { status: 'success'; blocks: Record; executionResult: ExecutionCoreResult } | { status: 'failure'; blocks: Record; executionResult: ExecutionCoreResult } +export function buildScheduleCorrelation( + payload: ScheduleExecutionPayload +): AsyncExecutionCorrelation { + const executionId = 
payload.executionId || uuidv4() + const requestId = payload.requestId || payload.correlation?.requestId || executionId.slice(0, 8) + + return { + executionId, + requestId, + source: 'schedule', + workflowId: payload.workflowId, + scheduleId: payload.scheduleId, + triggerType: payload.correlation?.triggerType || 'schedule', + scheduledFor: payload.scheduledFor || payload.correlation?.scheduledFor, + } +} + async function applyScheduleUpdate( scheduleId: string, updates: WorkflowScheduleUpdate, @@ -111,6 +132,7 @@ async function determineNextRunAfterError( async function runWorkflowExecution({ payload, + correlation, workflowRecord, actorUserId, loggingSession, @@ -119,6 +141,7 @@ async function runWorkflowExecution({ asyncTimeout, }: { payload: ScheduleExecutionPayload + correlation: AsyncExecutionCorrelation workflowRecord: WorkflowRecord actorUserId: string loggingSession: LoggingSession @@ -171,6 +194,7 @@ async function runWorkflowExecution({ useDraftState: false, startTime: new Date().toISOString(), isClientSession: false, + correlation, } const snapshot = new ExecutionSnapshot( @@ -249,6 +273,10 @@ async function runWorkflowExecution({ } catch (error: unknown) { logger.error(`[${requestId}] Early failure in scheduled workflow ${payload.workflowId}`, error) + if (wasExecutionFinalizedByCore(error, executionId)) { + throw error + } + const executionResult = hasExecutionResult(error) ? error.executionResult : undefined const { traceSpans } = executionResult ? 
buildTraceSpans(executionResult) : { traceSpans: [] } @@ -267,6 +295,9 @@ async function runWorkflowExecution({ export type ScheduleExecutionPayload = { scheduleId: string workflowId: string + executionId?: string + requestId?: string + correlation?: AsyncExecutionCorrelation blockId?: string cronExpression?: string lastRanAt?: string @@ -301,8 +332,9 @@ function calculateNextRunTime( } export async function executeScheduleJob(payload: ScheduleExecutionPayload) { - const executionId = uuidv4() - const requestId = executionId.slice(0, 8) + const correlation = buildScheduleCorrelation(payload) + const executionId = correlation.executionId + const requestId = correlation.requestId const now = new Date(payload.now) const scheduledFor = payload.scheduledFor ? new Date(payload.scheduledFor) : null @@ -329,6 +361,7 @@ export async function executeScheduleJob(payload: ScheduleExecutionPayload) { checkRateLimit: true, checkDeployment: true, loggingSession, + triggerData: { correlation }, }) if (!preprocessResult.success) { @@ -455,11 +488,16 @@ export async function executeScheduleJob(payload: ScheduleExecutionPayload) { return } + if (!workflowRecord.workspaceId) { + throw new Error(`Workflow ${payload.workflowId} has no associated workspace`) + } + logger.info(`[${requestId}] Executing scheduled workflow ${payload.workflowId}`) try { const executionResult = await runWorkflowExecution({ payload, + correlation, workflowRecord, actorUserId, loggingSession, diff --git a/apps/sim/background/webhook-execution.ts b/apps/sim/background/webhook-execution.ts index 26b2179012..8bac6e558c 100644 --- a/apps/sim/background/webhook-execution.ts +++ b/apps/sim/background/webhook-execution.ts @@ -4,6 +4,7 @@ import { createLogger } from '@sim/logger' import { task } from '@trigger.dev/sdk' import { eq } from 'drizzle-orm' import { v4 as uuidv4 } from 'uuid' +import type { AsyncExecutionCorrelation } from '@/lib/core/async-jobs/types' import { createTimeoutAbortController, 
getTimeoutErrorMessage } from '@/lib/core/execution-limits' import { IdempotencyService, webhookIdempotency } from '@/lib/core/idempotency' import { processExecutionFiles } from '@/lib/execution/files' @@ -12,7 +13,10 @@ import { LoggingSession } from '@/lib/logs/execution/logging-session' import { buildTraceSpans } from '@/lib/logs/execution/trace-spans/trace-spans' import { WebhookAttachmentProcessor } from '@/lib/webhooks/attachment-processor' import { fetchAndProcessAirtablePayloads, formatWebhookInput } from '@/lib/webhooks/utils.server' -import { executeWorkflowCore } from '@/lib/workflows/executor/execution-core' +import { + executeWorkflowCore, + wasExecutionFinalizedByCore, +} from '@/lib/workflows/executor/execution-core' import { PauseResumeManager } from '@/lib/workflows/executor/human-in-the-loop-manager' import { loadDeployedWorkflowState } from '@/lib/workflows/persistence/utils' import { resolveOAuthAccountId } from '@/app/api/auth/oauth/utils' @@ -25,6 +29,24 @@ import { getTrigger, isTriggerValid } from '@/triggers' const logger = createLogger('TriggerWebhookExecution') +export function buildWebhookCorrelation( + payload: WebhookExecutionPayload +): AsyncExecutionCorrelation { + const executionId = payload.executionId || uuidv4() + const requestId = payload.requestId || payload.correlation?.requestId || executionId.slice(0, 8) + + return { + executionId, + requestId, + source: 'webhook', + workflowId: payload.workflowId, + webhookId: payload.webhookId, + path: payload.path, + provider: payload.provider, + triggerType: payload.correlation?.triggerType || 'webhook', + } +} + /** * Process trigger outputs based on their schema definitions * Finds outputs marked as 'file' or 'file[]' and uploads them to execution storage @@ -99,6 +121,9 @@ export type WebhookExecutionPayload = { webhookId: string workflowId: string userId: string + executionId?: string + requestId?: string + correlation?: AsyncExecutionCorrelation provider: string body: any headers: 
Record @@ -109,8 +134,9 @@ export type WebhookExecutionPayload = { } export async function executeWebhookJob(payload: WebhookExecutionPayload) { - const executionId = uuidv4() - const requestId = executionId.slice(0, 8) + const correlation = buildWebhookCorrelation(payload) + const executionId = correlation.executionId + const requestId = correlation.requestId logger.info(`[${requestId}] Starting webhook execution`, { webhookId: payload.webhookId, @@ -128,7 +154,7 @@ export async function executeWebhookJob(payload: WebhookExecutionPayload) { ) const runOperation = async () => { - return await executeWebhookJobInternal(payload, executionId, requestId) + return await executeWebhookJobInternal(payload, correlation) } return await webhookIdempotency.executeWithIdempotency( @@ -156,9 +182,9 @@ async function resolveCredentialAccountUserId(credentialId: string): Promise callChain?: string[] } @@ -31,8 +52,9 @@ export type WorkflowExecutionPayload = { */ export async function executeWorkflowJob(payload: WorkflowExecutionPayload) { const workflowId = payload.workflowId - const executionId = payload.executionId || uuidv4() - const requestId = executionId.slice(0, 8) + const correlation = buildWorkflowCorrelation(payload) + const executionId = correlation.executionId + const requestId = correlation.requestId logger.info(`[${requestId}] Starting workflow execution job: ${workflowId}`, { userId: payload.userId, @@ -40,7 +62,7 @@ export async function executeWorkflowJob(payload: WorkflowExecutionPayload) { executionId, }) - const triggerType = payload.triggerType || 'api' + const triggerType = (correlation.triggerType || 'api') as CoreTriggerType const loggingSession = new LoggingSession(workflowId, executionId, triggerType, requestId) try { @@ -53,6 +75,7 @@ export async function executeWorkflowJob(payload: WorkflowExecutionPayload) { checkRateLimit: true, checkDeployment: true, loggingSession: loggingSession, + triggerData: { correlation }, }) if (!preprocessResult.success) { 
@@ -72,12 +95,6 @@ export async function executeWorkflowJob(payload: WorkflowExecutionPayload) { logger.info(`[${requestId}] Preprocessing passed. Using actor: ${actorUserId}`) - await loggingSession.safeStart({ - userId: actorUserId, - workspaceId, - variables: {}, - }) - const workflow = preprocessResult.workflowRecord! const metadata: ExecutionMetadata = { @@ -93,6 +110,7 @@ export async function executeWorkflowJob(payload: WorkflowExecutionPayload) { startTime: new Date().toISOString(), isClientSession: false, callChain: payload.callChain, + correlation, } const snapshot = new ExecutionSnapshot( @@ -178,6 +196,10 @@ export async function executeWorkflowJob(payload: WorkflowExecutionPayload) { executionId, }) + if (wasExecutionFinalizedByCore(error, executionId)) { + throw error + } + const executionResult = hasExecutionResult(error) ? error.executionResult : undefined const { traceSpans } = executionResult ? buildTraceSpans(executionResult) : { traceSpans: [] } diff --git a/apps/sim/executor/execution/types.ts b/apps/sim/executor/execution/types.ts index 933aaa2060..b343be9f97 100644 --- a/apps/sim/executor/execution/types.ts +++ b/apps/sim/executor/execution/types.ts @@ -1,4 +1,5 @@ import type { Edge } from 'reactflow' +import type { AsyncExecutionCorrelation } from '@/lib/core/async-jobs/types' import type { NodeMetadata } from '@/executor/dag/types' import type { BlockLog, @@ -34,6 +35,7 @@ export interface ExecutionMetadata { deploymentVersionId?: string } callChain?: string[] + correlation?: AsyncExecutionCorrelation } export interface SerializableExecutionState { diff --git a/apps/sim/lib/core/async-jobs/backends/trigger-dev.ts b/apps/sim/lib/core/async-jobs/backends/trigger-dev.ts index 18b01a3c22..436024e24c 100644 --- a/apps/sim/lib/core/async-jobs/backends/trigger-dev.ts +++ b/apps/sim/lib/core/async-jobs/backends/trigger-dev.ts @@ -80,6 +80,10 @@ export class TriggerDevJobQueue implements JobQueueBackend { const metadata: JobMetadata = { workflowId: 
payload?.workflowId as string | undefined, userId: payload?.userId as string | undefined, + correlation: + payload?.correlation && typeof payload.correlation === 'object' + ? (payload.correlation as JobMetadata['correlation']) + : undefined, } return { diff --git a/apps/sim/lib/core/async-jobs/types.ts b/apps/sim/lib/core/async-jobs/types.ts index 398513b69f..27137ddadc 100644 --- a/apps/sim/lib/core/async-jobs/types.ts +++ b/apps/sim/lib/core/async-jobs/types.ts @@ -22,6 +22,21 @@ export type JobStatus = (typeof JOB_STATUS)[keyof typeof JOB_STATUS] export type JobType = 'workflow-execution' | 'schedule-execution' | 'webhook-execution' +export type AsyncExecutionCorrelationSource = 'workflow' | 'schedule' | 'webhook' + +export interface AsyncExecutionCorrelation { + executionId: string + requestId: string + source: AsyncExecutionCorrelationSource + workflowId: string + triggerType?: string + webhookId?: string + scheduleId?: string + path?: string + provider?: string + scheduledFor?: string +} + export interface Job { id: string type: JobType @@ -40,6 +55,7 @@ export interface Job { export interface JobMetadata { workflowId?: string userId?: string + correlation?: AsyncExecutionCorrelation [key: string]: unknown } diff --git a/apps/sim/lib/execution/preprocessing.test.ts b/apps/sim/lib/execution/preprocessing.test.ts new file mode 100644 index 0000000000..6d9258b341 --- /dev/null +++ b/apps/sim/lib/execution/preprocessing.test.ts @@ -0,0 +1,92 @@ +/** + * @vitest-environment node + */ + +import { describe, expect, it, vi } from 'vitest' + +const { mockGetWorkspaceBilledAccountUserId } = vi.hoisted(() => ({ + mockGetWorkspaceBilledAccountUserId: vi.fn(), +})) + +vi.mock('@sim/db', () => ({ db: {} })) +vi.mock('@sim/db/schema', () => ({ workflow: {} })) +vi.mock('@sim/logger', () => ({ + createLogger: vi.fn().mockReturnValue({ + info: vi.fn(), + warn: vi.fn(), + error: vi.fn(), + debug: vi.fn(), + }), +})) +vi.mock('drizzle-orm', () => ({ eq: vi.fn() })) 
+vi.mock('@/lib/billing/calculations/usage-monitor', () => ({ + checkServerSideUsageLimits: vi.fn(), +})) +vi.mock('@/lib/billing/core/subscription', () => ({ + getHighestPrioritySubscription: vi.fn(), +})) +vi.mock('@/lib/core/execution-limits', () => ({ + getExecutionTimeout: vi.fn(() => 0), +})) +vi.mock('@/lib/core/rate-limiter/rate-limiter', () => ({ + RateLimiter: vi.fn(), +})) +vi.mock('@/lib/logs/execution/logging-session', () => ({ + LoggingSession: vi.fn(), +})) +vi.mock('@/lib/workspaces/utils', () => ({ + getWorkspaceBilledAccountUserId: mockGetWorkspaceBilledAccountUserId, +})) + +import { preprocessExecution } from './preprocessing' + +describe('preprocessExecution correlation logging', () => { + it('preserves trigger correlation when logging preprocessing failures', async () => { + mockGetWorkspaceBilledAccountUserId.mockResolvedValueOnce(null) + + const loggingSession = { + safeStart: vi.fn().mockResolvedValue(true), + safeCompleteWithError: vi.fn().mockResolvedValue(undefined), + } + + const correlation = { + executionId: 'execution-1', + requestId: 'request-1', + source: 'schedule' as const, + workflowId: 'workflow-1', + scheduleId: 'schedule-1', + triggerType: 'schedule', + scheduledFor: '2025-01-01T00:00:00.000Z', + } + + const result = await preprocessExecution({ + workflowId: 'workflow-1', + userId: 'unknown', + triggerType: 'schedule', + executionId: 'execution-1', + requestId: 'request-1', + loggingSession: loggingSession as any, + triggerData: { correlation }, + workflowRecord: { + id: 'workflow-1', + workspaceId: 'workspace-1', + isDeployed: true, + } as any, + }) + + expect(result).toMatchObject({ + success: false, + error: { + statusCode: 500, + logCreated: true, + }, + }) + + expect(loggingSession.safeStart).toHaveBeenCalledWith({ + userId: 'unknown', + workspaceId: 'workspace-1', + variables: {}, + triggerData: { correlation }, + }) + }) +}) diff --git a/apps/sim/lib/execution/preprocessing.ts b/apps/sim/lib/execution/preprocessing.ts 
index 90bbfcdfd3..a86c2ca0eb 100644 --- a/apps/sim/lib/execution/preprocessing.ts +++ b/apps/sim/lib/execution/preprocessing.ts @@ -8,7 +8,7 @@ import { getHighestPrioritySubscription } from '@/lib/billing/core/subscription' import { getExecutionTimeout } from '@/lib/core/execution-limits' import { RateLimiter } from '@/lib/core/rate-limiter/rate-limiter' import type { SubscriptionPlan } from '@/lib/core/rate-limiter/types' -import { LoggingSession } from '@/lib/logs/execution/logging-session' +import { LoggingSession, type SessionStartParams } from '@/lib/logs/execution/logging-session' import { getWorkspaceBilledAccountUserId } from '@/lib/workspaces/utils' import type { CoreTriggerType } from '@/stores/logs/filters/types' @@ -36,6 +36,7 @@ export interface PreprocessExecutionOptions { // Context information workspaceId?: string // If known, used for billing resolution loggingSession?: LoggingSession // If provided, will be used for error logging + triggerData?: SessionStartParams['triggerData'] isResumeContext?: boolean // Deprecated: no billing fallback is allowed useAuthenticatedUserAsActor?: boolean // If true, use the authenticated userId as actorUserId (for client-side executions and personal API keys) /** @deprecated No longer used - background/async executions always use deployed state */ @@ -85,6 +86,7 @@ export async function preprocessExecution( skipUsageLimits = false, workspaceId: providedWorkspaceId, loggingSession: providedLoggingSession, + triggerData, isResumeContext: _isResumeContext = false, useAuthenticatedUserAsActor = false, workflowRecord: prefetchedWorkflowRecord, @@ -125,6 +127,7 @@ export async function preprocessExecution( errorMessage: 'Workflow not found. 
The workflow may have been deleted or is no longer accessible.', loggingSession: providedLoggingSession, + triggerData, }) return { @@ -150,6 +153,7 @@ export async function preprocessExecution( workspaceId: providedWorkspaceId || '', errorMessage: 'Internal error while fetching workflow', loggingSession: providedLoggingSession, + triggerData, }) return { @@ -229,6 +233,7 @@ export async function preprocessExecution( workspaceId, errorMessage: BILLING_ERROR_MESSAGES.BILLING_REQUIRED, loggingSession: providedLoggingSession, + triggerData, }) return { @@ -252,6 +257,7 @@ export async function preprocessExecution( workspaceId, errorMessage: BILLING_ERROR_MESSAGES.BILLING_ERROR_GENERIC, loggingSession: providedLoggingSession, + triggerData, }) return { @@ -293,6 +299,7 @@ export async function preprocessExecution( usageCheck.message || `Usage limit exceeded: $${usageCheck.currentUsage?.toFixed(2)} used of $${usageCheck.limit?.toFixed(2)} limit. Please upgrade your plan to continue.`, loggingSession: providedLoggingSession, + triggerData, }) return { @@ -321,6 +328,7 @@ export async function preprocessExecution( errorMessage: 'Unable to determine usage limits. Execution blocked for security. Please contact support.', loggingSession: providedLoggingSession, + triggerData, }) return { @@ -363,6 +371,7 @@ export async function preprocessExecution( workspaceId, errorMessage: `Rate limit exceeded. ${rateLimitInfo.remaining} requests remaining. Resets at ${rateLimitInfo.resetAt.toISOString()}.`, loggingSession: providedLoggingSession, + triggerData, }) return { @@ -386,6 +395,7 @@ export async function preprocessExecution( workspaceId, errorMessage: 'Error checking rate limits. 
Execution blocked for safety.', loggingSession: providedLoggingSession, + triggerData, }) return { @@ -434,6 +444,7 @@ async function logPreprocessingError(params: { workspaceId: string errorMessage: string loggingSession?: LoggingSession + triggerData?: SessionStartParams['triggerData'] }): Promise { const { workflowId, @@ -444,6 +455,7 @@ async function logPreprocessingError(params: { workspaceId, errorMessage, loggingSession, + triggerData, } = params if (!workspaceId) { @@ -463,6 +475,7 @@ async function logPreprocessingError(params: { userId, workspaceId, variables: {}, + triggerData, }) await session.safeCompleteWithError({ diff --git a/apps/sim/lib/execution/preprocessing.webhook-correlation.test.ts b/apps/sim/lib/execution/preprocessing.webhook-correlation.test.ts new file mode 100644 index 0000000000..dbb975359c --- /dev/null +++ b/apps/sim/lib/execution/preprocessing.webhook-correlation.test.ts @@ -0,0 +1,93 @@ +/** + * @vitest-environment node + */ + +import { describe, expect, it, vi } from 'vitest' + +const { mockGetWorkspaceBilledAccountUserId } = vi.hoisted(() => ({ + mockGetWorkspaceBilledAccountUserId: vi.fn(), +})) + +vi.mock('@sim/db', () => ({ db: {} })) +vi.mock('@sim/db/schema', () => ({ workflow: {} })) +vi.mock('@sim/logger', () => ({ + createLogger: vi.fn().mockReturnValue({ + info: vi.fn(), + warn: vi.fn(), + error: vi.fn(), + debug: vi.fn(), + }), +})) +vi.mock('drizzle-orm', () => ({ eq: vi.fn() })) +vi.mock('@/lib/billing/calculations/usage-monitor', () => ({ + checkServerSideUsageLimits: vi.fn(), +})) +vi.mock('@/lib/billing/core/subscription', () => ({ + getHighestPrioritySubscription: vi.fn(), +})) +vi.mock('@/lib/core/execution-limits', () => ({ + getExecutionTimeout: vi.fn(() => 0), +})) +vi.mock('@/lib/core/rate-limiter/rate-limiter', () => ({ + RateLimiter: vi.fn(), +})) +vi.mock('@/lib/logs/execution/logging-session', () => ({ + LoggingSession: vi.fn(), +})) +vi.mock('@/lib/workspaces/utils', () => ({ + 
getWorkspaceBilledAccountUserId: mockGetWorkspaceBilledAccountUserId, +})) + +import { preprocessExecution } from './preprocessing' + +describe('preprocessExecution webhook correlation logging', () => { + it('preserves webhook correlation when logging preprocessing failures', async () => { + mockGetWorkspaceBilledAccountUserId.mockResolvedValueOnce(null) + + const loggingSession = { + safeStart: vi.fn().mockResolvedValue(true), + safeCompleteWithError: vi.fn().mockResolvedValue(undefined), + } + + const correlation = { + executionId: 'execution-webhook-1', + requestId: 'request-webhook-1', + source: 'webhook' as const, + workflowId: 'workflow-1', + webhookId: 'webhook-1', + path: 'incoming/slack', + provider: 'slack', + triggerType: 'webhook', + } + + const result = await preprocessExecution({ + workflowId: 'workflow-1', + userId: 'unknown', + triggerType: 'webhook', + executionId: 'execution-webhook-1', + requestId: 'request-webhook-1', + loggingSession: loggingSession as any, + triggerData: { correlation }, + workflowRecord: { + id: 'workflow-1', + workspaceId: 'workspace-1', + isDeployed: true, + } as any, + }) + + expect(result).toMatchObject({ + success: false, + error: { + statusCode: 500, + logCreated: true, + }, + }) + + expect(loggingSession.safeStart).toHaveBeenCalledWith({ + userId: 'unknown', + workspaceId: 'workspace-1', + variables: {}, + triggerData: { correlation }, + }) + }) +}) diff --git a/apps/sim/lib/logs/execution/logger.test.ts b/apps/sim/lib/logs/execution/logger.test.ts index 00f73c627e..a1bd9962d8 100644 --- a/apps/sim/lib/logs/execution/logger.test.ts +++ b/apps/sim/lib/logs/execution/logger.test.ts @@ -1,6 +1,6 @@ import { databaseMock, loggerMock } from '@sim/testing' import { beforeEach, describe, expect, test, vi } from 'vitest' -import { ExecutionLogger } from '@/lib/logs/execution/logger' +import { ExecutionLogger } from './logger' vi.mock('@sim/db', () => databaseMock) @@ -111,6 +111,57 @@ describe('ExecutionLogger', () => { 
test('should have getWorkflowExecution method', () => { expect(typeof logger.getWorkflowExecution).toBe('function') }) + + test('preserves start correlation data when execution completes', () => { + const loggerInstance = new ExecutionLogger() as any + + const completedData = loggerInstance.buildCompletedExecutionData({ + existingExecutionData: { + environment: { + variables: {}, + workflowId: 'workflow-123', + executionId: 'execution-123', + userId: 'user-123', + workspaceId: 'workspace-123', + }, + trigger: { + type: 'webhook', + source: 'webhook', + timestamp: '2025-01-01T00:00:00.000Z', + data: { + correlation: { + executionId: 'execution-123', + requestId: 'req-1234', + source: 'webhook', + workflowId: 'workflow-123', + webhookId: 'webhook-123', + path: 'incoming/slack', + triggerType: 'webhook', + }, + }, + }, + }, + traceSpans: [], + finalOutput: { ok: true }, + executionCost: { + tokens: { input: 0, output: 0, total: 0 }, + models: {}, + }, + }) + + expect(completedData.environment?.workflowId).toBe('workflow-123') + expect(completedData.trigger?.data?.correlation).toEqual({ + executionId: 'execution-123', + requestId: 'req-1234', + source: 'webhook', + workflowId: 'workflow-123', + webhookId: 'webhook-123', + path: 'incoming/slack', + triggerType: 'webhook', + }) + expect(completedData.correlation).toEqual(completedData.trigger?.data?.correlation) + expect(completedData.finalOutput).toEqual({ ok: true }) + }) }) describe('file extraction', () => { diff --git a/apps/sim/lib/logs/execution/logger.ts b/apps/sim/lib/logs/execution/logger.ts index c9e2fb8d65..7cb61bd705 100644 --- a/apps/sim/lib/logs/execution/logger.ts +++ b/apps/sim/lib/logs/execution/logger.ts @@ -49,6 +49,46 @@ export interface ToolCall { const logger = createLogger('ExecutionLogger') export class ExecutionLogger implements IExecutionLoggerService { + private buildCompletedExecutionData(params: { + existingExecutionData?: WorkflowExecutionLog['executionData'] + traceSpans?: TraceSpan[] + 
finalOutput: BlockOutputData + executionCost: { + tokens: { + input: number + output: number + total: number + } + models: NonNullable + } + executionState?: SerializableExecutionState + }): WorkflowExecutionLog['executionData'] { + const { existingExecutionData, traceSpans, finalOutput, executionCost, executionState } = params + + return { + ...(existingExecutionData?.environment + ? { environment: existingExecutionData.environment } + : {}), + ...(existingExecutionData?.trigger ? { trigger: existingExecutionData.trigger } : {}), + ...(existingExecutionData?.correlation || existingExecutionData?.trigger?.data?.correlation + ? { + correlation: + existingExecutionData?.correlation || + existingExecutionData?.trigger?.data?.correlation, + } + : {}), + traceSpans, + finalOutput, + tokens: { + input: executionCost.tokens.input, + output: executionCost.tokens.output, + total: executionCost.tokens.total, + }, + models: executionCost.models, + ...(executionState ? { executionState } : {}), + } + } + async startWorkflowExecution(params: { workflowId: string workspaceId: string @@ -131,6 +171,7 @@ export class ExecutionLogger implements IExecutionLoggerService { executionData: { environment, trigger, + ...(trigger.data?.correlation ? { correlation: trigger.data.correlation } : {}), }, cost: { total: BASE_EXECUTION_CHARGE, @@ -216,7 +257,7 @@ export class ExecutionLogger implements IExecutionLoggerService { .limit(1) const billingUserId = this.extractBillingUserId(existingLog?.executionData) const existingExecutionData = existingLog?.executionData as - | { traceSpans?: TraceSpan[] } + | WorkflowExecutionLog['executionData'] | undefined // Determine if workflow failed by checking trace spans for unhandled errors @@ -280,17 +321,13 @@ export class ExecutionLogger implements IExecutionLoggerService { endedAt: new Date(endedAt), totalDurationMs: totalDuration, files: executionFiles.length > 0 ? 
executionFiles : null, - executionData: { + executionData: this.buildCompletedExecutionData({ + existingExecutionData, traceSpans: redactedTraceSpans, finalOutput: redactedFinalOutput, - tokens: { - input: executionCost.tokens.input, - output: executionCost.tokens.output, - total: executionCost.tokens.total, - }, - models: executionCost.models, - ...(executionState ? { executionState } : {}), - }, + executionCost, + executionState, + }), cost: executionCost, }) .where(eq(workflowExecutionLogs.executionId, executionId)) diff --git a/apps/sim/lib/logs/execution/logging-session.test.ts b/apps/sim/lib/logs/execution/logging-session.test.ts new file mode 100644 index 0000000000..2f9bd2370f --- /dev/null +++ b/apps/sim/lib/logs/execution/logging-session.test.ts @@ -0,0 +1,199 @@ +import { beforeEach, describe, expect, it, vi } from 'vitest' + +const { completeWorkflowExecutionMock } = vi.hoisted(() => ({ + completeWorkflowExecutionMock: vi.fn(), +})) + +vi.mock('@sim/db', () => ({ + db: {}, +})) + +vi.mock('@sim/db/schema', () => ({ + workflowExecutionLogs: {}, +})) + +vi.mock('@sim/logger', () => ({ + createLogger: () => ({ + info: vi.fn(), + error: vi.fn(), + warn: vi.fn(), + debug: vi.fn(), + }), +})) + +vi.mock('drizzle-orm', () => ({ + eq: vi.fn(), + sql: vi.fn(), +})) + +vi.mock('@/lib/logs/execution/logger', () => ({ + executionLogger: { + startWorkflowExecution: vi.fn(), + completeWorkflowExecution: completeWorkflowExecutionMock, + }, +})) + +vi.mock('@/lib/logs/execution/logging-factory', () => ({ + calculateCostSummary: vi.fn().mockReturnValue({ + totalCost: 0, + totalInputCost: 0, + totalOutputCost: 0, + totalTokens: 0, + totalPromptTokens: 0, + totalCompletionTokens: 0, + baseExecutionCharge: 0, + modelCost: 0, + models: {}, + }), + createEnvironmentObject: vi.fn(), + createTriggerObject: vi.fn(), + loadDeployedWorkflowStateForLogging: vi.fn(), + loadWorkflowStateForExecution: vi.fn(), +})) + +import { LoggingSession } from './logging-session' + 
+describe('LoggingSession completion retries', () => { + beforeEach(() => { + vi.clearAllMocks() + }) + + it('keeps completion best-effort when a later error completion retries after full completion and fallback both fail', async () => { + const session = new LoggingSession('workflow-1', 'execution-1', 'api', 'req-1') + + completeWorkflowExecutionMock + .mockRejectedValueOnce(new Error('success finalize failed')) + .mockRejectedValueOnce(new Error('cost only failed')) + .mockResolvedValueOnce({}) + + await expect(session.safeComplete({ finalOutput: { ok: true } })).resolves.toBeUndefined() + + await expect( + session.safeCompleteWithError({ + error: { message: 'fallback error finalize' }, + }) + ).resolves.toBeUndefined() + + expect(completeWorkflowExecutionMock).toHaveBeenCalledTimes(3) + expect(session.hasCompleted()).toBe(true) + }) + + it('reuses the settled completion promise for repeated completion attempts', async () => { + const session = new LoggingSession('workflow-1', 'execution-1', 'api', 'req-1') + + completeWorkflowExecutionMock + .mockRejectedValueOnce(new Error('success finalize failed')) + .mockRejectedValueOnce(new Error('cost only failed')) + + await expect(session.safeComplete({ finalOutput: { ok: true } })).resolves.toBeUndefined() + + await expect(session.safeComplete({ finalOutput: { ok: true } })).resolves.toBeUndefined() + + expect(completeWorkflowExecutionMock).toHaveBeenCalledTimes(2) + }) + + it('starts a new error completion attempt after a non-error completion and fallback both fail', async () => { + const session = new LoggingSession('workflow-1', 'execution-3', 'api', 'req-1') + + completeWorkflowExecutionMock + .mockRejectedValueOnce(new Error('success finalize failed')) + .mockRejectedValueOnce(new Error('cost only failed')) + .mockResolvedValueOnce({}) + + await expect(session.safeComplete({ finalOutput: { ok: true } })).resolves.toBeUndefined() + + await expect( + session.safeCompleteWithError({ + error: { message: 'late error 
finalize' }, + }) + ).resolves.toBeUndefined() + + expect(completeWorkflowExecutionMock).toHaveBeenCalledTimes(3) + expect(completeWorkflowExecutionMock).toHaveBeenLastCalledWith( + expect.objectContaining({ + executionId: 'execution-3', + finalOutput: { error: 'late error finalize' }, + }) + ) + expect(session.hasCompleted()).toBe(true) + }) + + it('persists failed error semantics when completeWithError receives non-error trace spans', async () => { + const session = new LoggingSession('workflow-1', 'execution-4', 'api', 'req-1') + const traceSpans = [ + { + id: 'span-1', + name: 'Block A', + type: 'tool', + duration: 25, + startTime: '2026-03-13T10:00:00.000Z', + endTime: '2026-03-13T10:00:00.025Z', + status: 'success', + }, + ] + + completeWorkflowExecutionMock.mockResolvedValue({}) + + await expect( + session.safeCompleteWithError({ + error: { message: 'persist me as failed' }, + traceSpans, + }) + ).resolves.toBeUndefined() + + expect(completeWorkflowExecutionMock).toHaveBeenCalledWith( + expect.objectContaining({ + executionId: 'execution-4', + finalOutput: { error: 'persist me as failed' }, + traceSpans, + level: 'error', + status: 'failed', + }) + ) + }) + + it('marks paused completions as completed and deduplicates later attempts', async () => { + const session = new LoggingSession('workflow-1', 'execution-1', 'api', 'req-1') + + completeWorkflowExecutionMock.mockResolvedValue({}) + + await expect( + session.safeCompleteWithPause({ + endedAt: new Date().toISOString(), + totalDurationMs: 10, + traceSpans: [], + workflowInput: { hello: 'world' }, + }) + ).resolves.toBeUndefined() + + expect(session.hasCompleted()).toBe(true) + + await expect( + session.safeCompleteWithError({ + error: { message: 'should be ignored' }, + }) + ).resolves.toBeUndefined() + + expect(completeWorkflowExecutionMock).toHaveBeenCalledTimes(1) + }) + + it('falls back to cost-only logging when paused completion fails', async () => { + const session = new LoggingSession('workflow-1', 
'execution-2', 'api', 'req-1') + + completeWorkflowExecutionMock + .mockRejectedValueOnce(new Error('pause finalize failed')) + .mockResolvedValueOnce({}) + + await expect( + session.safeCompleteWithPause({ + endedAt: new Date().toISOString(), + totalDurationMs: 10, + traceSpans: [], + workflowInput: { hello: 'world' }, + }) + ).resolves.toBeUndefined() + + expect(session.hasCompleted()).toBe(true) + expect(completeWorkflowExecutionMock).toHaveBeenCalledTimes(2) + }) +}) diff --git a/apps/sim/lib/logs/execution/logging-session.ts b/apps/sim/lib/logs/execution/logging-session.ts index 10a4fc577e..d41b1eda06 100644 --- a/apps/sim/lib/logs/execution/logging-session.ts +++ b/apps/sim/lib/logs/execution/logging-session.ts @@ -19,13 +19,19 @@ import type { } from '@/lib/logs/types' import type { SerializableExecutionState } from '@/executor/execution/types' +type TriggerData = Record & { + correlation?: NonNullable['correlation'] +} + const logger = createLogger('LoggingSession') +type CompletionAttempt = 'complete' | 'error' | 'cancelled' | 'paused' + export interface SessionStartParams { userId?: string workspaceId: string variables?: Record - triggerData?: Record + triggerData?: TriggerData skipLogCreation?: boolean // For resume executions - reuse existing log entry deploymentVersionId?: string // ID of the deployment version used (null for manual/editor executions) } @@ -87,12 +93,15 @@ export class LoggingSession { private trigger?: ExecutionTrigger private environment?: ExecutionEnvironment private workflowState?: WorkflowState + private correlation?: NonNullable['correlation'] private isResume = false private completed = false /** Synchronous flag to prevent concurrent completion attempts (race condition guard) */ private completing = false /** Tracks the in-flight completion promise so callers can await it */ private completionPromise: Promise | null = null + private completionAttempt: CompletionAttempt | null = null + private completionAttemptFailed = false 
private accumulatedCost: AccumulatedCost = { total: BASE_EXECUTION_CHARGE, input: 0, @@ -216,6 +225,7 @@ export class LoggingSession { try { this.trigger = createTriggerObject(this.triggerType, triggerData) + this.correlation = triggerData?.correlation this.environment = createEnvironmentObject( this.workflowId, this.executionId, @@ -399,6 +409,8 @@ export class LoggingSession { costSummary, finalOutput: { error: message }, traceSpans: spans, + level: 'error', + status: 'failed', }) this.completed = true @@ -539,6 +551,11 @@ export class LoggingSession { } async completeWithPause(params: SessionPausedParams = {}): Promise { + if (this.completed || this.completing) { + return + } + this.completing = true + try { const { endedAt, totalDurationMs, traceSpans, workflowInput } = params @@ -570,6 +587,8 @@ export class LoggingSession { status: 'pending', }) + this.completed = true + try { const { PlatformEvents, createOTelSpansForWorkflowExecution } = await import( '@/lib/core/telemetry' @@ -606,6 +625,7 @@ export class LoggingSession { ) } } catch (pauseError) { + this.completing = false logger.error(`Failed to complete paused logging for execution ${this.executionId}:`, { requestId: this.requestId, workflowId: this.workflowId, @@ -633,6 +653,7 @@ export class LoggingSession { try { const { userId, workspaceId, variables, triggerData, deploymentVersionId } = params this.trigger = createTriggerObject(this.triggerType, triggerData) + this.correlation = triggerData?.correlation this.environment = createEnvironmentObject( this.workflowId, this.executionId, @@ -688,12 +709,36 @@ export class LoggingSession { } } - async safeComplete(params: SessionCompleteParams = {}): Promise { - if (this.completionPromise) return this.completionPromise - this.completionPromise = this._safeCompleteImpl(params) + hasCompleted(): boolean { + return this.completed + } + + private shouldStartNewCompletionAttempt(attempt: CompletionAttempt): boolean { + return this.completionAttemptFailed && 
this.completionAttempt !== 'error' && attempt === 'error' + } + + private runCompletionAttempt( + attempt: CompletionAttempt, + run: () => Promise + ): Promise { + if (this.completionPromise && !this.shouldStartNewCompletionAttempt(attempt)) { + return this.completionPromise + } + + this.completionAttempt = attempt + this.completionAttemptFailed = false + this.completionPromise = run().catch((error) => { + this.completionAttemptFailed = true + throw error + }) + return this.completionPromise } + async safeComplete(params: SessionCompleteParams = {}): Promise { + return this.runCompletionAttempt('complete', () => this._safeCompleteImpl(params)) + } + private async _safeCompleteImpl(params: SessionCompleteParams = {}): Promise { try { await this.complete(params) @@ -714,9 +759,7 @@ export class LoggingSession { } async safeCompleteWithError(params?: SessionErrorCompleteParams): Promise { - if (this.completionPromise) return this.completionPromise - this.completionPromise = this._safeCompleteWithErrorImpl(params) - return this.completionPromise + return this.runCompletionAttempt('error', () => this._safeCompleteWithErrorImpl(params)) } private async _safeCompleteWithErrorImpl(params?: SessionErrorCompleteParams): Promise { @@ -741,9 +784,9 @@ export class LoggingSession { } async safeCompleteWithCancellation(params?: SessionCancelledParams): Promise { - if (this.completionPromise) return this.completionPromise - this.completionPromise = this._safeCompleteWithCancellationImpl(params) - return this.completionPromise + return this.runCompletionAttempt('cancelled', () => + this._safeCompleteWithCancellationImpl(params) + ) } private async _safeCompleteWithCancellationImpl(params?: SessionCancelledParams): Promise { @@ -767,9 +810,7 @@ export class LoggingSession { } async safeCompleteWithPause(params?: SessionPausedParams): Promise { - if (this.completionPromise) return this.completionPromise - this.completionPromise = this._safeCompleteWithPauseImpl(params) - return 
this.completionPromise + return this.runCompletionAttempt('paused', () => this._safeCompleteWithPauseImpl(params)) } private async _safeCompleteWithPauseImpl(params?: SessionPausedParams): Promise { @@ -880,6 +921,7 @@ export class LoggingSession { ) } catch (fallbackError) { this.completing = false + this.completionAttemptFailed = true logger.error( `[${this.requestId || 'unknown'}] Cost-only fallback also failed for execution ${this.executionId}:`, { error: fallbackError instanceof Error ? fallbackError.message : String(fallbackError) } diff --git a/apps/sim/lib/logs/types.ts b/apps/sim/lib/logs/types.ts index eb568398fc..c785c3037d 100644 --- a/apps/sim/lib/logs/types.ts +++ b/apps/sim/lib/logs/types.ts @@ -1,4 +1,5 @@ import type { Edge } from 'reactflow' +import type { AsyncExecutionCorrelation } from '@/lib/core/async-jobs/types' import type { ParentIteration, SerializableExecutionState } from '@/executor/execution/types' import type { BlockLog, NormalizedBlockOutput } from '@/executor/types' import type { DeploymentStatus } from '@/stores/workflows/registry/types' @@ -57,7 +58,9 @@ import type { CoreTriggerType } from '@/stores/logs/filters/types' export interface ExecutionTrigger { type: CoreTriggerType | string source: string - data?: Record + data?: Record & { + correlation?: AsyncExecutionCorrelation + } timestamp: string } @@ -101,6 +104,7 @@ export interface WorkflowExecutionLog { executionData: { environment?: ExecutionEnvironment trigger?: ExecutionTrigger + correlation?: AsyncExecutionCorrelation traceSpans?: TraceSpan[] tokens?: { input?: number; output?: number; total?: number } models?: Record< diff --git a/apps/sim/lib/webhooks/processor.test.ts b/apps/sim/lib/webhooks/processor.test.ts new file mode 100644 index 0000000000..20ae4408cd --- /dev/null +++ b/apps/sim/lib/webhooks/processor.test.ts @@ -0,0 +1,219 @@ +/** + * @vitest-environment node + */ + +import { createMockRequest } from '@sim/testing' +import { beforeEach, describe, expect, it, 
vi } from 'vitest' + +const { + mockUuidV4, + mockPreprocessExecution, + mockEnqueue, + mockGetJobQueue, + mockShouldExecuteInline, +} = vi.hoisted(() => ({ + mockUuidV4: vi.fn(), + mockPreprocessExecution: vi.fn(), + mockEnqueue: vi.fn(), + mockGetJobQueue: vi.fn(), + mockShouldExecuteInline: vi.fn(), +})) + +vi.mock('@sim/db', () => ({ + db: {}, + webhook: {}, + workflow: {}, + workflowDeploymentVersion: {}, +})) + +vi.mock('@sim/db/schema', () => ({ + credentialSet: {}, + subscription: {}, +})) + +vi.mock('@sim/logger', () => ({ + createLogger: vi.fn().mockReturnValue({ + info: vi.fn(), + warn: vi.fn(), + error: vi.fn(), + debug: vi.fn(), + }), +})) + +vi.mock('drizzle-orm', () => ({ + and: vi.fn(), + eq: vi.fn(), + isNull: vi.fn(), + or: vi.fn(), +})) + +vi.mock('uuid', () => ({ + v4: mockUuidV4, +})) + +vi.mock('@/lib/billing/subscriptions/utils', () => ({ + checkEnterprisePlan: vi.fn().mockReturnValue(true), + checkTeamPlan: vi.fn().mockReturnValue(true), +})) + +vi.mock('@/lib/core/async-jobs', () => ({ + getInlineJobQueue: vi.fn(), + getJobQueue: mockGetJobQueue, + shouldExecuteInline: mockShouldExecuteInline, +})) + +vi.mock('@/lib/core/config/feature-flags', () => ({ + isProd: false, +})) + +vi.mock('@/lib/core/security/encryption', () => ({ + safeCompare: vi.fn().mockReturnValue(true), +})) + +vi.mock('@/lib/environment/utils', () => ({ + getEffectiveDecryptedEnv: vi.fn().mockResolvedValue({}), +})) + +vi.mock('@/lib/execution/preprocessing', () => ({ + preprocessExecution: mockPreprocessExecution, +})) + +vi.mock('@/lib/webhooks/pending-verification', () => ({ + getPendingWebhookVerification: vi.fn(), + matchesPendingWebhookVerificationProbe: vi.fn().mockReturnValue(false), + requiresPendingWebhookVerification: vi.fn().mockReturnValue(false), +})) + +vi.mock('@/lib/webhooks/utils', () => ({ + convertSquareBracketsToTwiML: vi.fn((value: string) => value), +})) + +vi.mock('@/lib/webhooks/utils.server', () => ({ + handleSlackChallenge: 
vi.fn().mockReturnValue(null), + handleWhatsAppVerification: vi.fn().mockResolvedValue(null), + validateAttioSignature: vi.fn().mockReturnValue(true), + validateCalcomSignature: vi.fn().mockReturnValue(true), + validateCirclebackSignature: vi.fn().mockReturnValue(true), + validateFirefliesSignature: vi.fn().mockReturnValue(true), + validateGitHubSignature: vi.fn().mockReturnValue(true), + validateJiraSignature: vi.fn().mockReturnValue(true), + validateLinearSignature: vi.fn().mockReturnValue(true), + validateMicrosoftTeamsSignature: vi.fn().mockReturnValue(true), + validateTwilioSignature: vi.fn().mockResolvedValue(true), + validateTypeformSignature: vi.fn().mockReturnValue(true), + verifyProviderWebhook: vi.fn().mockReturnValue(null), +})) + +vi.mock('@/background/webhook-execution', () => ({ + executeWebhookJob: vi.fn().mockResolvedValue({ success: true }), +})) + +vi.mock('@/executor/utils/reference-validation', () => ({ + resolveEnvVarReferences: vi.fn((value: string) => value), +})) + +vi.mock('@/triggers/confluence/utils', () => ({ + isConfluencePayloadMatch: vi.fn().mockReturnValue(true), +})) + +vi.mock('@/triggers/constants', () => ({ + isPollingWebhookProvider: vi.fn((provider: string) => provider === 'gmail'), +})) + +vi.mock('@/triggers/github/utils', () => ({ + isGitHubEventMatch: vi.fn().mockReturnValue(true), +})) + +vi.mock('@/triggers/hubspot/utils', () => ({ + isHubSpotContactEventMatch: vi.fn().mockReturnValue(true), +})) + +vi.mock('@/triggers/jira/utils', () => ({ + isJiraEventMatch: vi.fn().mockReturnValue(true), +})) + +import { checkWebhookPreprocessing, queueWebhookExecution } from '@/lib/webhooks/processor' + +describe('webhook processor execution identity', () => { + beforeEach(() => { + vi.clearAllMocks() + mockPreprocessExecution.mockResolvedValue({ + success: true, + actorUserId: 'actor-user-1', + }) + mockEnqueue.mockResolvedValue('job-1') + mockGetJobQueue.mockResolvedValue({ enqueue: mockEnqueue }) + 
mockShouldExecuteInline.mockReturnValue(false) + mockUuidV4.mockReturnValue('generated-execution-id') + }) + + it('reuses preprocessing execution identity when queueing a polling webhook', async () => { + const preprocessingResult = await checkWebhookPreprocessing( + { + id: 'workflow-1', + userId: 'owner-1', + workspaceId: 'workspace-1', + }, + { + id: 'webhook-1', + path: 'incoming/gmail', + provider: 'gmail', + }, + 'request-1' + ) + + expect(preprocessingResult).toMatchObject({ + error: null, + actorUserId: 'actor-user-1', + executionId: 'generated-execution-id', + correlation: { + executionId: 'generated-execution-id', + requestId: 'request-1', + source: 'webhook', + workflowId: 'workflow-1', + webhookId: 'webhook-1', + path: 'incoming/gmail', + provider: 'gmail', + triggerType: 'webhook', + }, + }) + + await queueWebhookExecution( + { + id: 'webhook-1', + path: 'incoming/gmail', + provider: 'gmail', + providerConfig: {}, + blockId: 'block-1', + }, + { + id: 'workflow-1', + workspaceId: 'workspace-1', + }, + { event: 'message.received' }, + createMockRequest('POST', { event: 'message.received' }) as any, + { + requestId: 'request-1', + path: 'incoming/gmail', + actorUserId: preprocessingResult.actorUserId, + executionId: preprocessingResult.executionId, + correlation: preprocessingResult.correlation, + } + ) + + expect(mockUuidV4).toHaveBeenCalledTimes(1) + expect(mockEnqueue).toHaveBeenCalledWith( + 'webhook-execution', + expect.objectContaining({ + executionId: 'generated-execution-id', + requestId: 'request-1', + correlation: preprocessingResult.correlation, + }), + expect.objectContaining({ + metadata: expect.objectContaining({ + correlation: preprocessingResult.correlation, + }), + }) + ) + }) +}) diff --git a/apps/sim/lib/webhooks/processor.ts b/apps/sim/lib/webhooks/processor.ts index 46d8133721..9bb5978b28 100644 --- a/apps/sim/lib/webhooks/processor.ts +++ b/apps/sim/lib/webhooks/processor.ts @@ -6,6 +6,7 @@ import { type NextRequest, NextResponse } 
from 'next/server' import { v4 as uuidv4 } from 'uuid' import { checkEnterprisePlan, checkTeamPlan } from '@/lib/billing/subscriptions/utils' import { getInlineJobQueue, getJobQueue, shouldExecuteInline } from '@/lib/core/async-jobs' +import type { AsyncExecutionCorrelation } from '@/lib/core/async-jobs/types' import { isProd } from '@/lib/core/config/feature-flags' import { safeCompare } from '@/lib/core/security/encryption' import { getEffectiveDecryptedEnv } from '@/lib/environment/utils' @@ -46,11 +47,15 @@ export interface WebhookProcessorOptions { path?: string webhookId?: string actorUserId?: string + executionId?: string + correlation?: AsyncExecutionCorrelation } export interface WebhookPreprocessingResult { error: NextResponse | null actorUserId?: string + executionId?: string + correlation?: AsyncExecutionCorrelation } function getExternalUrl(request: NextRequest): string { @@ -242,7 +247,10 @@ export function handleProviderReachabilityTest( logger.info( `[${requestId}] Grain reachability test detected - returning 200 for webhook verification` ) - return NextResponse.json({ status: 'ok', message: 'Webhook endpoint verified' }) + return NextResponse.json({ + status: 'ok', + message: 'Webhook endpoint verified', + }) } } @@ -323,7 +331,10 @@ export function handlePreDeploymentVerification( logger.info( `[${requestId}] ${webhook.provider} webhook - block not in deployment, returning 200 OK for URL validation` ) - return NextResponse.json({ status: 'ok', message: 'Webhook endpoint verified' }) + return NextResponse.json({ + status: 'ok', + message: 'Webhook endpoint verified', + }) } return null } @@ -501,7 +512,9 @@ export async function verifyProviderAuth( foundWorkflow.workspaceId ) } catch (error) { - logger.error(`[${requestId}] Failed to fetch environment variables`, { error }) + logger.error(`[${requestId}] Failed to fetch environment variables`, { + error, + }) } // Step 2: Resolve {{VARIABLE}} references in providerConfig @@ -516,7 +529,9 @@ export 
async function verifyProviderAuth( logger.warn( `[${requestId}] Microsoft Teams outgoing webhook missing HMAC authorization header` ) - return new NextResponse('Unauthorized - Missing HMAC signature', { status: 401 }) + return new NextResponse('Unauthorized - Missing HMAC signature', { + status: 401, + }) } const isValidSignature = validateMicrosoftTeamsSignature( @@ -527,7 +542,9 @@ export async function verifyProviderAuth( if (!isValidSignature) { logger.warn(`[${requestId}] Microsoft Teams HMAC signature verification failed`) - return new NextResponse('Unauthorized - Invalid HMAC signature', { status: 401 }) + return new NextResponse('Unauthorized - Invalid HMAC signature', { + status: 401, + }) } } } @@ -563,7 +580,9 @@ export async function verifyProviderAuth( if (!isTokenValid) { logger.warn(`[${requestId}] Google Forms webhook authentication failed`) - return new NextResponse('Unauthorized - Invalid secret', { status: 401 }) + return new NextResponse('Unauthorized - Invalid secret', { + status: 401, + }) } } } @@ -577,7 +596,9 @@ export async function verifyProviderAuth( if (!signature) { logger.warn(`[${requestId}] Twilio Voice webhook missing signature header`) - return new NextResponse('Unauthorized - Missing Twilio signature', { status: 401 }) + return new NextResponse('Unauthorized - Missing Twilio signature', { + status: 401, + }) } let params: Record = {} @@ -591,7 +612,9 @@ export async function verifyProviderAuth( `[${requestId}] Error parsing Twilio webhook body for signature validation:`, error ) - return new NextResponse('Bad Request - Invalid body format', { status: 400 }) + return new NextResponse('Bad Request - Invalid body format', { + status: 400, + }) } const fullUrl = getExternalUrl(request) @@ -604,7 +627,9 @@ export async function verifyProviderAuth( paramsCount: Object.keys(params).length, authTokenLength: authToken.length, }) - return new NextResponse('Unauthorized - Invalid Twilio signature', { status: 401 }) + return new 
NextResponse('Unauthorized - Invalid Twilio signature', { + status: 401, + }) } } } @@ -617,7 +642,9 @@ export async function verifyProviderAuth( if (!signature) { logger.warn(`[${requestId}] Typeform webhook missing signature header`) - return new NextResponse('Unauthorized - Missing Typeform signature', { status: 401 }) + return new NextResponse('Unauthorized - Missing Typeform signature', { + status: 401, + }) } const isValidSignature = validateTypeformSignature(secret, signature, rawBody) @@ -627,7 +654,9 @@ export async function verifyProviderAuth( signatureLength: signature.length, secretLength: secret.length, }) - return new NextResponse('Unauthorized - Invalid Typeform signature', { status: 401 }) + return new NextResponse('Unauthorized - Invalid Typeform signature', { + status: 401, + }) } } } @@ -644,7 +673,9 @@ export async function verifyProviderAuth( if (!signature) { logger.warn(`[${requestId}] Attio webhook missing signature header`) - return new NextResponse('Unauthorized - Missing Attio signature', { status: 401 }) + return new NextResponse('Unauthorized - Missing Attio signature', { + status: 401, + }) } const isValidSignature = validateAttioSignature(secret, signature, rawBody) @@ -654,7 +685,9 @@ export async function verifyProviderAuth( signatureLength: signature.length, secretLength: secret.length, }) - return new NextResponse('Unauthorized - Invalid Attio signature', { status: 401 }) + return new NextResponse('Unauthorized - Invalid Attio signature', { + status: 401, + }) } } } @@ -667,7 +700,9 @@ export async function verifyProviderAuth( if (!signature) { logger.warn(`[${requestId}] Linear webhook missing signature header`) - return new NextResponse('Unauthorized - Missing Linear signature', { status: 401 }) + return new NextResponse('Unauthorized - Missing Linear signature', { + status: 401, + }) } const isValidSignature = validateLinearSignature(secret, signature, rawBody) @@ -677,7 +712,9 @@ export async function verifyProviderAuth( 
signatureLength: signature.length, secretLength: secret.length, }) - return new NextResponse('Unauthorized - Invalid Linear signature', { status: 401 }) + return new NextResponse('Unauthorized - Invalid Linear signature', { + status: 401, + }) } } } @@ -690,7 +727,9 @@ export async function verifyProviderAuth( if (!signature) { logger.warn(`[${requestId}] Circleback webhook missing signature header`) - return new NextResponse('Unauthorized - Missing Circleback signature', { status: 401 }) + return new NextResponse('Unauthorized - Missing Circleback signature', { + status: 401, + }) } const isValidSignature = validateCirclebackSignature(secret, signature, rawBody) @@ -700,7 +739,9 @@ export async function verifyProviderAuth( signatureLength: signature.length, secretLength: secret.length, }) - return new NextResponse('Unauthorized - Invalid Circleback signature', { status: 401 }) + return new NextResponse('Unauthorized - Invalid Circleback signature', { + status: 401, + }) } } } @@ -713,7 +754,9 @@ export async function verifyProviderAuth( if (!signature) { logger.warn(`[${requestId}] Cal.com webhook missing signature header`) - return new NextResponse('Unauthorized - Missing Cal.com signature', { status: 401 }) + return new NextResponse('Unauthorized - Missing Cal.com signature', { + status: 401, + }) } const isValidSignature = validateCalcomSignature(secret, signature, rawBody) @@ -723,7 +766,9 @@ export async function verifyProviderAuth( signatureLength: signature.length, secretLength: secret.length, }) - return new NextResponse('Unauthorized - Invalid Cal.com signature', { status: 401 }) + return new NextResponse('Unauthorized - Invalid Cal.com signature', { + status: 401, + }) } } } @@ -736,7 +781,9 @@ export async function verifyProviderAuth( if (!signature) { logger.warn(`[${requestId}] Jira webhook missing signature header`) - return new NextResponse('Unauthorized - Missing Jira signature', { status: 401 }) + return new NextResponse('Unauthorized - Missing 
Jira signature', { + status: 401, + }) } const isValidSignature = validateJiraSignature(secret, signature, rawBody) @@ -746,7 +793,9 @@ export async function verifyProviderAuth( signatureLength: signature.length, secretLength: secret.length, }) - return new NextResponse('Unauthorized - Invalid Jira signature', { status: 401 }) + return new NextResponse('Unauthorized - Invalid Jira signature', { + status: 401, + }) } } } @@ -759,7 +808,9 @@ export async function verifyProviderAuth( if (!signature) { logger.warn(`[${requestId}] Confluence webhook missing signature header`) - return new NextResponse('Unauthorized - Missing Confluence signature', { status: 401 }) + return new NextResponse('Unauthorized - Missing Confluence signature', { + status: 401, + }) } const isValidSignature = validateJiraSignature(secret, signature, rawBody) @@ -769,7 +820,9 @@ export async function verifyProviderAuth( signatureLength: signature.length, secretLength: secret.length, }) - return new NextResponse('Unauthorized - Invalid Confluence signature', { status: 401 }) + return new NextResponse('Unauthorized - Invalid Confluence signature', { + status: 401, + }) } } } @@ -785,7 +838,9 @@ export async function verifyProviderAuth( if (!signature) { logger.warn(`[${requestId}] GitHub webhook missing signature header`) - return new NextResponse('Unauthorized - Missing GitHub signature', { status: 401 }) + return new NextResponse('Unauthorized - Missing GitHub signature', { + status: 401, + }) } const isValidSignature = validateGitHubSignature(secret, signature, rawBody) @@ -796,7 +851,9 @@ export async function verifyProviderAuth( secretLength: secret.length, usingSha256: !!signature256, }) - return new NextResponse('Unauthorized - Invalid GitHub signature', { status: 401 }) + return new NextResponse('Unauthorized - Invalid GitHub signature', { + status: 401, + }) } } } @@ -809,7 +866,9 @@ export async function verifyProviderAuth( if (!signature) { logger.warn(`[${requestId}] Fireflies webhook 
missing signature header`) - return new NextResponse('Unauthorized - Missing Fireflies signature', { status: 401 }) + return new NextResponse('Unauthorized - Missing Fireflies signature', { + status: 401, + }) } const isValidSignature = validateFirefliesSignature(secret, signature, rawBody) @@ -819,7 +878,9 @@ export async function verifyProviderAuth( signatureLength: signature.length, secretLength: secret.length, }) - return new NextResponse('Unauthorized - Invalid Fireflies signature', { status: 401 }) + return new NextResponse('Unauthorized - Invalid Fireflies signature', { + status: 401, + }) } } } @@ -872,6 +933,16 @@ export async function checkWebhookPreprocessing( ): Promise { try { const executionId = uuidv4() + const correlation = { + executionId, + requestId, + source: 'webhook' as const, + workflowId: foundWorkflow.id, + webhookId: foundWebhook.id, + path: foundWebhook.path, + provider: foundWebhook.provider, + triggerType: 'webhook', + } const preprocessResult = await preprocessExecution({ workflowId: foundWorkflow.id, @@ -879,6 +950,7 @@ export async function checkWebhookPreprocessing( triggerType: 'webhook', executionId, requestId, + triggerData: { correlation }, checkRateLimit: true, checkDeployment: true, workspaceId: foundWorkflow.workspaceId, @@ -905,10 +977,17 @@ export async function checkWebhookPreprocessing( } } - return { error: NextResponse.json({ error: error.message }, { status: error.statusCode }) } + return { + error: NextResponse.json({ error: error.message }, { status: error.statusCode }), + } } - return { error: null, actorUserId: preprocessResult.actorUserId } + return { + error: null, + actorUserId: preprocessResult.actorUserId, + executionId, + correlation, + } } catch (preprocessError) { logger.error(`[${requestId}] Error during webhook preprocessing:`, preprocessError) @@ -1034,7 +1113,10 @@ export async function queueWebhookExecution( bodyKeys: Object.keys(body), } ) - return NextResponse.json({ status: 'skipped', reason: 
'event_type_mismatch' }) + return NextResponse.json({ + status: 'skipped', + reason: 'event_type_mismatch', + }) } } } @@ -1132,10 +1214,27 @@ export async function queueWebhookExecution( return NextResponse.json({ error: 'Unable to resolve billing account' }, { status: 500 }) } + const executionId = options.executionId ?? uuidv4() + const correlation = + options.correlation ?? + ({ + executionId, + requestId: options.requestId, + source: 'webhook' as const, + workflowId: foundWorkflow.id, + webhookId: foundWebhook.id, + path: options.path || foundWebhook.path, + provider: foundWebhook.provider, + triggerType: 'webhook', + } satisfies AsyncExecutionCorrelation) + const payload = { webhookId: foundWebhook.id, workflowId: foundWorkflow.id, userId: actorUserId, + executionId, + requestId: options.requestId, + correlation, provider: foundWebhook.provider, body, headers, @@ -1150,7 +1249,11 @@ export async function queueWebhookExecution( if (isPolling && !shouldExecuteInline()) { const jobQueue = await getJobQueue() const jobId = await jobQueue.enqueue('webhook-execution', payload, { - metadata: { workflowId: foundWorkflow.id, userId: actorUserId }, + metadata: { + workflowId: foundWorkflow.id, + userId: actorUserId, + correlation, + }, }) logger.info( `[${options.requestId}] Queued polling webhook execution task ${jobId} for ${foundWebhook.provider} webhook via job queue` @@ -1158,7 +1261,11 @@ export async function queueWebhookExecution( } else { const jobQueue = await getInlineJobQueue() const jobId = await jobQueue.enqueue('webhook-execution', payload, { - metadata: { workflowId: foundWorkflow.id, userId: actorUserId }, + metadata: { + workflowId: foundWorkflow.id, + userId: actorUserId, + correlation, + }, }) logger.info( `[${options.requestId}] Executing ${foundWebhook.provider} webhook ${jobId} inline` diff --git a/apps/sim/lib/workflows/executor/execution-core.test.ts b/apps/sim/lib/workflows/executor/execution-core.test.ts new file mode 100644 index 
0000000000..21c8a5b6b7 --- /dev/null +++ b/apps/sim/lib/workflows/executor/execution-core.test.ts @@ -0,0 +1,582 @@ +import { beforeEach, describe, expect, it, vi } from 'vitest' + +const { + loadWorkflowFromNormalizedTablesMock, + loadDeployedWorkflowStateMock, + getPersonalAndWorkspaceEnvMock, + mergeSubblockStateWithValuesMock, + safeStartMock, + safeCompleteMock, + safeCompleteWithErrorMock, + safeCompleteWithCancellationMock, + safeCompleteWithPauseMock, + hasCompletedMock, + updateWorkflowRunCountsMock, + clearExecutionCancellationMock, + buildTraceSpansMock, + serializeWorkflowMock, + executorExecuteMock, +} = vi.hoisted(() => ({ + loadWorkflowFromNormalizedTablesMock: vi.fn(), + loadDeployedWorkflowStateMock: vi.fn(), + getPersonalAndWorkspaceEnvMock: vi.fn(), + mergeSubblockStateWithValuesMock: vi.fn(), + safeStartMock: vi.fn(), + safeCompleteMock: vi.fn(), + safeCompleteWithErrorMock: vi.fn(), + safeCompleteWithCancellationMock: vi.fn(), + safeCompleteWithPauseMock: vi.fn(), + hasCompletedMock: vi.fn(), + updateWorkflowRunCountsMock: vi.fn(), + clearExecutionCancellationMock: vi.fn(), + buildTraceSpansMock: vi.fn(), + serializeWorkflowMock: vi.fn(), + executorExecuteMock: vi.fn(), +})) + +vi.mock('@sim/logger', () => ({ + createLogger: () => ({ + info: vi.fn(), + error: vi.fn(), + warn: vi.fn(), + debug: vi.fn(), + }), +})) + +vi.mock('@/lib/environment/utils', () => ({ + getPersonalAndWorkspaceEnv: getPersonalAndWorkspaceEnvMock, +})) + +vi.mock('@/lib/execution/cancellation', () => ({ + clearExecutionCancellation: clearExecutionCancellationMock, +})) + +vi.mock('@/lib/logs/execution/trace-spans/trace-spans', () => ({ + buildTraceSpans: buildTraceSpansMock, +})) + +vi.mock('@/lib/workflows/persistence/utils', () => ({ + loadWorkflowFromNormalizedTables: loadWorkflowFromNormalizedTablesMock, + loadDeployedWorkflowState: loadDeployedWorkflowStateMock, +})) + +vi.mock('@/lib/workflows/subblocks', () => ({ + mergeSubblockStateWithValues: 
mergeSubblockStateWithValuesMock, +})) + +vi.mock('@/lib/workflows/triggers/triggers', () => ({ + TriggerUtils: { + findStartBlock: vi.fn().mockReturnValue({ + blockId: 'start-block', + block: { type: 'start_trigger' }, + path: ['start-block'], + }), + }, +})) + +vi.mock('@/lib/workflows/utils', () => ({ + updateWorkflowRunCounts: updateWorkflowRunCountsMock, +})) + +vi.mock('@/executor', () => ({ + Executor: vi.fn().mockImplementation(() => ({ + execute: executorExecuteMock, + executeFromBlock: executorExecuteMock, + })), +})) + +vi.mock('@/serializer', () => ({ + Serializer: vi.fn().mockImplementation(() => ({ + serializeWorkflow: serializeWorkflowMock, + })), +})) + +import { + executeWorkflowCore, + FINALIZED_EXECUTION_ID_TTL_MS, + wasExecutionFinalizedByCore, +} from './execution-core' + +describe('executeWorkflowCore terminal finalization sequencing', () => { + const loggingSession = { + safeStart: safeStartMock, + safeComplete: safeCompleteMock, + safeCompleteWithError: safeCompleteWithErrorMock, + safeCompleteWithCancellation: safeCompleteWithCancellationMock, + safeCompleteWithPause: safeCompleteWithPauseMock, + hasCompleted: hasCompletedMock, + } + + const createSnapshot = () => ({ + metadata: { + requestId: 'req-1', + workflowId: 'workflow-1', + userId: 'user-1', + workspaceId: 'workspace-1', + triggerType: 'api', + executionId: 'execution-1', + triggerBlockId: undefined, + useDraftState: true, + isClientSession: false, + enforceCredentialAccess: false, + startTime: new Date().toISOString(), + }, + workflow: { + id: 'workflow-1', + userId: 'workflow-owner', + variables: {}, + }, + input: { hello: 'world' }, + workflowVariables: {}, + selectedOutputs: [], + state: undefined, + }) + + beforeEach(() => { + vi.clearAllMocks() + vi.useRealTimers() + + loadWorkflowFromNormalizedTablesMock.mockResolvedValue({ + blocks: { + 'start-block': { + id: 'start-block', + type: 'start_trigger', + subBlocks: {}, + name: 'Start', + }, + }, + edges: [], + loops: {}, + 
parallels: {}, + }) + + loadDeployedWorkflowStateMock.mockResolvedValue({ + blocks: {}, + edges: [], + loops: {}, + parallels: {}, + deploymentVersionId: 'dep-1', + }) + + getPersonalAndWorkspaceEnvMock.mockResolvedValue({ + personalEncrypted: {}, + workspaceEncrypted: {}, + personalDecrypted: {}, + workspaceDecrypted: {}, + }) + + mergeSubblockStateWithValuesMock.mockImplementation((blocks) => blocks) + serializeWorkflowMock.mockReturnValue({ loops: {}, parallels: {} }) + buildTraceSpansMock.mockReturnValue({ traceSpans: [{ id: 'span-1' }], totalDuration: 123 }) + safeStartMock.mockResolvedValue(true) + safeCompleteMock.mockResolvedValue(undefined) + safeCompleteWithErrorMock.mockResolvedValue(undefined) + safeCompleteWithCancellationMock.mockResolvedValue(undefined) + safeCompleteWithPauseMock.mockResolvedValue(undefined) + hasCompletedMock.mockReturnValue(true) + updateWorkflowRunCountsMock.mockResolvedValue(undefined) + clearExecutionCancellationMock.mockResolvedValue(undefined) + }) + + it('awaits terminal completion before updating run counts and returning', async () => { + const callOrder: string[] = [] + + executorExecuteMock.mockResolvedValue({ + success: true, + status: 'completed', + output: { done: true }, + logs: [], + metadata: { duration: 123, startTime: 'start', endTime: 'end' }, + }) + + safeCompleteMock.mockImplementation(async () => { + callOrder.push('safeComplete:start') + await Promise.resolve() + callOrder.push('safeComplete:end') + }) + + clearExecutionCancellationMock.mockImplementation(async () => { + callOrder.push('clearCancellation') + }) + + updateWorkflowRunCountsMock.mockImplementation(async () => { + callOrder.push('updateRunCounts') + }) + + const result = await executeWorkflowCore({ + snapshot: createSnapshot() as any, + callbacks: {}, + loggingSession: loggingSession as any, + }) + + expect(result.status).toBe('completed') + expect(callOrder).toEqual([ + 'safeComplete:start', + 'safeComplete:end', + 'clearCancellation', + 
'updateRunCounts', + ]) + }) + + it('clears cancellation even when success finalization throws', async () => { + executorExecuteMock.mockResolvedValue({ + success: true, + status: 'completed', + output: { done: true }, + logs: [], + metadata: { duration: 123, startTime: 'start', endTime: 'end' }, + }) + + const completionError = new Error('completion failed') + safeCompleteMock.mockRejectedValue(completionError) + + await expect( + executeWorkflowCore({ + snapshot: createSnapshot() as any, + callbacks: {}, + loggingSession: loggingSession as any, + }) + ).rejects.toThrow('completion failed') + + expect(clearExecutionCancellationMock).toHaveBeenCalledWith('execution-1') + expect(updateWorkflowRunCountsMock).not.toHaveBeenCalled() + }) + + it('routes cancelled executions through safeCompleteWithCancellation', async () => { + executorExecuteMock.mockResolvedValue({ + success: false, + status: 'cancelled', + output: {}, + logs: [], + metadata: { duration: 123, startTime: 'start', endTime: 'end' }, + }) + + const result = await executeWorkflowCore({ + snapshot: createSnapshot() as any, + callbacks: {}, + loggingSession: loggingSession as any, + }) + + expect(result.status).toBe('cancelled') + expect(safeCompleteWithCancellationMock).toHaveBeenCalledTimes(1) + expect(safeCompleteWithCancellationMock).toHaveBeenCalledWith( + expect.objectContaining({ + totalDurationMs: 123, + traceSpans: [{ id: 'span-1' }], + }) + ) + expect(safeCompleteMock).not.toHaveBeenCalled() + expect(safeCompleteWithPauseMock).not.toHaveBeenCalled() + expect(updateWorkflowRunCountsMock).not.toHaveBeenCalled() + }) + + it('routes paused executions through safeCompleteWithPause', async () => { + executorExecuteMock.mockResolvedValue({ + success: true, + status: 'paused', + output: {}, + logs: [], + metadata: { duration: 123, startTime: 'start', endTime: 'end' }, + }) + + const result = await executeWorkflowCore({ + snapshot: createSnapshot() as any, + callbacks: {}, + loggingSession: loggingSession 
as any, + }) + + expect(result.status).toBe('paused') + expect(safeCompleteWithPauseMock).toHaveBeenCalledTimes(1) + expect(safeCompleteWithPauseMock).toHaveBeenCalledWith( + expect.objectContaining({ + totalDurationMs: 123, + traceSpans: [{ id: 'span-1' }], + workflowInput: { hello: 'world' }, + }) + ) + expect(safeCompleteMock).not.toHaveBeenCalled() + expect(safeCompleteWithCancellationMock).not.toHaveBeenCalled() + expect(updateWorkflowRunCountsMock).not.toHaveBeenCalled() + }) + + it('finalizes errors before rethrowing and marks them as core-finalized', async () => { + const error = new Error('engine failed') + const executionResult = { + success: false, + status: 'failed', + output: {}, + error: 'engine failed', + logs: [], + metadata: { duration: 55, startTime: 'start', endTime: 'end' }, + } + + Object.assign(error, { executionResult }) + executorExecuteMock.mockRejectedValue(error) + + await expect( + executeWorkflowCore({ + snapshot: createSnapshot() as any, + callbacks: {}, + loggingSession: loggingSession as any, + }) + ).rejects.toBe(error) + + expect(safeCompleteWithErrorMock).toHaveBeenCalledTimes(1) + expect(clearExecutionCancellationMock).toHaveBeenCalledWith('execution-1') + expect(wasExecutionFinalizedByCore(error, 'execution-1')).toBe(true) + }) + + it('marks non-Error throws as core-finalized using executionId guard', async () => { + executorExecuteMock.mockRejectedValue('engine failed') + + await expect( + executeWorkflowCore({ + snapshot: createSnapshot() as any, + callbacks: {}, + loggingSession: loggingSession as any, + }) + ).rejects.toBe('engine failed') + + expect(safeCompleteWithErrorMock).toHaveBeenCalledTimes(1) + expect(wasExecutionFinalizedByCore('engine failed', 'execution-1')).toBe(true) + expect(wasExecutionFinalizedByCore('engine failed', 'execution-1')).toBe(true) + }) + + it('expires stale finalized execution ids for callers that never consume the guard', async () => { + vi.useFakeTimers() + vi.setSystemTime(new 
Date('2026-03-13T00:00:00.000Z')) + + executorExecuteMock.mockRejectedValue('engine failed') + + await expect( + executeWorkflowCore({ + snapshot: { + ...createSnapshot(), + metadata: { + ...createSnapshot().metadata, + executionId: 'execution-stale', + }, + } as any, + callbacks: {}, + loggingSession: loggingSession as any, + }) + ).rejects.toBe('engine failed') + + vi.setSystemTime(new Date(Date.now() + FINALIZED_EXECUTION_ID_TTL_MS + 1)) + + await expect( + executeWorkflowCore({ + snapshot: { + ...createSnapshot(), + metadata: { + ...createSnapshot().metadata, + executionId: 'execution-fresh', + }, + } as any, + callbacks: {}, + loggingSession: loggingSession as any, + }) + ).rejects.toBe('engine failed') + + expect(wasExecutionFinalizedByCore('engine failed', 'execution-stale')).toBe(false) + expect(wasExecutionFinalizedByCore('engine failed', 'execution-fresh')).toBe(true) + }) + + it('removes expired finalized ids even when a reused id stays earlier in map order', async () => { + vi.useFakeTimers() + vi.setSystemTime(new Date('2026-03-13T00:00:00.000Z')) + + executorExecuteMock.mockRejectedValue('engine failed') + + await expect( + executeWorkflowCore({ + snapshot: { + ...createSnapshot(), + metadata: { + ...createSnapshot().metadata, + executionId: 'execution-a', + }, + } as any, + callbacks: {}, + loggingSession: loggingSession as any, + }) + ).rejects.toBe('engine failed') + + vi.setSystemTime(new Date('2026-03-13T00:01:00.000Z')) + + await expect( + executeWorkflowCore({ + snapshot: { + ...createSnapshot(), + metadata: { + ...createSnapshot().metadata, + executionId: 'execution-b', + }, + } as any, + callbacks: {}, + loggingSession: loggingSession as any, + }) + ).rejects.toBe('engine failed') + + vi.setSystemTime(new Date('2026-03-13T00:02:00.000Z')) + + await expect( + executeWorkflowCore({ + snapshot: { + ...createSnapshot(), + metadata: { + ...createSnapshot().metadata, + executionId: 'execution-a', + }, + } as any, + callbacks: {}, + loggingSession: 
loggingSession as any, + }) + ).rejects.toBe('engine failed') + + vi.setSystemTime(new Date('2026-03-13T00:06:01.000Z')) + + expect(wasExecutionFinalizedByCore('engine failed', 'execution-b')).toBe(false) + expect(wasExecutionFinalizedByCore('engine failed', 'execution-a')).toBe(true) + }) + + it('falls back to error finalization when success finalization rejects', async () => { + executorExecuteMock.mockResolvedValue({ + success: true, + status: 'completed', + output: { done: true }, + logs: [], + metadata: { duration: 123, startTime: 'start', endTime: 'end' }, + }) + + safeCompleteMock.mockRejectedValue(new Error('completion failed')) + + await expect( + executeWorkflowCore({ + snapshot: createSnapshot() as any, + callbacks: {}, + loggingSession: loggingSession as any, + }) + ).rejects.toThrow('completion failed') + + expect(safeCompleteWithErrorMock).toHaveBeenCalledTimes(1) + expect(safeCompleteWithErrorMock).toHaveBeenCalledWith( + expect.objectContaining({ + error: expect.objectContaining({ + message: 'completion failed', + }), + }) + ) + }) + + it('does not replace a successful outcome when cancellation cleanup fails', async () => { + executorExecuteMock.mockResolvedValue({ + success: true, + status: 'completed', + output: { done: true }, + logs: [], + metadata: { duration: 123, startTime: 'start', endTime: 'end' }, + }) + + clearExecutionCancellationMock.mockRejectedValue(new Error('cleanup failed')) + + await expect( + executeWorkflowCore({ + snapshot: createSnapshot() as any, + callbacks: {}, + loggingSession: loggingSession as any, + }) + ).resolves.toMatchObject({ status: 'completed', success: true }) + + expect(safeCompleteWithErrorMock).not.toHaveBeenCalled() + }) + + it('does not replace the original error when cancellation cleanup fails', async () => { + const error = new Error('engine failed') + executorExecuteMock.mockRejectedValue(error) + clearExecutionCancellationMock.mockRejectedValue(new Error('cleanup failed')) + + await expect( + 
executeWorkflowCore({ + snapshot: createSnapshot() as any, + callbacks: {}, + loggingSession: loggingSession as any, + }) + ).rejects.toBe(error) + + expect(safeCompleteWithErrorMock).toHaveBeenCalledTimes(1) + }) + + it('does not mark core finalization when error completion never persists a log row', async () => { + const error = new Error('engine failed') + executorExecuteMock.mockRejectedValue(error) + hasCompletedMock.mockReturnValue(false) + const snapshot = { + ...createSnapshot(), + metadata: { + ...createSnapshot().metadata, + executionId: 'execution-unfinalized', + }, + } + + await expect( + executeWorkflowCore({ + snapshot: snapshot as any, + callbacks: {}, + loggingSession: loggingSession as any, + }) + ).rejects.toBe(error) + + expect(safeCompleteWithErrorMock).toHaveBeenCalledTimes(1) + expect(wasExecutionFinalizedByCore(error, 'execution-unfinalized')).toBe(false) + }) + + it('starts a minimal log session before error completion when setup fails early', async () => { + const envError = new Error('env lookup failed') + getPersonalAndWorkspaceEnvMock.mockRejectedValue(envError) + + await expect( + executeWorkflowCore({ + snapshot: createSnapshot() as any, + callbacks: {}, + loggingSession: loggingSession as any, + }) + ).rejects.toBe(envError) + + expect(safeStartMock).toHaveBeenCalledTimes(1) + expect(safeStartMock).toHaveBeenCalledWith( + expect.objectContaining({ + userId: 'user-1', + workspaceId: 'workspace-1', + variables: {}, + }) + ) + expect(safeCompleteWithErrorMock).toHaveBeenCalledTimes(1) + expect(wasExecutionFinalizedByCore(envError, 'execution-1')).toBe(true) + }) + + it('skips core finalization when minimal error logging cannot start', async () => { + const envError = new Error('env lookup failed') + getPersonalAndWorkspaceEnvMock.mockRejectedValue(envError) + safeStartMock.mockResolvedValue(false) + const snapshot = { + ...createSnapshot(), + metadata: { + ...createSnapshot().metadata, + executionId: 'execution-no-log-start', + }, + } + 
+ await expect( + executeWorkflowCore({ + snapshot: snapshot as any, + callbacks: {}, + loggingSession: loggingSession as any, + }) + ).rejects.toBe(envError) + + expect(safeStartMock).toHaveBeenCalledTimes(1) + expect(safeCompleteWithErrorMock).not.toHaveBeenCalled() + expect(wasExecutionFinalizedByCore(envError, 'execution-no-log-start')).toBe(false) + }) +}) diff --git a/apps/sim/lib/workflows/executor/execution-core.ts b/apps/sim/lib/workflows/executor/execution-core.ts index b96a872dbf..69610ff801 100644 --- a/apps/sim/lib/workflows/executor/execution-core.ts +++ b/apps/sim/lib/workflows/executor/execution-core.ts @@ -112,6 +112,137 @@ function parseVariableValueByType(value: unknown, type: string): unknown { return typeof value === 'string' ? value : String(value) } +type ExecutionErrorWithFinalizationFlag = Error & { + executionFinalizedByCore?: boolean +} + +export const FINALIZED_EXECUTION_ID_TTL_MS = 5 * 60 * 1000 + +const finalizedExecutionIds = new Map() + +function cleanupExpiredFinalizedExecutionIds(now = Date.now()): void { + for (const [executionId, expiresAt] of finalizedExecutionIds.entries()) { + if (expiresAt <= now) { + finalizedExecutionIds.delete(executionId) + } + } +} + +function rememberFinalizedExecutionId(executionId: string): void { + const now = Date.now() + + cleanupExpiredFinalizedExecutionIds(now) + finalizedExecutionIds.set(executionId, now + FINALIZED_EXECUTION_ID_TTL_MS) +} + +async function clearExecutionCancellationSafely( + executionId: string, + requestId: string +): Promise { + try { + await clearExecutionCancellation(executionId) + } catch (error) { + logger.error(`[${requestId}] Failed to clear execution cancellation`, { error, executionId }) + } +} + +function markExecutionFinalizedByCore(error: unknown, executionId: string): void { + rememberFinalizedExecutionId(executionId) + + if (error instanceof Error) { + ;(error as ExecutionErrorWithFinalizationFlag).executionFinalizedByCore = true + } +} + +export function 
wasExecutionFinalizedByCore(error: unknown, executionId?: string): boolean { + cleanupExpiredFinalizedExecutionIds() + + if (executionId && finalizedExecutionIds.has(executionId)) { + return true + } + + return ( + error instanceof Error && + (error as ExecutionErrorWithFinalizationFlag).executionFinalizedByCore === true + ) +} + +async function finalizeExecutionOutcome(params: { + result: ExecutionResult + loggingSession: LoggingSession + executionId: string + requestId: string + workflowInput: unknown +}): Promise { + const { result, loggingSession, executionId, requestId, workflowInput } = params + const { traceSpans, totalDuration } = buildTraceSpans(result) + const endedAt = new Date().toISOString() + + try { + if (result.status === 'cancelled') { + await loggingSession.safeCompleteWithCancellation({ + endedAt, + totalDurationMs: totalDuration || 0, + traceSpans: traceSpans || [], + }) + return + } + + if (result.status === 'paused') { + await loggingSession.safeCompleteWithPause({ + endedAt, + totalDurationMs: totalDuration || 0, + traceSpans: traceSpans || [], + workflowInput, + }) + return + } + + await loggingSession.safeComplete({ + endedAt, + totalDurationMs: totalDuration || 0, + finalOutput: result.output || {}, + traceSpans: traceSpans || [], + workflowInput, + executionState: result.executionState, + }) + } finally { + await clearExecutionCancellationSafely(executionId, requestId) + } +} + +async function finalizeExecutionError(params: { + error: unknown + loggingSession: LoggingSession + executionId: string + requestId: string +}): Promise { + const { error, loggingSession, executionId, requestId } = params + const executionResult = hasExecutionResult(error) ? error.executionResult : undefined + const { traceSpans } = executionResult ? 
buildTraceSpans(executionResult) : { traceSpans: [] } + + try { + await loggingSession.safeCompleteWithError({ + endedAt: new Date().toISOString(), + totalDurationMs: executionResult?.metadata?.duration || 0, + error: { + message: error instanceof Error ? error.message : 'Execution failed', + stackTrace: error instanceof Error ? error.stack : undefined, + }, + traceSpans, + }) + + return loggingSession.hasCompleted() + } catch (postExecError) { + logger.error(`[${requestId}] Post-execution error logging failed`, { + error: postExecError, + }) + return false + } finally { + await clearExecutionCancellationSafely(executionId, requestId) + } +} + export async function executeWorkflowCore( options: ExecuteWorkflowCoreOptions ): Promise { @@ -137,13 +268,14 @@ export async function executeWorkflowCore( } let processedInput = input || {} + let deploymentVersionId: string | undefined + let loggingStarted = false try { let blocks let edges: Edge[] let loops let parallels - let deploymentVersionId: string | undefined // Use workflowStateOverride if provided (for diff workflows) if (metadata.workflowStateOverride) { @@ -200,10 +332,11 @@ export async function executeWorkflowCore( // Use already-decrypted values for execution (no redundant decryption) const decryptedEnvVars: Record = { ...personalDecrypted, ...workspaceDecrypted } - await loggingSession.safeStart({ + loggingStarted = await loggingSession.safeStart({ userId, workspaceId: providedWorkspaceId, variables, + triggerData: metadata.correlation ? 
{ correlation: metadata.correlation } : undefined, skipLogCreation, deploymentVersionId, }) @@ -360,48 +493,21 @@ export async function executeWorkflowCore( )) as ExecutionResult) : ((await executorInstance.execute(workflowId, resolvedTriggerBlockId)) as ExecutionResult) - // Fire-and-forget: post-execution logging, billing, and cleanup - void (async () => { - try { - const { traceSpans, totalDuration } = buildTraceSpans(result) - - if (result.success && result.status !== 'paused') { - try { - await updateWorkflowRunCounts(workflowId) - } catch (runCountError) { - logger.error(`[${requestId}] Failed to update run counts`, { error: runCountError }) - } - } - - if (result.status === 'cancelled') { - await loggingSession.safeCompleteWithCancellation({ - endedAt: new Date().toISOString(), - totalDurationMs: totalDuration || 0, - traceSpans: traceSpans || [], - }) - } else if (result.status === 'paused') { - await loggingSession.safeCompleteWithPause({ - endedAt: new Date().toISOString(), - totalDurationMs: totalDuration || 0, - traceSpans: traceSpans || [], - workflowInput: processedInput, - }) - } else { - await loggingSession.safeComplete({ - endedAt: new Date().toISOString(), - totalDurationMs: totalDuration || 0, - finalOutput: result.output || {}, - traceSpans: traceSpans || [], - workflowInput: processedInput, - executionState: result.executionState, - }) - } + await finalizeExecutionOutcome({ + result, + loggingSession, + executionId, + requestId, + workflowInput: processedInput, + }) - await clearExecutionCancellation(executionId) - } catch (postExecError) { - logger.error(`[${requestId}] Post-execution logging failed`, { error: postExecError }) + if (result.success && result.status !== 'paused') { + try { + await updateWorkflowRunCounts(workflowId) + } catch (runCountError) { + logger.error(`[${requestId}] Failed to update run counts`, { error: runCountError }) } - })() + } logger.info(`[${requestId}] Workflow execution completed`, { success: result.success, 
@@ -413,31 +519,29 @@ export async function executeWorkflowCore( } catch (error: unknown) { logger.error(`[${requestId}] Execution failed:`, error) - // Fire-and-forget: error logging and cleanup - void (async () => { - try { - const executionResult = hasExecutionResult(error) ? error.executionResult : undefined - const { traceSpans } = executionResult - ? buildTraceSpans(executionResult) - : { traceSpans: [] } - - await loggingSession.safeCompleteWithError({ - endedAt: new Date().toISOString(), - totalDurationMs: executionResult?.metadata?.duration || 0, - error: { - message: error instanceof Error ? error.message : 'Execution failed', - stackTrace: error instanceof Error ? error.stack : undefined, - }, - traceSpans, - }) + if (!loggingStarted) { + loggingStarted = await loggingSession.safeStart({ + userId, + workspaceId: providedWorkspaceId, + variables: {}, + triggerData: metadata.correlation ? { correlation: metadata.correlation } : undefined, + skipLogCreation, + deploymentVersionId, + }) + } - await clearExecutionCancellation(executionId) - } catch (postExecError) { - logger.error(`[${requestId}] Post-execution error logging failed`, { - error: postExecError, + const finalized = loggingStarted + ? 
await finalizeExecutionError({ + error, + loggingSession, + executionId, + requestId, }) - } - })() + : false + + if (finalized) { + markExecutionFinalizedByCore(error, executionId) + } throw error } From 7e740e617b9be635c7d0db8edbb8b9588cf16ee5 Mon Sep 17 00:00:00 2001 From: Vikhyath Mondreti Date: Fri, 13 Mar 2026 17:47:02 -0700 Subject: [PATCH 3/6] improvement(copilot): state persistence, subflow recreation, dynamic handle topologies (#3569) * improvement(copilot): state persistence, subflow recreation, dynamic handle topologies * address comments --- .../[id]/execute/route.async.test.ts | 5 + .../sim/app/api/workflows/[id]/state/route.ts | 12 +- .../condition-input/condition-input.tsx | 17 +- .../components/subflows/subflow-node.tsx | 10 +- .../workflow-block/workflow-block.tsx | 61 +--- .../w/[workflowId]/hooks/index.ts | 1 + .../hooks/use-dynamic-handle-refresh.ts | 33 +++ .../w/[workflowId]/utils/auto-layout-utils.ts | 6 +- .../w/[workflowId]/utils/index.ts | 1 + .../w/[workflowId]/utils/node-derivation.ts | 32 +++ .../[workspaceId]/w/[workflowId]/workflow.tsx | 63 +++-- apps/sim/hooks/use-collaborative-workflow.ts | 4 +- .../workflow/edit-workflow/builders.test.ts | 32 ++- .../server/workflow/edit-workflow/builders.ts | 51 +++- .../server/workflow/edit-workflow/engine.ts | 50 +++- .../server/workflow/edit-workflow/index.ts | 107 ++++--- .../workflow/edit-workflow/operations.test.ts | 264 ++++++++++++++++++ .../workflow/edit-workflow/operations.ts | 234 ++++++++++------ .../server/workflow/edit-workflow/types.ts | 1 + .../workflow/edit-workflow/validation.test.ts | 90 ++++++ .../workflow/edit-workflow/validation.ts | 29 ++ .../workflows/dynamic-handle-topology.test.ts | 85 ++++++ .../lib/workflows/dynamic-handle-topology.ts | 143 ++++++++++ .../workflows/operations/socket-operations.ts | 13 +- apps/sim/stores/panel/copilot/store.ts | 2 +- apps/sim/stores/workflow-diff/store.ts | 16 -- apps/sim/stores/workflows/registry/store.ts | 4 +- 
apps/sim/stores/workflows/utils.ts | 27 +- .../workflows/workflow/edge-validation.ts | 86 ++++++ .../stores/workflows/workflow/store.test.ts | 34 +++ apps/sim/stores/workflows/workflow/store.ts | 104 ++++--- apps/sim/stores/workflows/workflow/types.ts | 1 + .../stores/workflows/workflow/validation.ts | 67 +++++ 33 files changed, 1374 insertions(+), 311 deletions(-) create mode 100644 apps/sim/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-dynamic-handle-refresh.ts create mode 100644 apps/sim/app/workspace/[workspaceId]/w/[workflowId]/utils/node-derivation.ts create mode 100644 apps/sim/lib/copilot/tools/server/workflow/edit-workflow/operations.test.ts create mode 100644 apps/sim/lib/copilot/tools/server/workflow/edit-workflow/validation.test.ts create mode 100644 apps/sim/lib/workflows/dynamic-handle-topology.test.ts create mode 100644 apps/sim/lib/workflows/dynamic-handle-topology.ts create mode 100644 apps/sim/stores/workflows/workflow/edge-validation.ts create mode 100644 apps/sim/stores/workflows/workflow/validation.ts diff --git a/apps/sim/app/api/workflows/[id]/execute/route.async.test.ts b/apps/sim/app/api/workflows/[id]/execute/route.async.test.ts index 0a9aa008ba..7d6c599dcf 100644 --- a/apps/sim/app/api/workflows/[id]/execute/route.async.test.ts +++ b/apps/sim/app/api/workflows/[id]/execute/route.async.test.ts @@ -19,6 +19,11 @@ const { vi.mock('@/lib/auth/hybrid', () => ({ checkHybridAuth: mockCheckHybridAuth, + AuthType: { + SESSION: 'session', + API_KEY: 'api_key', + INTERNAL_JWT: 'internal_jwt', + }, })) vi.mock('@/lib/workflows/utils', () => ({ diff --git a/apps/sim/app/api/workflows/[id]/state/route.ts b/apps/sim/app/api/workflows/[id]/state/route.ts index 60417bf4eb..7cca499088 100644 --- a/apps/sim/app/api/workflows/[id]/state/route.ts +++ b/apps/sim/app/api/workflows/[id]/state/route.ts @@ -11,6 +11,7 @@ import { extractAndPersistCustomTools } from '@/lib/workflows/persistence/custom import { saveWorkflowToNormalizedTables } from 
'@/lib/workflows/persistence/utils' import { sanitizeAgentToolsInBlocks } from '@/lib/workflows/sanitization/validation' import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils' +import { validateEdges } from '@/stores/workflows/workflow/edge-validation' import type { BlockState, WorkflowState } from '@/stores/workflows/workflow/types' import { generateLoopBlocks, generateParallelBlocks } from '@/stores/workflows/workflow/utils' @@ -180,12 +181,16 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{ ) const typedBlocks = filteredBlocks as Record + const validatedEdges = validateEdges(state.edges as WorkflowState['edges'], typedBlocks) + const validationWarnings = validatedEdges.dropped.map( + ({ edge, reason }) => `Dropped edge "${edge.id}": ${reason}` + ) const canonicalLoops = generateLoopBlocks(typedBlocks) const canonicalParallels = generateParallelBlocks(typedBlocks) const workflowState = { blocks: filteredBlocks, - edges: state.edges, + edges: validatedEdges.valid, loops: canonicalLoops, parallels: canonicalParallels, lastSaved: state.lastSaved || Date.now(), @@ -276,7 +281,10 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{ ) } - return NextResponse.json({ success: true, warnings }, { status: 200 }) + return NextResponse.json( + { success: true, warnings: [...warnings, ...validationWarnings] }, + { status: 200 } + ) } catch (error: any) { const elapsed = Date.now() - startTime logger.error( diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/condition-input/condition-input.tsx b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/condition-input/condition-input.tsx index cc2f75cef3..7f2a1d0546 100644 --- 
a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/condition-input/condition-input.tsx +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/condition-input/condition-input.tsx @@ -4,7 +4,6 @@ import { createLogger } from '@sim/logger' import { ChevronDown, ChevronsUpDown, ChevronUp, Plus } from 'lucide-react' import { useParams } from 'next/navigation' import Editor from 'react-simple-code-editor' -import { useUpdateNodeInternals } from 'reactflow' import { Button, Code, @@ -173,7 +172,6 @@ export function ConditionInput({ const [visualLineHeights, setVisualLineHeights] = useState<{ [key: string]: number[] }>({}) - const updateNodeInternals = useUpdateNodeInternals() const batchRemoveEdges = useWorkflowStore((state) => state.batchRemoveEdges) const edges = useWorkflowStore((state) => state.edges) @@ -352,17 +350,8 @@ export function ConditionInput({ if (newValue !== prevStoreValueRef.current) { prevStoreValueRef.current = newValue setStoreValue(newValue) - updateNodeInternals(blockId) } - }, [ - conditionalBlocks, - blockId, - subBlockId, - setStoreValue, - updateNodeInternals, - isReady, - isPreview, - ]) + }, [conditionalBlocks, blockId, subBlockId, setStoreValue, isReady, isPreview]) // Cleanup when component unmounts useEffect(() => { @@ -708,8 +697,6 @@ export function ConditionInput({ shouldPersistRef.current = true setConditionalBlocks((blocks) => updateBlockTitles(blocks.filter((block) => block.id !== id))) - - setTimeout(() => updateNodeInternals(blockId), 0) } const moveBlock = (id: string, direction: 'up' | 'down') => { @@ -737,8 +724,6 @@ export function ConditionInput({ ] shouldPersistRef.current = true setConditionalBlocks(updateBlockTitles(newBlocks)) - - setTimeout(() => updateNodeInternals(blockId), 0) } // Add useEffect to handle keyboard events for both dropdowns diff --git 
a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/subflows/subflow-node.tsx b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/subflows/subflow-node.tsx index 9118e0dd9b..c2e2880769 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/subflows/subflow-node.tsx +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/subflows/subflow-node.tsx @@ -198,14 +198,14 @@ export const SubflowNodeComponent = memo(({ data, id, selected }: NodeProps {/* - * Click-catching background — selects this subflow when the body area is clicked. - * No event bubbling concern: ReactFlow renders child nodes as viewport-level siblings, - * not as DOM children of this component, so child clicks never reach this div. + * Subflow body background. Uses pointer-events: none so that edges rendered + * inside the subflow remain clickable. The subflow node wrapper also has + * pointer-events: none (set in workflow.tsx), so body-area clicks pass + * through to the pane. Subflow selection is done via the header above. */}
setCurrentBlockId(id)} + style={{ pointerEvents: 'none' }} /> {!isPreview && ( diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/workflow-block.tsx b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/workflow-block.tsx index 5a55980191..6aeb3c8243 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/workflow-block.tsx +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/workflow-block.tsx @@ -11,6 +11,7 @@ import { createMcpToolId } from '@/lib/mcp/shared' import { getProviderIdFromServiceId } from '@/lib/oauth' import type { FilterRule, SortRule } from '@/lib/table/types' import { BLOCK_DIMENSIONS, HANDLE_POSITIONS } from '@/lib/workflows/blocks/block-dimensions' +import { getConditionRows, getRouterRows } from '@/lib/workflows/dynamic-handle-topology' import { buildCanonicalIndex, evaluateSubBlockCondition, @@ -1049,6 +1050,9 @@ export const WorkflowBlock = memo(function WorkflowBlock({ const subBlockRows = subBlockRowsData.rows const subBlockState = subBlockRowsData.stateToUse + const topologySubBlocks = data.isPreview + ? (data.blockState?.subBlocks ?? {}) + : (currentStoreBlock?.subBlocks ?? {}) const effectiveAdvanced = useMemo(() => { const rawValues = Object.entries(subBlockState).reduce>( (acc, [key, entry]) => { @@ -1108,34 +1112,8 @@ export const WorkflowBlock = memo(function WorkflowBlock({ */ const conditionRows = useMemo(() => { if (type !== 'condition') return [] as { id: string; title: string; value: string }[] - - const conditionsValue = subBlockState.conditions?.value - const raw = typeof conditionsValue === 'string' ? conditionsValue : undefined - - try { - if (raw) { - const parsed = JSON.parse(raw) as unknown - if (Array.isArray(parsed)) { - return parsed.map((item: unknown, index: number) => { - const conditionItem = item as { id?: string; value?: unknown } - const title = index === 0 ? 
'if' : index === parsed.length - 1 ? 'else' : 'else if' - return { - id: conditionItem?.id ?? `${id}-cond-${index}`, - title, - value: typeof conditionItem?.value === 'string' ? conditionItem.value : '', - } - }) - } - } - } catch (error) { - logger.warn('Failed to parse condition subblock value', { error, blockId: id }) - } - - return [ - { id: `${id}-if`, title: 'if', value: '' }, - { id: `${id}-else`, title: 'else', value: '' }, - ] - }, [type, subBlockState, id]) + return getConditionRows(id, topologySubBlocks.conditions?.value) + }, [type, topologySubBlocks, id]) /** * Compute per-route rows (id/value) for router_v2 blocks so we can render @@ -1144,31 +1122,8 @@ export const WorkflowBlock = memo(function WorkflowBlock({ */ const routerRows = useMemo(() => { if (type !== 'router_v2') return [] as { id: string; value: string }[] - - const routesValue = subBlockState.routes?.value - const raw = typeof routesValue === 'string' ? routesValue : undefined - - try { - if (raw) { - const parsed = JSON.parse(raw) as unknown - if (Array.isArray(parsed)) { - return parsed.map((item: unknown, index: number) => { - const routeItem = item as { id?: string; value?: string } - return { - // Use stable ID format that matches ConditionInput's generateStableId - id: routeItem?.id ?? `${id}-route${index + 1}`, - value: routeItem?.value ?? '', - } - }) - } - } - } catch (error) { - logger.warn('Failed to parse router routes value', { error, blockId: id }) - } - - // Fallback must match ConditionInput's default: generateStableId(blockId, 'route1') = `${blockId}-route1` - return [{ id: `${id}-route1`, value: '' }] - }, [type, subBlockState, id]) + return getRouterRows(id, topologySubBlocks.routes?.value) + }, [type, topologySubBlocks, id]) /** * Compute and publish deterministic layout metrics for workflow blocks. 
diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/hooks/index.ts b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/hooks/index.ts index 31ebdc27a9..922df81d02 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/hooks/index.ts +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/hooks/index.ts @@ -6,6 +6,7 @@ export { useBlockOutputFields } from './use-block-output-fields' export { useBlockVisual } from './use-block-visual' export { useCanvasContextMenu } from './use-canvas-context-menu' export { type CurrentWorkflow, useCurrentWorkflow } from './use-current-workflow' +export { useDynamicHandleRefresh } from './use-dynamic-handle-refresh' export { useNodeUtilities } from './use-node-utilities' export { usePreventZoom } from './use-prevent-zoom' export { useScrollManagement } from './use-scroll-management' diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-dynamic-handle-refresh.ts b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-dynamic-handle-refresh.ts new file mode 100644 index 0000000000..aa6c9c7e33 --- /dev/null +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-dynamic-handle-refresh.ts @@ -0,0 +1,33 @@ +import { useEffect, useMemo, useRef } from 'react' +import { useUpdateNodeInternals } from 'reactflow' +import { + collectDynamicHandleTopologySignatures, + getChangedDynamicHandleBlockIds, +} from '@/lib/workflows/dynamic-handle-topology' +import { useWorkflowStore } from '@/stores/workflows/workflow/store' + +export function useDynamicHandleRefresh() { + const updateNodeInternals = useUpdateNodeInternals() + const blocks = useWorkflowStore((state) => state.blocks) + const previousSignaturesRef = useRef>(new Map()) + + const signatures = useMemo(() => collectDynamicHandleTopologySignatures(blocks), [blocks]) + + useEffect(() => { + const changedBlockIds = getChangedDynamicHandleBlockIds( + previousSignaturesRef.current, + signatures + ) + previousSignaturesRef.current 
= signatures + + if (changedBlockIds.length === 0) { + return + } + + const frameId = requestAnimationFrame(() => { + changedBlockIds.forEach((blockId) => updateNodeInternals(blockId)) + }) + + return () => cancelAnimationFrame(frameId) + }, [signatures, updateNodeInternals]) +} diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/utils/auto-layout-utils.ts b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/utils/auto-layout-utils.ts index 5f494b29db..3b4d5a73ee 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/utils/auto-layout-utils.ts +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/utils/auto-layout-utils.ts @@ -116,7 +116,7 @@ export async function applyAutoLayoutAndUpdateStore( lastSaved: Date.now(), } - useWorkflowStore.setState(newWorkflowState) + useWorkflowStore.getState().replaceWorkflowState(newWorkflowState) logger.info('Successfully updated workflow store with auto layout', { workflowId }) @@ -168,9 +168,9 @@ export async function applyAutoLayoutAndUpdateStore( }) // Revert the store changes since database save failed - useWorkflowStore.setState({ + useWorkflowStore.getState().replaceWorkflowState({ ...workflowStore.getWorkflowState(), - blocks: blocks, + blocks, lastSaved: workflowStore.lastSaved, }) diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/utils/index.ts b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/utils/index.ts index 88772d16fa..0e0ec02793 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/utils/index.ts +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/utils/index.ts @@ -1,6 +1,7 @@ export * from './auto-layout-utils' export * from './block-protection-utils' export * from './block-ring-utils' +export * from './node-derivation' export * from './node-position-utils' export * from './workflow-canvas-helpers' export * from './workflow-execution-utils' diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/utils/node-derivation.ts 
b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/utils/node-derivation.ts new file mode 100644 index 0000000000..b1f9e45b49 --- /dev/null +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/utils/node-derivation.ts @@ -0,0 +1,32 @@ +import type { BlockState } from '@/stores/workflows/workflow/types' + +export const Z_INDEX = { + ROOT_BLOCK: 10, + CHILD_BLOCK: 1000, +} as const + +export function computeContainerZIndex( + block: Pick, + allBlocks: Record> +): number { + let depth = 0 + let parentId = block.data?.parentId + + while (parentId && depth < 100) { + depth++ + parentId = allBlocks[parentId]?.data?.parentId + } + + return depth +} + +export function computeBlockZIndex( + block: Pick, + allBlocks: Record> +): number { + if (block.type === 'loop' || block.type === 'parallel') { + return computeContainerZIndex(block, allBlocks) + } + + return block.data?.parentId ? Z_INDEX.CHILD_BLOCK : Z_INDEX.ROOT_BLOCK +} diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/workflow.tsx b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/workflow.tsx index 577af370cb..d9e561c071 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/workflow.tsx +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/workflow.tsx @@ -45,6 +45,7 @@ import { useAutoLayout, useCanvasContextMenu, useCurrentWorkflow, + useDynamicHandleRefresh, useNodeUtilities, useShiftSelectionLock, useWorkflowExecution, @@ -53,6 +54,7 @@ import { calculateContainerDimensions, clampPositionToContainer, clearDragHighlights, + computeBlockZIndex, computeClampedPositionUpdates, estimateBlockDimensions, filterProtectedBlocks, @@ -64,6 +66,7 @@ import { isInEditableElement, resolveParentChildSelectionConflicts, validateTriggerPaste, + Z_INDEX, } from '@/app/workspace/[workspaceId]/w/[workflowId]/utils' import { useSocket } from '@/app/workspace/providers/socket-provider' import { getBlock } from '@/blocks' @@ -248,6 +251,7 @@ const WorkflowContent = React.memo(() => { const { 
screenToFlowPosition, getNodes, setNodes, getIntersectingNodes } = reactFlowInstance const { fitViewToBounds, getViewportCenter } = useCanvasViewport(reactFlowInstance) const { emitCursorUpdate } = useSocket() + useDynamicHandleRefresh() const workspaceId = params.workspaceId as string const workflowIdParam = params.workflowId as string @@ -727,6 +731,7 @@ const WorkflowContent = React.memo(() => { ...node, position: update.newPosition, parentId: update.newParentId ?? undefined, + zIndex: update.newParentId ? Z_INDEX.CHILD_BLOCK : Z_INDEX.ROOT_BLOCK, } } return node @@ -2364,13 +2369,6 @@ const WorkflowContent = React.memo(() => { // Handle container nodes differently if (block.type === 'loop' || block.type === 'parallel') { - // Compute nesting depth so children always render above parents - let depth = 0 - let pid = block.data?.parentId as string | undefined - while (pid && depth < 100) { - depth++ - pid = blocks[pid]?.data?.parentId as string | undefined - } nodeArray.push({ id: block.id, type: 'subflowNode', @@ -2379,8 +2377,9 @@ const WorkflowContent = React.memo(() => { extent: block.data?.extent || undefined, dragHandle: '.workflow-drag-handle', draggable: !isBlockProtected(block.id, blocks), - zIndex: depth, + zIndex: computeBlockZIndex(block, blocks), className: block.data?.parentId ? 'nested-subflow-node' : undefined, + style: { pointerEvents: 'none' }, data: { ...block.data, name: block.name, @@ -2409,12 +2408,6 @@ const WorkflowContent = React.memo(() => { const nodeType = block.type === 'note' ? 'noteBlock' : 'workflowBlock' const dragHandle = block.type === 'note' ? '.note-drag-handle' : '.workflow-drag-handle' - // Compute zIndex for blocks inside containers so they render above the - // parent subflow's interactive body area (which needs pointer-events for - // click-to-select). Container nodes use zIndex: depth (0, 1, 2...), - // so child blocks use a baseline that is always above any container. - const childZIndex = block.data?.parentId ? 
1000 : undefined - // Create stable node object - React Flow will handle shallow comparison nodeArray.push({ id: block.id, @@ -2423,7 +2416,7 @@ const WorkflowContent = React.memo(() => { parentId: block.data?.parentId, dragHandle, draggable: !isBlockProtected(block.id, blocks), - ...(childZIndex !== undefined && { zIndex: childZIndex }), + zIndex: computeBlockZIndex(block, blocks), extent: (() => { // Clamp children to subflow body (exclude header) const parentId = block.data?.parentId as string | undefined @@ -2609,6 +2602,7 @@ const WorkflowContent = React.memo(() => { position: absPos, parentId: undefined, extent: undefined, + zIndex: Z_INDEX.ROOT_BLOCK, } } return n @@ -3330,6 +3324,7 @@ const WorkflowContent = React.memo(() => { position: relativePositionBefore, parentId: potentialParentId, extent: 'parent' as const, + zIndex: Z_INDEX.CHILD_BLOCK, } } return n @@ -3372,6 +3367,7 @@ const WorkflowContent = React.memo(() => { position: absolutePosition, parentId: undefined, extent: undefined, + zIndex: Z_INDEX.ROOT_BLOCK, } } return n @@ -3594,12 +3590,43 @@ const WorkflowContent = React.memo(() => { const handleNodeClick = useCallback( (event: React.MouseEvent, node: Node) => { const isMultiSelect = event.shiftKey || event.metaKey || event.ctrlKey - setNodes((nodes) => - nodes.map((n) => ({ + setNodes((nodes) => { + const updated = nodes.map((n) => ({ ...n, selected: isMultiSelect ? (n.id === node.id ? 
true : n.selected) : n.id === node.id, })) - ) + if (!isMultiSelect) return updated + + const clickedId = node.id + const clickedParentId = node.parentId + + const selectedIds = new Set(updated.filter((n) => n.selected).map((n) => n.id)) + + let hasConflict = false + const resolved = updated.map((n) => { + if (!n.selected || n.id === clickedId) return n + const nParentId = n.parentId + + if (nParentId === clickedId) { + hasConflict = true + return { ...n, selected: false } + } + + if (clickedParentId === n.id) { + hasConflict = true + return { ...n, selected: false } + } + + if (nParentId && selectedIds.has(nParentId)) { + hasConflict = true + return { ...n, selected: false } + } + + return n + }) + + return hasConflict ? resolved : updated + }) }, [setNodes] ) diff --git a/apps/sim/hooks/use-collaborative-workflow.ts b/apps/sim/hooks/use-collaborative-workflow.ts index db3ac1a6c6..b8b85b3cbd 100644 --- a/apps/sim/hooks/use-collaborative-workflow.ts +++ b/apps/sim/hooks/use-collaborative-workflow.ts @@ -475,6 +475,7 @@ export function useCollaborativeWorkflow() { try { useSubBlockStore.getState().setValue(blockId, subblockId, value) + useWorkflowStore.getState().syncDynamicHandleSubblockValue(blockId, subblockId, value) const blockType = useWorkflowStore.getState().blocks?.[blockId]?.type if (activeWorkflowId && blockType === 'function' && subblockId === 'code') { useCodeUndoRedoStore.getState().clear(activeWorkflowId, blockId, subblockId) @@ -555,7 +556,7 @@ export function useCollaborativeWorkflow() { isApplyingRemoteChange.current = true try { // Update the main workflow state using the API response - useWorkflowStore.setState({ + useWorkflowStore.getState().replaceWorkflowState({ blocks: workflowData.state.blocks || {}, edges: workflowData.state.edges || [], loops: workflowData.state.loops || {}, @@ -1230,6 +1231,7 @@ export function useCollaborativeWorkflow() { // ALWAYS update local store first for immediate UI feedback 
useSubBlockStore.getState().setValue(blockId, subblockId, value) + useWorkflowStore.getState().syncDynamicHandleSubblockValue(blockId, subblockId, value) if (activeWorkflowId) { const operationId = crypto.randomUUID() diff --git a/apps/sim/lib/copilot/tools/server/workflow/edit-workflow/builders.test.ts b/apps/sim/lib/copilot/tools/server/workflow/edit-workflow/builders.test.ts index a83a7efd47..b17b7ca32b 100644 --- a/apps/sim/lib/copilot/tools/server/workflow/edit-workflow/builders.test.ts +++ b/apps/sim/lib/copilot/tools/server/workflow/edit-workflow/builders.test.ts @@ -13,9 +13,17 @@ const agentBlockConfig = { subBlocks: [{ id: 'responseFormat', type: 'response-format' }], } +const conditionBlockConfig = { + type: 'condition', + name: 'Condition', + outputs: {}, + subBlocks: [{ id: 'conditions', type: 'condition-input' }], +} + vi.mock('@/blocks/registry', () => ({ - getAllBlocks: () => [agentBlockConfig], - getBlock: (type: string) => (type === 'agent' ? agentBlockConfig : undefined), + getAllBlocks: () => [agentBlockConfig, conditionBlockConfig], + getBlock: (type: string) => + type === 'agent' ? agentBlockConfig : type === 'condition' ? 
conditionBlockConfig : undefined, })) describe('createBlockFromParams', () => { @@ -41,4 +49,24 @@ describe('createBlockFromParams', () => { expect(block.outputs.answer).toBeDefined() expect(block.outputs.answer.type).toBe('string') }) + + it('preserves configured subblock types and normalizes condition branch ids', () => { + const block = createBlockFromParams('condition-1', { + type: 'condition', + name: 'Condition 1', + inputs: { + conditions: JSON.stringify([ + { id: 'arbitrary-if', title: 'if', value: 'true' }, + { id: 'arbitrary-else', title: 'else', value: '' }, + ]), + }, + triggerMode: false, + }) + + expect(block.subBlocks.conditions.type).toBe('condition-input') + + const parsed = JSON.parse(block.subBlocks.conditions.value) + expect(parsed[0].id).toBe('condition-1-if') + expect(parsed[1].id).toBe('condition-1-else') + }) }) diff --git a/apps/sim/lib/copilot/tools/server/workflow/edit-workflow/builders.ts b/apps/sim/lib/copilot/tools/server/workflow/edit-workflow/builders.ts index 9d87c14b26..eb9e8afee7 100644 --- a/apps/sim/lib/copilot/tools/server/workflow/edit-workflow/builders.ts +++ b/apps/sim/lib/copilot/tools/server/workflow/edit-workflow/builders.ts @@ -99,6 +99,8 @@ export function createBlockFromParams( sanitizedValue = normalizeArrayWithIds(value) } + sanitizedValue = normalizeConditionRouterIds(blockId, key, sanitizedValue) + // Special handling for tools - normalize and filter disallowed if (key === 'tools' && Array.isArray(value)) { sanitizedValue = filterDisallowedTools( @@ -114,9 +116,10 @@ export function createBlockFromParams( sanitizedValue = normalizeResponseFormat(value) } + const subBlockDef = blockConfig?.subBlocks.find((subBlock) => subBlock.id === key) blockState.subBlocks[key] = { id: key, - type: 'short-input', + type: subBlockDef?.type || 'short-input', value: sanitizedValue, } }) @@ -272,6 +275,52 @@ export function shouldNormalizeArrayIds(key: string): boolean { return ARRAY_WITH_ID_SUBBLOCK_TYPES.has(key) } +/** + * 
Normalizes condition/router branch IDs to use canonical block-scoped format. + * The LLM provides branch structure (if/else-if/else or routes) but should not + * have to generate the internal IDs -- we assign them based on the block ID. + */ +export function normalizeConditionRouterIds(blockId: string, key: string, value: unknown): unknown { + if (key !== 'conditions' && key !== 'routes') return value + + let parsed: any[] + if (typeof value === 'string') { + try { + parsed = JSON.parse(value) + if (!Array.isArray(parsed)) return value + } catch { + return value + } + } else if (Array.isArray(value)) { + parsed = value + } else { + return value + } + + let elseIfCounter = 0 + const normalized = parsed.map((item, index) => { + if (!item || typeof item !== 'object') return item + + let canonicalId: string + if (key === 'conditions') { + if (index === 0) { + canonicalId = `${blockId}-if` + } else if (index === parsed.length - 1) { + canonicalId = `${blockId}-else` + } else { + canonicalId = `${blockId}-else-if-${elseIfCounter}` + elseIfCounter++ + } + } else { + canonicalId = `${blockId}-route${index + 1}` + } + + return { ...item, id: canonicalId } + }) + + return typeof value === 'string' ? 
JSON.stringify(normalized) : normalized +} + /** * Normalize responseFormat to ensure consistent storage * Handles both string (JSON) and object formats diff --git a/apps/sim/lib/copilot/tools/server/workflow/edit-workflow/engine.ts b/apps/sim/lib/copilot/tools/server/workflow/edit-workflow/engine.ts index 7bb5d4c0d2..b0c6fa1cb0 100644 --- a/apps/sim/lib/copilot/tools/server/workflow/edit-workflow/engine.ts +++ b/apps/sim/lib/copilot/tools/server/workflow/edit-workflow/engine.ts @@ -1,6 +1,7 @@ import { createLogger } from '@sim/logger' import type { PermissionGroupConfig } from '@/lib/permission-groups/types' import { isValidKey } from '@/lib/workflows/sanitization/key-validation' +import { validateEdges } from '@/stores/workflows/workflow/edge-validation' import { generateLoopBlocks, generateParallelBlocks } from '@/stores/workflows/workflow/utils' import { addConnectionsAsEdges, normalizeBlockIdsInOperations } from './builders' import { @@ -213,8 +214,8 @@ export function applyOperationsToWorkflowState( handler(operation, ctx) } - // Pass 2: Add all deferred connections from add/insert operations - // Now all blocks exist, so connections can be safely created + // Pass 2: Create all edges from deferred connections + // All blocks exist at this point, so forward references resolve correctly if (ctx.deferredConnections.length > 0) { logger.info('Processing deferred connections from add/insert operations', { deferredConnectionCount: ctx.deferredConnections.length, @@ -238,6 +239,12 @@ export function applyOperationsToWorkflowState( totalEdges: (modifiedState as any).edges?.length, }) } + // Remove edges that cross scope boundaries. This runs after all operations + // and deferred connections are applied so that every block has its final + // parentId. Running it per-operation would incorrectly drop edges between + // blocks that are both being moved into the same subflow in one batch. 
+ removeInvalidScopeEdges(modifiedState, skippedItems) + // Regenerate loops and parallels after modifications ;(modifiedState as any).loops = generateLoopBlocks((modifiedState as any).blocks) @@ -272,3 +279,42 @@ export function applyOperationsToWorkflowState( return { state: modifiedState, validationErrors, skippedItems } } + +/** + * Removes edges that cross scope boundaries after all operations are applied. + * An edge is invalid if: + * - Either endpoint no longer exists (dangling reference) + * - The source and target are in incompatible scopes + * - A child block connects to its own parent container (non-handle edge) + * + * Valid scope relationships: + * - Same scope: both blocks share the same parentId + * - Container→child: source is the parent container of the target (start handles) + * - Child→container: target is the parent container of the source (end handles) + */ +function removeInvalidScopeEdges(modifiedState: any, skippedItems: SkippedItem[]): void { + const { valid, dropped } = validateEdges(modifiedState.edges || [], modifiedState.blocks || {}) + modifiedState.edges = valid + + if (dropped.length > 0) { + for (const { edge, reason } of dropped) { + logSkippedItem(skippedItems, { + type: 'invalid_edge_scope', + operationType: 'add_edge', + blockId: edge.source, + reason: `Edge from "${edge.source}" to "${edge.target}" skipped - ${reason}`, + details: { + edgeId: edge.id, + sourceHandle: edge.sourceHandle, + targetHandle: edge.targetHandle, + targetId: edge.target, + }, + }) + } + + logger.info('Removed invalid workflow edges', { + removed: dropped.length, + reasons: dropped.map(({ reason }) => reason), + }) + } +} diff --git a/apps/sim/lib/copilot/tools/server/workflow/edit-workflow/index.ts b/apps/sim/lib/copilot/tools/server/workflow/edit-workflow/index.ts index 6b8592f79e..8912fbf791 100644 --- a/apps/sim/lib/copilot/tools/server/workflow/edit-workflow/index.ts +++ b/apps/sim/lib/copilot/tools/server/workflow/edit-workflow/index.ts @@ -3,7 
+3,11 @@ import { workflow as workflowTable } from '@sim/db/schema' import { createLogger } from '@sim/logger' import { eq } from 'drizzle-orm' import type { BaseServerTool } from '@/lib/copilot/tools/server/base-tool' -import { applyAutoLayout } from '@/lib/workflows/autolayout' +import { applyTargetedLayout } from '@/lib/workflows/autolayout' +import { + DEFAULT_HORIZONTAL_SPACING, + DEFAULT_VERTICAL_SPACING, +} from '@/lib/workflows/autolayout/constants' import { extractAndPersistCustomTools } from '@/lib/workflows/persistence/custom-tools-persistence' import { loadWorkflowFromNormalizedTables, @@ -13,6 +17,7 @@ import { validateWorkflowState } from '@/lib/workflows/sanitization/validation' import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils' import { getUserPermissionConfig } from '@/ee/access-control/utils/permission-check' import { generateLoopBlocks, generateParallelBlocks } from '@/stores/workflows/workflow/utils' +import { normalizeWorkflowState } from '@/stores/workflows/workflow/validation' import { applyOperationsToWorkflowState } from './engine' import type { EditWorkflowParams, ValidationError } from './types' import { preValidateCredentialInputs, validateWorkflowSelectorIds } from './validation' @@ -30,42 +35,29 @@ async function getCurrentWorkflowStateFromDb( const normalized = await loadWorkflowFromNormalizedTables(workflowId) if (!normalized) throw new Error('Workflow has no normalized data') - // Validate and fix blocks without types - const blocks = { ...normalized.blocks } - const invalidBlocks: string[] = [] - - Object.entries(blocks).forEach(([id, block]: [string, any]) => { - if (!block.type) { - logger.warn(`Block ${id} loaded without type from database`, { - blockKeys: Object.keys(block), - blockName: block.name, - }) - invalidBlocks.push(id) - } - }) - - // Remove invalid blocks - invalidBlocks.forEach((id) => delete blocks[id]) - - // Remove edges connected to invalid blocks - const edges = 
normalized.edges.filter( - (edge: any) => !invalidBlocks.includes(edge.source) && !invalidBlocks.includes(edge.target) - ) - - const workflowState: any = { - blocks, - edges, + const { state: validatedState, warnings } = normalizeWorkflowState({ + blocks: normalized.blocks, + edges: normalized.edges, loops: normalized.loops || {}, parallels: normalized.parallels || {}, + }) + + if (warnings.length > 0) { + logger.warn('Normalized workflow state loaded from DB for copilot', { + workflowId, + warningCount: warnings.length, + warnings, + }) } + const subBlockValues: Record> = {} - Object.entries(normalized.blocks).forEach(([blockId, block]) => { + Object.entries(validatedState.blocks).forEach(([blockId, block]) => { subBlockValues[blockId] = {} Object.entries((block as any).subBlocks || {}).forEach(([subId, sub]) => { if ((sub as any).value !== undefined) subBlockValues[blockId][subId] = (sub as any).value }) }) - return { workflowState, subBlockValues } + return { workflowState: validatedState, subBlockValues } } export const editWorkflowServerTool: BaseServerTool = { @@ -137,17 +129,18 @@ export const editWorkflowServerTool: BaseServerTool // Add credential validation errors validationErrors.push(...credentialErrors) - // Get workspaceId for selector validation let workspaceId: string | undefined + let workflowName: string | undefined try { const [workflowRecord] = await db - .select({ workspaceId: workflowTable.workspaceId }) + .select({ workspaceId: workflowTable.workspaceId, name: workflowTable.name }) .from(workflowTable) .where(eq(workflowTable.id, workflowId)) .limit(1) workspaceId = workflowRecord?.workspaceId ?? undefined + workflowName = workflowRecord?.name ?? 
undefined } catch (error) { - logger.warn('Failed to get workspaceId for selector validation', { error, workflowId }) + logger.warn('Failed to get workflow metadata for validation', { error, workflowId }) } // Validate selector IDs exist in the database @@ -233,21 +226,38 @@ export const editWorkflowServerTool: BaseServerTool // Persist the workflow state to the database const finalWorkflowState = validation.sanitizedState || modifiedWorkflowState - // Apply autolayout to position blocks properly - const layoutResult = applyAutoLayout(finalWorkflowState.blocks, finalWorkflowState.edges, { - horizontalSpacing: 250, - verticalSpacing: 100, - padding: { x: 100, y: 100 }, + // Identify blocks that need layout by comparing against the pre-operation + // state. New blocks and blocks inserted into subflows (position reset to + // 0,0) need repositioning. Extracted blocks are excluded — their handler + // already computed valid absolute positions from the container offset. + const preOperationBlockIds = new Set(Object.keys(workflowState.blocks || {})) + const blocksNeedingLayout = Object.keys(finalWorkflowState.blocks).filter((id) => { + if (!preOperationBlockIds.has(id)) return true + const prevParent = workflowState.blocks[id]?.data?.parentId ?? null + const currParent = finalWorkflowState.blocks[id]?.data?.parentId ?? null + if (prevParent === currParent) return false + // Parent changed — only needs layout if position was reset to (0,0) + // by insert_into_subflow. extract_from_subflow computes absolute + // positions directly, so those blocks don't need repositioning. + const pos = finalWorkflowState.blocks[id]?.position + return pos?.x === 0 && pos?.y === 0 }) - const layoutedBlocks = - layoutResult.success && layoutResult.blocks ? 
layoutResult.blocks : finalWorkflowState.blocks + let layoutedBlocks = finalWorkflowState.blocks - if (!layoutResult.success) { - logger.warn('Autolayout failed, using default positions', { - workflowId, - error: layoutResult.error, - }) + if (blocksNeedingLayout.length > 0) { + try { + layoutedBlocks = applyTargetedLayout(finalWorkflowState.blocks, finalWorkflowState.edges, { + changedBlockIds: blocksNeedingLayout, + horizontalSpacing: DEFAULT_HORIZONTAL_SPACING, + verticalSpacing: DEFAULT_VERTICAL_SPACING, + }) + } catch (error) { + logger.warn('Targeted autolayout failed, using default positions', { + workflowId, + error: error instanceof Error ? error.message : String(error), + }) + } } const workflowStateForDb = { @@ -279,20 +289,25 @@ export const editWorkflowServerTool: BaseServerTool logger.info('Workflow state persisted to database', { workflowId }) - // Return the modified workflow state with autolayout applied + const sanitizationWarnings = validation.warnings.length > 0 ? validation.warnings : undefined + return { success: true, + workflowId, + workflowName: workflowName ?? 'Workflow', workflowState: { ...finalWorkflowState, blocks: layoutedBlocks }, - // Include input validation errors so the LLM can see what was rejected ...(inputErrors && { inputValidationErrors: inputErrors, inputValidationMessage: `${inputErrors.length} input(s) were rejected due to validation errors. The workflow was still updated with valid inputs only. Errors: ${inputErrors.join('; ')}`, }), - // Include skipped items so the LLM can see what operations were skipped ...(skippedMessages && { skippedItems: skippedMessages, skippedItemsMessage: `${skippedItems.length} operation(s) were skipped due to invalid references. 
Details: ${skippedMessages.join('; ')}`, }), + ...(sanitizationWarnings && { + sanitizationWarnings, + sanitizationMessage: `${sanitizationWarnings.length} field(s) were automatically sanitized: ${sanitizationWarnings.join('; ')}`, + }), } }, } diff --git a/apps/sim/lib/copilot/tools/server/workflow/edit-workflow/operations.test.ts b/apps/sim/lib/copilot/tools/server/workflow/edit-workflow/operations.test.ts new file mode 100644 index 0000000000..2a9248ed6b --- /dev/null +++ b/apps/sim/lib/copilot/tools/server/workflow/edit-workflow/operations.test.ts @@ -0,0 +1,264 @@ +/** + * @vitest-environment node + */ +import { describe, expect, it, vi } from 'vitest' +import { applyOperationsToWorkflowState } from './engine' + +vi.mock('@sim/logger', () => ({ + createLogger: () => ({ + info: vi.fn(), + warn: vi.fn(), + error: vi.fn(), + debug: vi.fn(), + }), +})) + +vi.mock('@/blocks/registry', () => ({ + getAllBlocks: () => [ + { + type: 'condition', + name: 'Condition', + subBlocks: [{ id: 'conditions', type: 'condition-input' }], + }, + { + type: 'agent', + name: 'Agent', + subBlocks: [ + { id: 'systemPrompt', type: 'long-input' }, + { id: 'model', type: 'combobox' }, + ], + }, + { + type: 'function', + name: 'Function', + subBlocks: [ + { id: 'code', type: 'code' }, + { id: 'language', type: 'dropdown' }, + ], + }, + ], + getBlock: (type: string) => { + const blocks: Record = { + condition: { + type: 'condition', + name: 'Condition', + subBlocks: [{ id: 'conditions', type: 'condition-input' }], + }, + agent: { + type: 'agent', + name: 'Agent', + subBlocks: [ + { id: 'systemPrompt', type: 'long-input' }, + { id: 'model', type: 'combobox' }, + ], + }, + function: { + type: 'function', + name: 'Function', + subBlocks: [ + { id: 'code', type: 'code' }, + { id: 'language', type: 'dropdown' }, + ], + }, + } + return blocks[type] || undefined + }, +})) + +function makeLoopWorkflow() { + return { + blocks: { + 'loop-1': { + id: 'loop-1', + type: 'loop', + name: 'Loop 1', + 
position: { x: 0, y: 0 }, + enabled: true, + subBlocks: {}, + outputs: {}, + data: { loopType: 'for', count: 5 }, + }, + 'condition-1': { + id: 'condition-1', + type: 'condition', + name: 'Condition 1', + position: { x: 100, y: 100 }, + enabled: true, + subBlocks: { + conditions: { + id: 'conditions', + type: 'condition-input', + value: JSON.stringify([ + { id: 'condition-1-if', title: 'if', value: 'true' }, + { id: 'condition-1-else', title: 'else', value: '' }, + ]), + }, + }, + outputs: {}, + data: { parentId: 'loop-1', extent: 'parent' }, + }, + 'agent-1': { + id: 'agent-1', + type: 'agent', + name: 'Agent 1', + position: { x: 300, y: 100 }, + enabled: true, + subBlocks: { + systemPrompt: { id: 'systemPrompt', type: 'long-input', value: 'You are helpful' }, + model: { id: 'model', type: 'combobox', value: 'gpt-4o' }, + }, + outputs: {}, + data: { parentId: 'loop-1', extent: 'parent' }, + }, + }, + edges: [ + { + id: 'edge-1', + source: 'loop-1', + sourceHandle: 'loop-start-source', + target: 'condition-1', + targetHandle: 'target', + type: 'default', + }, + { + id: 'edge-2', + source: 'condition-1', + sourceHandle: 'condition-condition-1-if', + target: 'agent-1', + targetHandle: 'target', + type: 'default', + }, + ], + loops: {}, + parallels: {}, + } +} + +describe('handleEditOperation nestedNodes merge', () => { + it('preserves existing child block IDs when editing a loop with nestedNodes', () => { + const workflow = makeLoopWorkflow() + + const { state } = applyOperationsToWorkflowState(workflow, [ + { + operation_type: 'edit', + block_id: 'loop-1', + params: { + nestedNodes: { + 'new-condition': { + type: 'condition', + name: 'Condition 1', + inputs: { + conditions: [ + { id: 'x', title: 'if', value: 'x > 1' }, + { id: 'y', title: 'else', value: '' }, + ], + }, + }, + 'new-agent': { + type: 'agent', + name: 'Agent 1', + inputs: { systemPrompt: 'Updated prompt' }, + }, + }, + }, + }, + ]) + + expect(state.blocks['condition-1']).toBeDefined() + 
expect(state.blocks['agent-1']).toBeDefined() + expect(state.blocks['new-condition']).toBeUndefined() + expect(state.blocks['new-agent']).toBeUndefined() + }) + + it('preserves edges for matched children when connections are not provided', () => { + const workflow = makeLoopWorkflow() + + const { state } = applyOperationsToWorkflowState(workflow, [ + { + operation_type: 'edit', + block_id: 'loop-1', + params: { + nestedNodes: { + x: { type: 'condition', name: 'Condition 1' }, + y: { type: 'agent', name: 'Agent 1' }, + }, + }, + }, + ]) + + const conditionEdge = state.edges.find((e: any) => e.source === 'condition-1') + expect(conditionEdge).toBeDefined() + }) + + it('removes children not present in incoming nestedNodes', () => { + const workflow = makeLoopWorkflow() + + const { state } = applyOperationsToWorkflowState(workflow, [ + { + operation_type: 'edit', + block_id: 'loop-1', + params: { + nestedNodes: { + x: { type: 'condition', name: 'Condition 1' }, + }, + }, + }, + ]) + + expect(state.blocks['condition-1']).toBeDefined() + expect(state.blocks['agent-1']).toBeUndefined() + const agentEdges = state.edges.filter( + (e: any) => e.source === 'agent-1' || e.target === 'agent-1' + ) + expect(agentEdges).toHaveLength(0) + }) + + it('creates new children that do not match existing ones', () => { + const workflow = makeLoopWorkflow() + + const { state } = applyOperationsToWorkflowState(workflow, [ + { + operation_type: 'edit', + block_id: 'loop-1', + params: { + nestedNodes: { + x: { type: 'condition', name: 'Condition 1' }, + y: { type: 'agent', name: 'Agent 1' }, + 'new-func': { type: 'function', name: 'Function 1', inputs: { code: 'return 1' } }, + }, + }, + }, + ]) + + expect(state.blocks['condition-1']).toBeDefined() + expect(state.blocks['agent-1']).toBeDefined() + const funcBlock = Object.values(state.blocks).find((b: any) => b.name === 'Function 1') + expect(funcBlock).toBeDefined() + expect((funcBlock as any).data?.parentId).toBe('loop-1') + }) + + 
it('updates inputs on matched children without changing their ID', () => { + const workflow = makeLoopWorkflow() + + const { state } = applyOperationsToWorkflowState(workflow, [ + { + operation_type: 'edit', + block_id: 'loop-1', + params: { + nestedNodes: { + x: { + type: 'agent', + name: 'Agent 1', + inputs: { systemPrompt: 'New prompt' }, + }, + y: { type: 'condition', name: 'Condition 1' }, + }, + }, + }, + ]) + + const agent = state.blocks['agent-1'] + expect(agent).toBeDefined() + expect(agent.subBlocks.systemPrompt.value).toBe('New prompt') + }) +}) diff --git a/apps/sim/lib/copilot/tools/server/workflow/edit-workflow/operations.ts b/apps/sim/lib/copilot/tools/server/workflow/edit-workflow/operations.ts index 58b3b1ab50..4423afef5e 100644 --- a/apps/sim/lib/copilot/tools/server/workflow/edit-workflow/operations.ts +++ b/apps/sim/lib/copilot/tools/server/workflow/edit-workflow/operations.ts @@ -5,12 +5,11 @@ import { getBlock } from '@/blocks/registry' import { normalizeName, RESERVED_BLOCK_NAMES } from '@/executor/constants' import { TRIGGER_RUNTIME_SUBBLOCK_IDS } from '@/triggers/constants' import { - addConnectionsAsEdges, applyTriggerConfigToBlockSubblocks, createBlockFromParams, - createValidatedEdge, filterDisallowedTools, normalizeArrayWithIds, + normalizeConditionRouterIds, normalizeResponseFormat, normalizeTools, shouldNormalizeArrayIds, @@ -78,7 +77,8 @@ export function handleDeleteOperation(op: EditWorkflowOperation, ctx: OperationC } export function handleEditOperation(op: EditWorkflowOperation, ctx: OperationContext): void { - const { modifiedState, skippedItems, validationErrors, permissionConfig } = ctx + const { modifiedState, skippedItems, validationErrors, permissionConfig, deferredConnections } = + ctx const { block_id, params } = op if (!modifiedState.blocks[block_id]) { @@ -148,6 +148,8 @@ export function handleEditOperation(op: EditWorkflowOperation, ctx: OperationCon sanitizedValue = normalizeArrayWithIds(value) } + sanitizedValue = 
normalizeConditionRouterIds(block_id, key, sanitizedValue) + // Special handling for tools - normalize and filter disallowed if (key === 'tools' && Array.isArray(value)) { sanitizedValue = filterDisallowedTools( @@ -164,9 +166,10 @@ export function handleEditOperation(op: EditWorkflowOperation, ctx: OperationCon } if (!block.subBlocks[key]) { + const subBlockDef = getBlock(block.type)?.subBlocks.find((sb) => sb.id === key) block.subBlocks[key] = { id: key, - type: 'short-input', + type: subBlockDef?.type || 'short-input', value: sanitizedValue, } } else { @@ -335,38 +338,23 @@ export function handleEditOperation(op: EditWorkflowOperation, ctx: OperationCon block.advancedMode = params.advancedMode } - // Handle nested nodes update (for loops/parallels) + // Handle nested nodes update (for loops/parallels) using merge strategy. + // Existing children that match an incoming node by name are updated in place + // (preserving their block ID). New children are created. Children not present + // in the incoming set are removed. 
if (params?.nestedNodes) { - // Remove all existing child blocks - const existingChildren = Object.keys(modifiedState.blocks).filter( - (id) => modifiedState.blocks[id].data?.parentId === block_id + const existingChildren: Array<[string, any]> = Object.entries(modifiedState.blocks).filter( + ([, b]: [string, any]) => b.data?.parentId === block_id ) - existingChildren.forEach((childId) => delete modifiedState.blocks[childId]) - // Remove edges to/from removed children - modifiedState.edges = modifiedState.edges.filter( - (edge: any) => - !existingChildren.includes(edge.source) && !existingChildren.includes(edge.target) - ) + const existingByName = new Map() + for (const [id, child] of existingChildren) { + existingByName.set(normalizeName(child.name), [id, child]) + } - // Add new nested blocks - Object.entries(params.nestedNodes).forEach(([childId, childBlock]: [string, any]) => { - // Validate childId is a valid string - if (!isValidKey(childId)) { - logSkippedItem(skippedItems, { - type: 'missing_required_params', - operationType: 'add_nested_node', - blockId: String(childId || 'invalid'), - reason: `Invalid childId "${childId}" in nestedNodes - child block skipped`, - }) - logger.error('Invalid childId detected in nestedNodes', { - parentBlockId: block_id, - childId, - childId_type: typeof childId, - }) - return - } + const matchedExistingIds = new Set() + Object.entries(params.nestedNodes).forEach(([childId, childBlock]: [string, any]) => { if (childBlock.type === 'loop' || childBlock.type === 'parallel') { logSkippedItem(skippedItems, { type: 'nested_subflow_not_allowed', @@ -378,22 +366,108 @@ export function handleEditOperation(op: EditWorkflowOperation, ctx: OperationCon return } - const childBlockState = createBlockFromParams( - childId, - childBlock, - block_id, - validationErrors, - permissionConfig, - skippedItems - ) - modifiedState.blocks[childId] = childBlockState + const incomingName = normalizeName(childBlock.name || '') + const existingMatch = 
incomingName ? existingByName.get(incomingName) : undefined - // Add connections for child block - if (childBlock.connections) { - addConnectionsAsEdges(modifiedState, childId, childBlock.connections, logger, skippedItems) + if (existingMatch) { + const [existingId, existingBlock] = existingMatch + matchedExistingIds.add(existingId) + + if (childBlock.inputs) { + if (!existingBlock.subBlocks) existingBlock.subBlocks = {} + const childValidation = validateInputsForBlock( + existingBlock.type, + childBlock.inputs, + existingId + ) + validationErrors.push(...childValidation.errors) + + Object.entries(childValidation.validInputs).forEach(([key, value]) => { + if (TRIGGER_RUNTIME_SUBBLOCK_IDS.includes(key)) return + let sanitizedValue = value + if (shouldNormalizeArrayIds(key)) { + sanitizedValue = normalizeArrayWithIds(value) + } + sanitizedValue = normalizeConditionRouterIds(existingId, key, sanitizedValue) + if (key === 'tools' && Array.isArray(value)) { + sanitizedValue = filterDisallowedTools( + normalizeTools(value), + permissionConfig, + existingId, + skippedItems + ) + } + if (key === 'responseFormat' && value) { + sanitizedValue = normalizeResponseFormat(value) + } + + const subBlockDef = getBlock(existingBlock.type)?.subBlocks.find( + (sb: any) => sb.id === key + ) + if (!existingBlock.subBlocks[key]) { + existingBlock.subBlocks[key] = { + id: key, + type: subBlockDef?.type || 'short-input', + value: sanitizedValue, + } + } else { + existingBlock.subBlocks[key].value = sanitizedValue + } + }) + } + + if (childBlock.connections) { + modifiedState.edges = modifiedState.edges.filter( + (edge: any) => edge.source !== existingId + ) + deferredConnections.push({ + blockId: existingId, + connections: childBlock.connections, + }) + } + } else { + if (!isValidKey(childId)) { + logSkippedItem(skippedItems, { + type: 'missing_required_params', + operationType: 'add_nested_node', + blockId: String(childId || 'invalid'), + reason: `Invalid childId "${childId}" in 
nestedNodes - child block skipped`, + }) + return + } + + const childBlockState = createBlockFromParams( + childId, + childBlock, + block_id, + validationErrors, + permissionConfig, + skippedItems + ) + modifiedState.blocks[childId] = childBlockState + + if (childBlock.connections) { + deferredConnections.push({ + blockId: childId, + connections: childBlock.connections, + }) + } } }) + const removedIds = new Set() + for (const [existingId] of existingChildren) { + if (!matchedExistingIds.has(existingId)) { + delete modifiedState.blocks[existingId] + removedIds.add(existingId) + } + } + if (removedIds.size > 0) { + modifiedState.edges = modifiedState.edges.filter( + (edge: any) => !removedIds.has(edge.source) && !removedIds.has(edge.target) + ) + } + // Update loop/parallel configuration based on type (strict validation) if (block.type === 'loop') { block.data = block.data || {} @@ -446,47 +520,13 @@ export function handleEditOperation(op: EditWorkflowOperation, ctx: OperationCon } } - // Handle connections update (convert to edges) + // Defer connections to pass 2 so all blocks exist before edges are created if (params?.connections) { modifiedState.edges = modifiedState.edges.filter((edge: any) => edge.source !== block_id) - Object.entries(params.connections).forEach(([connectionType, targets]) => { - if (targets === null) return - - const mapConnectionTypeToHandle = (type: string): string => { - if (type === 'success') return 'source' - if (type === 'error') return 'error' - return type - } - - const sourceHandle = mapConnectionTypeToHandle(connectionType) - - const addEdgeForTarget = (targetBlock: string, targetHandle?: string) => { - createValidatedEdge( - modifiedState, - block_id, - targetBlock, - sourceHandle, - targetHandle || 'target', - 'edit', - logger, - skippedItems - ) - } - - if (typeof targets === 'string') { - addEdgeForTarget(targets) - } else if (Array.isArray(targets)) { - targets.forEach((target: any) => { - if (typeof target === 'string') { - 
addEdgeForTarget(target) - } else if (target?.block) { - addEdgeForTarget(target.block, target.handle) - } - }) - } else if (typeof targets === 'object' && (targets as any)?.block) { - addEdgeForTarget((targets as any).block, (targets as any).handle) - } + deferredConnections.push({ + blockId: block_id, + connections: params.connections, }) } @@ -827,12 +867,16 @@ export function handleInsertIntoSubflowOperation( return } - // Moving existing block into subflow - just update parent + // Moving existing block into subflow — update parent and reset position. + // Position must be reset because React Flow uses coordinates relative to + // the parent container; keeping the old absolute position would place the + // block far outside the container's bounds. existingBlock.data = { ...existingBlock.data, parentId: subflowId, extent: 'parent' as const, } + existingBlock.position = { x: 0, y: 0 } // Update inputs if provided (with validation) if (params.inputs) { @@ -853,6 +897,8 @@ export function handleInsertIntoSubflowOperation( sanitizedValue = normalizeArrayWithIds(value) } + sanitizedValue = normalizeConditionRouterIds(block_id, key, sanitizedValue) + // Special handling for tools - normalize and filter disallowed if (key === 'tools' && Array.isArray(value)) { sanitizedValue = filterDisallowedTools( @@ -869,9 +915,10 @@ export function handleInsertIntoSubflowOperation( } if (!existingBlock.subBlocks[key]) { + const subBlockDef = getBlock(existingBlock.type)?.subBlocks.find((sb) => sb.id === key) existingBlock.subBlocks[key] = { id: key, - type: 'short-input', + type: subBlockDef?.type || 'short-input', value: sanitizedValue, } } else { @@ -1006,12 +1053,25 @@ export function handleExtractFromSubflowOperation( }) } - // Remove parent relationship + // Convert from relative (to container) to absolute position so the block + // appears at roughly the same visual location after extraction. 
This avoids + // needing targeted layout to reposition it — extracted blocks often lose + // their edges to siblings still in the container, making them disconnected + // and causing layout to stack them at layer 0. + const container = modifiedState.blocks[subflowId] + if (container?.position && block.position) { + block.position = { + x: (container.position.x ?? 0) + (block.position.x ?? 0), + y: (container.position.y ?? 0) + (block.position.y ?? 0), + } + } else { + // Fallback to (0,0) which signals to blocksNeedingLayout in index.ts + // that this block requires targeted layout repositioning. + block.position = { x: 0, y: 0 } + } + if (block.data) { block.data.parentId = undefined block.data.extent = undefined } - - // Note: We keep the block and its edges, just remove parent relationship - // The block becomes a root-level block } diff --git a/apps/sim/lib/copilot/tools/server/workflow/edit-workflow/types.ts b/apps/sim/lib/copilot/tools/server/workflow/edit-workflow/types.ts index 09b766e069..9a118467b1 100644 --- a/apps/sim/lib/copilot/tools/server/workflow/edit-workflow/types.ts +++ b/apps/sim/lib/copilot/tools/server/workflow/edit-workflow/types.ts @@ -42,6 +42,7 @@ export type SkippedItemType = | 'tool_not_allowed' | 'invalid_edge_target' | 'invalid_edge_source' + | 'invalid_edge_scope' | 'invalid_source_handle' | 'invalid_target_handle' | 'invalid_subblock_field' diff --git a/apps/sim/lib/copilot/tools/server/workflow/edit-workflow/validation.test.ts b/apps/sim/lib/copilot/tools/server/workflow/edit-workflow/validation.test.ts new file mode 100644 index 0000000000..050c019203 --- /dev/null +++ b/apps/sim/lib/copilot/tools/server/workflow/edit-workflow/validation.test.ts @@ -0,0 +1,90 @@ +/** + * @vitest-environment node + */ +import { describe, expect, it, vi } from 'vitest' +import { normalizeConditionRouterIds } from './builders' +import { validateInputsForBlock } from './validation' + +const conditionBlockConfig = { + type: 'condition', + name: 
'Condition', + outputs: {}, + subBlocks: [{ id: 'conditions', type: 'condition-input' }], +} + +const routerBlockConfig = { + type: 'router_v2', + name: 'Router', + outputs: {}, + subBlocks: [{ id: 'routes', type: 'router-input' }], +} + +vi.mock('@/blocks/registry', () => ({ + getBlock: (type: string) => + type === 'condition' + ? conditionBlockConfig + : type === 'router_v2' + ? routerBlockConfig + : undefined, +})) + +describe('validateInputsForBlock', () => { + it('accepts condition-input arrays with arbitrary item ids', () => { + const result = validateInputsForBlock( + 'condition', + { + conditions: JSON.stringify([ + { id: 'cond-1-if', title: 'if', value: 'true' }, + { id: 'cond-1-else', title: 'else', value: '' }, + ]), + }, + 'condition-1' + ) + + expect(result.validInputs.conditions).toBeDefined() + expect(result.errors).toHaveLength(0) + }) + + it('rejects non-array condition-input values', () => { + const result = validateInputsForBlock('condition', { conditions: 'not-json' }, 'condition-1') + + expect(result.validInputs.conditions).toBeUndefined() + expect(result.errors).toHaveLength(1) + expect(result.errors[0]?.error).toContain('expected a JSON array') + }) +}) + +describe('normalizeConditionRouterIds', () => { + it('assigns canonical block-scoped ids to condition branches', () => { + const input = JSON.stringify([ + { id: 'whatever', title: 'if', value: 'true' }, + { id: 'anything', title: 'else if', value: 'false' }, + { id: 'doesnt-matter', title: 'else', value: '' }, + ]) + + const result = normalizeConditionRouterIds('block-1', 'conditions', input) + const parsed = JSON.parse(result as string) + + expect(parsed[0].id).toBe('block-1-if') + expect(parsed[1].id).toBe('block-1-else-if-0') + expect(parsed[2].id).toBe('block-1-else') + }) + + it('assigns canonical block-scoped ids to router routes', () => { + const input = [ + { id: 'route-a', title: 'Support', value: 'support query' }, + { id: 'route-b', title: 'Sales', value: 'sales query' }, + ] + 
+ const result = normalizeConditionRouterIds('block-1', 'routes', input) + const arr = result as any[] + + expect(arr[0].id).toBe('block-1-route1') + expect(arr[1].id).toBe('block-1-route2') + }) + + it('passes through non-condition/router keys unchanged', () => { + const input = 'some value' + expect(normalizeConditionRouterIds('block-1', 'code', input)).toBe(input) + }) +}) diff --git a/apps/sim/lib/copilot/tools/server/workflow/edit-workflow/validation.ts b/apps/sim/lib/copilot/tools/server/workflow/edit-workflow/validation.ts index 18b787ba9b..1847dcbfc4 100644 --- a/apps/sim/lib/copilot/tools/server/workflow/edit-workflow/validation.ts +++ b/apps/sim/lib/copilot/tools/server/workflow/edit-workflow/validation.ts @@ -244,6 +244,35 @@ export function validateValueForSubBlockType( return { valid: true, value } } + case 'condition-input': + case 'router-input': { + const parsedValue = + typeof value === 'string' + ? (() => { + try { + return JSON.parse(value) + } catch { + return null + } + })() + : value + + if (!Array.isArray(parsedValue)) { + return { + valid: false, + error: { + blockId, + blockType, + field: fieldName, + value, + error: `Invalid ${type} value for field "${fieldName}" - expected a JSON array`, + }, + } + } + + return { valid: true, value } + } + case 'tool-input': { // Should be an array of tool objects if (!Array.isArray(value)) { diff --git a/apps/sim/lib/workflows/dynamic-handle-topology.test.ts b/apps/sim/lib/workflows/dynamic-handle-topology.test.ts new file mode 100644 index 0000000000..ca3a87ad8f --- /dev/null +++ b/apps/sim/lib/workflows/dynamic-handle-topology.test.ts @@ -0,0 +1,85 @@ +/** + * @vitest-environment node + */ +import { describe, expect, it } from 'vitest' +import type { BlockState } from '@/stores/workflows/workflow/types' +import { + collectDynamicHandleTopologySignatures, + getChangedDynamicHandleBlockIds, + getConditionRows, + getDynamicHandleTopologySignature, + getRouterRows, +} from './dynamic-handle-topology' + 
+describe('dynamic handle topology', () => { + it('falls back to canonical condition rows when value is empty', () => { + expect(getConditionRows('condition-1', null)).toEqual([ + { id: 'condition-1-if', title: 'if', value: '' }, + { id: 'condition-1-else', title: 'else', value: '' }, + ]) + }) + + it('falls back to canonical router rows when value is empty', () => { + expect(getRouterRows('router-1', null)).toEqual([{ id: 'router-1-route1', value: '' }]) + }) + + it('builds topology signatures from condition ids', () => { + const block = { + id: 'condition-1', + type: 'condition', + subBlocks: { + conditions: { + id: 'conditions', + type: 'condition-input', + value: JSON.stringify([ + { id: 'condition-1-if', title: 'if', value: 'true' }, + { id: 'condition-1-else', title: 'else', value: '' }, + ]), + }, + }, + } as BlockState + + expect(getDynamicHandleTopologySignature(block)).toBe( + 'condition:condition-1-if|condition-1-else' + ) + }) + + it('detects topology changes only for changed dynamic-handle blocks', () => { + const previous = new Map([ + ['condition-1', 'condition:condition-1-if|condition-1-else'], + ]) + const nextBlocks = { + 'condition-1': { + id: 'condition-1', + type: 'condition', + name: 'Condition 1', + position: { x: 0, y: 0 }, + enabled: true, + subBlocks: { + conditions: { + id: 'conditions', + type: 'condition-input', + value: JSON.stringify([ + { id: 'condition-1-if', title: 'if', value: 'true' }, + { id: 'condition-1-else-if-0', title: 'else if', value: 'false' }, + { id: 'condition-1-else', title: 'else', value: '' }, + ]), + }, + }, + outputs: {}, + }, + 'function-1': { + id: 'function-1', + type: 'function', + name: 'Function 1', + position: { x: 0, y: 0 }, + enabled: true, + subBlocks: {}, + outputs: {}, + }, + } as Record + + const next = collectDynamicHandleTopologySignatures(nextBlocks) + expect(getChangedDynamicHandleBlockIds(previous, next)).toEqual(['condition-1']) + }) +}) diff --git 
a/apps/sim/lib/workflows/dynamic-handle-topology.ts b/apps/sim/lib/workflows/dynamic-handle-topology.ts new file mode 100644 index 0000000000..91e05b5cd2 --- /dev/null +++ b/apps/sim/lib/workflows/dynamic-handle-topology.ts @@ -0,0 +1,143 @@ +import type { BlockState } from '@/stores/workflows/workflow/types' + +export interface ConditionRow { + id: string + title: string + value: string +} + +export interface RouterRow { + id: string + value: string +} + +function parseStructuredValue(value: unknown): unknown[] | null { + if (typeof value === 'string') { + try { + const parsed = JSON.parse(value) + return Array.isArray(parsed) ? parsed : null + } catch { + return null + } + } + + return Array.isArray(value) ? value : null +} + +export function isDynamicHandleBlockType( + type: string | undefined +): type is 'condition' | 'router_v2' { + return type === 'condition' || type === 'router_v2' +} + +export function getDynamicHandleSubblockId( + blockType: string | undefined +): 'conditions' | 'routes' | null { + if (blockType === 'condition') return 'conditions' + if (blockType === 'router_v2') return 'routes' + return null +} + +export function getDynamicHandleSubblockType( + blockType: string | undefined +): 'condition-input' | 'router-input' | null { + if (blockType === 'condition') return 'condition-input' + if (blockType === 'router_v2') return 'router-input' + return null +} + +export function isDynamicHandleSubblock( + blockType: string | undefined, + subblockId: string +): boolean { + return getDynamicHandleSubblockId(blockType) === subblockId +} + +export function getConditionRows(blockId: string, value: unknown): ConditionRow[] { + const parsed = parseStructuredValue(value) + + if (parsed) { + const rows = parsed.map((item, index) => { + const conditionItem = item as { id?: string; value?: unknown } + const title = index === 0 ? 'if' : index === parsed.length - 1 ? 'else' : 'else if' + return { + id: conditionItem?.id ?? 
`${blockId}-cond-${index}`, + title, + value: typeof conditionItem?.value === 'string' ? conditionItem.value : '', + } + }) + + if (rows.length > 0) { + return rows + } + } + + return [ + { id: `${blockId}-if`, title: 'if', value: '' }, + { id: `${blockId}-else`, title: 'else', value: '' }, + ] +} + +export function getRouterRows(blockId: string, value: unknown): RouterRow[] { + const parsed = parseStructuredValue(value) + + if (parsed) { + const rows = parsed.map((item, index) => { + const routeItem = item as { id?: string; value?: string } + return { + id: routeItem?.id ?? `${blockId}-route${index + 1}`, + value: routeItem?.value ?? '', + } + }) + + if (rows.length > 0) { + return rows + } + } + + return [{ id: `${blockId}-route1`, value: '' }] +} + +export function getDynamicHandleTopologySignature(block: BlockState): string | null { + if (block.type === 'condition') { + const rows = getConditionRows(block.id, block.subBlocks?.conditions?.value) + return `condition:${rows.map((row) => row.id).join('|')}` + } + + if (block.type === 'router_v2') { + const rows = getRouterRows(block.id, block.subBlocks?.routes?.value) + return `router:${rows.map((row) => row.id).join('|')}` + } + + return null +} + +export function collectDynamicHandleTopologySignatures( + blocks: Record +): Map { + const signatures = new Map() + + for (const [blockId, block] of Object.entries(blocks)) { + const signature = getDynamicHandleTopologySignature(block) + if (signature) { + signatures.set(blockId, signature) + } + } + + return signatures +} + +export function getChangedDynamicHandleBlockIds( + previous: Map, + next: Map +): string[] { + const changedIds: string[] = [] + + for (const [blockId, signature] of next) { + if (previous.get(blockId) !== signature) { + changedIds.push(blockId) + } + } + + return changedIds +} diff --git a/apps/sim/lib/workflows/operations/socket-operations.ts b/apps/sim/lib/workflows/operations/socket-operations.ts index e0abd45051..5aaf99ed99 100644 --- 
a/apps/sim/lib/workflows/operations/socket-operations.ts +++ b/apps/sim/lib/workflows/operations/socket-operations.ts @@ -2,6 +2,7 @@ import { createLogger } from '@sim/logger' import { client } from '@/lib/auth/auth-client' import { useOperationQueueStore } from '@/stores/operation-queue/store' import type { WorkflowState } from '@/stores/workflows/workflow/types' +import { normalizeWorkflowState } from '@/stores/workflows/workflow/validation' const logger = createLogger('WorkflowSocketOperations') @@ -76,11 +77,21 @@ export async function enqueueReplaceWorkflowState({ state, operationId, }: EnqueueReplaceStateArgs): Promise { + const { state: validatedState, warnings } = normalizeWorkflowState(state) + + if (warnings.length > 0) { + logger.warn('Normalized state before enqueuing replace-state', { + workflowId, + warningCount: warnings.length, + warnings, + }) + } + return enqueueWorkflowOperation({ workflowId, operation: 'replace-state', target: 'workflow', - payload: { state }, + payload: { state: validatedState }, operationId, }) } diff --git a/apps/sim/stores/panel/copilot/store.ts b/apps/sim/stores/panel/copilot/store.ts index bd4dd76e2a..e36d697f7d 100644 --- a/apps/sim/stores/panel/copilot/store.ts +++ b/apps/sim/stores/panel/copilot/store.ts @@ -1854,7 +1854,7 @@ export const useCopilotStore = create()( } // Apply to main workflow store - useWorkflowStore.setState({ + useWorkflowStore.getState().replaceWorkflowState({ blocks: reverted.blocks ?? {}, edges: reverted.edges ?? [], loops: reverted.loops ?? 
{}, diff --git a/apps/sim/stores/workflow-diff/store.ts b/apps/sim/stores/workflow-diff/store.ts index 339465ec59..de0537237d 100644 --- a/apps/sim/stores/workflow-diff/store.ts +++ b/apps/sim/stores/workflow-diff/store.ts @@ -190,22 +190,6 @@ export const useWorkflowDiffStore = create { - if (!persisted) { - logger.warn('Failed to persist copilot edits (state already applied locally)') - // Don't revert - user can retry or state will sync on next save - } else { - logger.info('Workflow diff persisted to database', { - workflowId: activeWorkflowId, - }) - } - }) - .catch((error) => { - logger.warn('Failed to persist workflow state (non-blocking)', { error }) - }) - // Emit event for undo/redo recording if (!options?.skipRecording) { window.dispatchEvent( diff --git a/apps/sim/stores/workflows/registry/store.ts b/apps/sim/stores/workflows/registry/store.ts index b94b48b10a..dca49b8dda 100644 --- a/apps/sim/stores/workflows/registry/store.ts +++ b/apps/sim/stores/workflows/registry/store.ts @@ -332,7 +332,7 @@ export const useWorkflowRegistry = create()( return } - useWorkflowStore.setState(workflowState) + useWorkflowStore.getState().replaceWorkflowState(workflowState) useSubBlockStore.getState().initializeFromWorkflow(workflowId, workflowState.blocks || {}) if (workflowData?.variables && typeof workflowData.variables === 'object') { @@ -637,7 +637,7 @@ export const useWorkflowRegistry = create()( useSubBlockStore.setState({ workflowValues: originalState.subBlockValues }) if (originalState.workflowStoreState) { - useWorkflowStore.setState(originalState.workflowStoreState) + useWorkflowStore.getState().replaceWorkflowState(originalState.workflowStoreState) logger.info(`Restored workflow store state for workflow ${id}`) } diff --git a/apps/sim/stores/workflows/utils.ts b/apps/sim/stores/workflows/utils.ts index e82bfcd731..eecb95c665 100644 --- a/apps/sim/stores/workflows/utils.ts +++ b/apps/sim/stores/workflows/utils.ts @@ -6,10 +6,10 @@ import { 
remapConditionBlockIds, remapConditionEdgeHandle } from '@/lib/workflow import { mergeSubblockStateWithValues } from '@/lib/workflows/subblocks' import { buildDefaultCanonicalModes } from '@/lib/workflows/subblocks/visibility' import { hasTriggerCapability } from '@/lib/workflows/triggers/trigger-utils' -import { TriggerUtils } from '@/lib/workflows/triggers/triggers' import { getBlock } from '@/blocks' -import { isAnnotationOnlyBlock, normalizeName } from '@/executor/constants' +import { normalizeName } from '@/executor/constants' import { useSubBlockStore } from '@/stores/workflows/subblock/store' +import { validateEdges } from '@/stores/workflows/workflow/edge-validation' import type { BlockState, Loop, @@ -23,30 +23,11 @@ import { TRIGGER_RUNTIME_SUBBLOCK_IDS } from '@/triggers/constants' /** Threshold to detect viewport-based offsets vs small duplicate offsets */ const LARGE_OFFSET_THRESHOLD = 300 -/** - * Checks if an edge is valid (source and target exist, not annotation-only, target is not a trigger) - */ -function isValidEdge( - edge: Edge, - blocks: Record -): boolean { - const sourceBlock = blocks[edge.source] - const targetBlock = blocks[edge.target] - if (!sourceBlock || !targetBlock) return false - if (isAnnotationOnlyBlock(sourceBlock.type)) return false - if (isAnnotationOnlyBlock(targetBlock.type)) return false - if (TriggerUtils.isTriggerBlock(targetBlock)) return false - return true -} - /** * Filters edges to only include valid ones (target exists and is not a trigger block) */ -export function filterValidEdges( - edges: Edge[], - blocks: Record -): Edge[] { - return edges.filter((edge) => isValidEdge(edge, blocks)) +export function filterValidEdges(edges: Edge[], blocks: Record): Edge[] { + return validateEdges(edges, blocks).valid } export function filterNewEdges(edgesToAdd: Edge[], currentEdges: Edge[]): Edge[] { diff --git a/apps/sim/stores/workflows/workflow/edge-validation.ts b/apps/sim/stores/workflows/workflow/edge-validation.ts new file 
mode 100644 index 0000000000..44b830fe40 --- /dev/null +++ b/apps/sim/stores/workflows/workflow/edge-validation.ts @@ -0,0 +1,86 @@ +import type { Edge } from 'reactflow' +import { TriggerUtils } from '@/lib/workflows/triggers/triggers' +import { isAnnotationOnlyBlock } from '@/executor/constants' +import type { BlockState } from '@/stores/workflows/workflow/types' + +export interface DroppedEdge { + edge: Edge + reason: string +} + +export interface EdgeValidationResult { + valid: Edge[] + dropped: DroppedEdge[] +} + +function isContainerBlock(block: BlockState | undefined): boolean { + return block?.type === 'loop' || block?.type === 'parallel' +} + +function getParentId(block: BlockState | undefined): string | null { + return block?.data?.parentId ?? null +} + +function getScopeDropReason(edge: Edge, blocks: Record): string | null { + const sourceBlock = blocks[edge.source] + const targetBlock = blocks[edge.target] + + if (!sourceBlock || !targetBlock) { + return 'edge references a missing block' + } + + const sourceParent = getParentId(sourceBlock) + const targetParent = getParentId(targetBlock) + + if (sourceParent === targetParent) { + return null + } + + if (targetParent === edge.source && isContainerBlock(sourceBlock)) { + return null + } + + if (sourceParent === edge.target && isContainerBlock(targetBlock)) { + return null + } + + return `blocks are in different scopes (${sourceParent ?? 'root'} -> ${targetParent ?? 
'root'})` +} + +export function validateEdges( + edges: Edge[], + blocks: Record +): EdgeValidationResult { + const valid: Edge[] = [] + const dropped: DroppedEdge[] = [] + + for (const edge of edges) { + const sourceBlock = blocks[edge.source] + const targetBlock = blocks[edge.target] + + if (!sourceBlock || !targetBlock) { + dropped.push({ edge, reason: 'edge references a missing block' }) + continue + } + + if (isAnnotationOnlyBlock(sourceBlock.type) || isAnnotationOnlyBlock(targetBlock.type)) { + dropped.push({ edge, reason: 'edge references an annotation-only block' }) + continue + } + + if (TriggerUtils.isTriggerBlock(targetBlock)) { + dropped.push({ edge, reason: 'trigger blocks cannot be edge targets' }) + continue + } + + const scopeDropReason = getScopeDropReason(edge, blocks) + if (scopeDropReason) { + dropped.push({ edge, reason: scopeDropReason }) + continue + } + + valid.push(edge) + } + + return { valid, dropped } +} diff --git a/apps/sim/stores/workflows/workflow/store.test.ts b/apps/sim/stores/workflows/workflow/store.test.ts index d4814dcfd2..dc24da784e 100644 --- a/apps/sim/stores/workflows/workflow/store.test.ts +++ b/apps/sim/stores/workflows/workflow/store.test.ts @@ -792,6 +792,40 @@ describe('workflow store', () => { }) }) + describe('syncDynamicHandleSubblockValue', () => { + it('should sync condition topology values into the workflow store', () => { + addBlock('condition-1', 'condition', 'Condition 1', { x: 0, y: 0 }) + + useWorkflowStore.getState().syncDynamicHandleSubblockValue( + 'condition-1', + 'conditions', + JSON.stringify([ + { id: 'condition-1-if', title: 'if', value: 'true' }, + { id: 'condition-1-else', title: 'else', value: '' }, + ]) + ) + + const conditionBlock = useWorkflowStore.getState().blocks['condition-1'] + expect(conditionBlock.subBlocks.conditions?.type).toBe('condition-input') + expect(conditionBlock.subBlocks.conditions?.value).toBe( + JSON.stringify([ + { id: 'condition-1-if', title: 'if', value: 'true' }, + { id: 
'condition-1-else', title: 'else', value: '' }, + ]) + ) + }) + + it('should ignore non-topology subblock updates', () => { + addBlock('function-1', 'function', 'Function 1', { x: 0, y: 0 }) + const beforeBlock = useWorkflowStore.getState().blocks['function-1'] + + useWorkflowStore.getState().syncDynamicHandleSubblockValue('function-1', 'code', 'return 1') + + const afterBlock = useWorkflowStore.getState().blocks['function-1'] + expect(afterBlock).toEqual(beforeBlock) + }) + }) + describe('getWorkflowState', () => { it('should return current workflow state', () => { const { getWorkflowState } = useWorkflowStore.getState() diff --git a/apps/sim/stores/workflows/workflow/store.ts b/apps/sim/stores/workflows/workflow/store.ts index bc97773d5c..10a580c36f 100644 --- a/apps/sim/stores/workflows/workflow/store.ts +++ b/apps/sim/stores/workflows/workflow/store.ts @@ -3,6 +3,10 @@ import type { Edge } from 'reactflow' import { create } from 'zustand' import { devtools } from 'zustand/middleware' import { DEFAULT_DUPLICATE_OFFSET } from '@/lib/workflows/autolayout/constants' +import { + getDynamicHandleSubblockType, + isDynamicHandleSubblock, +} from '@/lib/workflows/dynamic-handle-topology' import type { SubBlockConfig } from '@/blocks/types' import { normalizeName, RESERVED_BLOCK_NAMES } from '@/executor/constants' import { useWorkflowRegistry } from '@/stores/workflows/registry/store' @@ -27,6 +31,7 @@ import { isBlockProtected, wouldCreateCycle, } from '@/stores/workflows/workflow/utils' +import { normalizeWorkflowState } from '@/stores/workflows/workflow/validation' const logger = createLogger('WorkflowStore') @@ -511,15 +516,25 @@ export const useWorkflowStore = create()( options?: { updateLastSaved?: boolean } ) => { set((state) => { - const nextBlocks = workflowState.blocks || {} - const nextEdges = filterValidEdges(workflowState.edges || [], nextBlocks) + const normalization = normalizeWorkflowState(workflowState) + const nextState = normalization.state + + if 
(normalization.warnings.length > 0) { + logger.warn('Normalized workflow state during replaceWorkflowState', { + warningCount: normalization.warnings.length, + warnings: normalization.warnings, + }) + } + + const nextBlocks = nextState.blocks || {} + const nextEdges = nextState.edges || [] const nextLoops = - Object.keys(workflowState.loops || {}).length > 0 - ? workflowState.loops + Object.keys(nextState.loops || {}).length > 0 + ? nextState.loops : generateLoopBlocks(nextBlocks) const nextParallels = - Object.keys(workflowState.parallels || {}).length > 0 - ? workflowState.parallels + Object.keys(nextState.parallels || {}).length > 0 + ? nextState.parallels : generateParallelBlocks(nextBlocks) return { @@ -528,15 +543,15 @@ export const useWorkflowStore = create()( edges: nextEdges, loops: nextLoops, parallels: nextParallels, - deploymentStatuses: workflowState.deploymentStatuses || state.deploymentStatuses, + deploymentStatuses: nextState.deploymentStatuses || state.deploymentStatuses, needsRedeployment: - workflowState.needsRedeployment !== undefined - ? workflowState.needsRedeployment + nextState.needsRedeployment !== undefined + ? nextState.needsRedeployment : state.needsRedeployment, lastSaved: options?.updateLastSaved === true ? Date.now() - : (workflowState.lastSaved ?? state.lastSaved), + : (nextState.lastSaved ?? 
state.lastSaved), } }) }, @@ -856,6 +871,48 @@ export const useWorkflowStore = create()( get().updateLastSaved() }, + syncDynamicHandleSubblockValue: (blockId: string, subblockId: string, value: unknown) => { + set((state) => { + const block = state.blocks[blockId] + if (!block || !isDynamicHandleSubblock(block.type, subblockId)) { + return state + } + + const expectedType = getDynamicHandleSubblockType(block.type) + if (!expectedType) { + return state + } + + const currentSubBlock = block.subBlocks?.[subblockId] + const currentValue = currentSubBlock?.value + const valuesEqual = + typeof currentValue === 'object' || typeof value === 'object' + ? JSON.stringify(currentValue) === JSON.stringify(value) + : currentValue === value + + if (valuesEqual && currentSubBlock?.type === expectedType) { + return state + } + + return { + blocks: { + ...state.blocks, + [blockId]: { + ...block, + subBlocks: { + ...block.subBlocks, + [subblockId]: { + id: subblockId, + type: expectedType, + value: value as SubBlockState['value'], + }, + }, + }, + }, + } + }) + }, + setBlockTriggerMode: (id: string, triggerMode: boolean) => { set((state) => ({ blocks: { @@ -1055,36 +1112,20 @@ export const useWorkflowStore = create()( return } - // Preserving the workflow-specific deployment status if it exists const deploymentStatus = useWorkflowRegistry .getState() .getWorkflowDeploymentStatus(activeWorkflowId) - const newState = { - blocks: deployedState.blocks, - edges: filterValidEdges(deployedState.edges ?? [], deployedState.blocks), - loops: deployedState.loops || {}, - parallels: deployedState.parallels || {}, + get().replaceWorkflowState({ + ...deployedState, needsRedeployment: false, - // Keep existing deployment statuses and update for the active workflow if needed deploymentStatuses: { ...get().deploymentStatuses, - ...(deploymentStatus - ? { - [activeWorkflowId]: deploymentStatus, - } - : {}), + ...(deploymentStatus ? 
{ [activeWorkflowId]: deploymentStatus } : {}), }, - } - - // Update the main workflow state - set(newState) + }) - // Initialize subblock store with values from deployed state - const subBlockStore = useSubBlockStore.getState() const values: Record> = {} - - // Extract subblock values from deployed blocks Object.entries(deployedState.blocks).forEach(([blockId, block]) => { values[blockId] = {} Object.entries(block.subBlocks || {}).forEach(([subBlockId, subBlock]) => { @@ -1092,10 +1133,9 @@ export const useWorkflowStore = create()( }) }) - // Update subblock store with deployed values useSubBlockStore.setState({ workflowValues: { - ...subBlockStore.workflowValues, + ...useSubBlockStore.getState().workflowValues, [activeWorkflowId]: values, }, }) diff --git a/apps/sim/stores/workflows/workflow/types.ts b/apps/sim/stores/workflows/workflow/types.ts index ebf7734dac..edbc606188 100644 --- a/apps/sim/stores/workflows/workflow/types.ts +++ b/apps/sim/stores/workflows/workflow/types.ts @@ -212,6 +212,7 @@ export interface WorkflowActions { } setBlockAdvancedMode: (id: string, advancedMode: boolean) => void setBlockCanonicalMode: (id: string, canonicalId: string, mode: 'basic' | 'advanced') => void + syncDynamicHandleSubblockValue: (blockId: string, subblockId: string, value: unknown) => void setBlockTriggerMode: (id: string, triggerMode: boolean) => void updateBlockLayoutMetrics: (id: string, dimensions: { width: number; height: number }) => void triggerUpdate: () => void diff --git a/apps/sim/stores/workflows/workflow/validation.ts b/apps/sim/stores/workflows/workflow/validation.ts new file mode 100644 index 0000000000..bdfc3b5064 --- /dev/null +++ b/apps/sim/stores/workflows/workflow/validation.ts @@ -0,0 +1,67 @@ +import { validateEdges } from '@/stores/workflows/workflow/edge-validation' +import type { WorkflowState } from '@/stores/workflows/workflow/types' +import { generateLoopBlocks, generateParallelBlocks } from '@/stores/workflows/workflow/utils' + +export 
interface NormalizationResult { + state: WorkflowState + warnings: string[] +} + +function isContainerType(type: string | undefined): boolean { + return type === 'loop' || type === 'parallel' +} + +export function normalizeWorkflowState(workflowState: WorkflowState): NormalizationResult { + const warnings: string[] = [] + const blocks = structuredClone(workflowState.blocks || {}) + + for (const [blockId, block] of Object.entries(blocks)) { + if (!block?.type || !block?.name) { + warnings.push(`Dropped invalid block "${blockId}" because it is missing type or name`) + delete blocks[blockId] + } + } + + for (const [blockId, block] of Object.entries(blocks)) { + const parentId = block.data?.parentId + if (!parentId) { + continue + } + + const parentBlock = blocks[parentId] + const parentIsValidContainer = Boolean(parentBlock && isContainerType(parentBlock.type)) + + if (!parentIsValidContainer || parentId === blockId) { + warnings.push(`Cleared invalid parentId for block "${blockId}"`) + block.data = { + ...(block.data || {}), + parentId: undefined, + extent: undefined, + } + continue + } + + if (block.data?.extent !== 'parent') { + block.data = { + ...(block.data || {}), + extent: 'parent', + } + } + } + + const edgeValidation = validateEdges(workflowState.edges || [], blocks) + warnings.push( + ...edgeValidation.dropped.map(({ edge, reason }) => `Dropped edge "${edge.id}": ${reason}`) + ) + + return { + state: { + ...workflowState, + blocks, + edges: edgeValidation.valid, + loops: generateLoopBlocks(blocks), + parallels: generateParallelBlocks(blocks), + }, + warnings, + } +} From 5b9f0d73c2c966236e75ac5a9495a776965995b5 Mon Sep 17 00:00:00 2001 From: Siddharth Ganesan <33737564+Sg312@users.noreply.github.com> Date: Fri, 13 Mar 2026 21:02:08 -0700 Subject: [PATCH 4/6] feat(mothership): mothership (#3411) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Fix lint * improvement(sidebar): loading * fix(sidebar): use 
client-generated UUIDs for stable optimistic updates (#3439) * fix(sidebar): use client-generated UUIDs for stable optimistic updates * fix(folders): use zod schema validation for folder create API Replace inline UUID regex with zod schema validation for consistency with other API routes. Update test expectations accordingly. * fix(sidebar): add client UUID to single workflow duplicate hook The useDuplicateWorkflow hook was missing newId: crypto.randomUUID(), causing the same temp-ID-swap issue for single workflow duplication from the context menu. * fix(folders): avoid unnecessary Set re-creation in replaceOptimisticEntry Only create new expandedFolders/selectedFolders Sets when tempId differs from data.id. In the common happy path (client-generated UUIDs), this avoids unnecessary Zustand state reference changes and re-renders. * Mothership block logs * Fix mothership block logs * improvement(knowledge): make connector-synced document chunks readonly (#3440) * improvement(knowledge): make connector-synced document chunks readonly * fix(knowledge): enforce connector chunk readonly on server side * fix(knowledge): disable toggle and delete actions for connector-synced chunks * Job execution logs * Job logs * fix(connectors): remove unverifiable requiredScopes for Linear connector * fix(connectors): remove legacy requiredScopes from Jira and Confluence connectors Jira and Confluence OAuth tokens don't return legacy scope names like read:jira-work or read:confluence-content.all, causing the 'Update access' banner to always appear. Set requiredScopes to empty array like Linear. * feat(tasks): add rename to task context menu (#3442) * Revert "fix(connectors): remove legacy requiredScopes from Jira and Confluence connectors" This reverts commit a0be3ff414bebe69e85eabe46334b0b66a2d24b9. * fix(connectors): restore Linear connector requiredScopes Linear OAuth does return scopes in the token response. 
The previous fix of emptying requiredScopes was based on an incorrect assumption. Restoring requiredScopes: ['read'] as it should work correctly. Co-Authored-By: Claude Opus 4.6 * fix(knowledge): pass workspaceId to useOAuthCredentials in connector card The ConnectorCard was calling useOAuthCredentials(providerId) without a workspaceId, causing the credentials API to return an empty array. This meant the credential lookup always failed, getMissingRequiredScopes received undefined, and the "Update access" banner always appeared. Co-Authored-By: Claude Opus 4.6 * Fix oauth link callback from mothership task * feat(connectors): add Fireflies connector and API key auth support (#3448) * feat(connectors): add Fireflies connector and API key auth support Extend the connector system to support both OAuth and API key authentication via a discriminated union (`ConnectorAuthConfig`). Add Fireflies as the first API key connector, syncing meeting transcripts via the Fireflies GraphQL API. Schema changes: - Make `credentialId` nullable (null for API key connectors) - Add `encryptedApiKey` column (AES-256-GCM encrypted, null for OAuth) This eliminates the `'_apikey_'` sentinel and inline `sourceConfig._encryptedApiKey` patterns, giving each auth mode its own clean column. Co-Authored-By: Claude Opus 4.6 * fix(fireflies): allow 0 for maxTranscripts (means unlimited) Co-Authored-By: Claude Opus 4.6 --------- Co-authored-by: Claude Opus 4.6 * Add context * fix(fireflies): correct types from live API validation (#3450) * fix(fireflies): correct types from live API validation - speakers.id is number, not string (API returns 0, 1, 2...) 
- summary.action_items is a single string, not string[] - Update formatTranscriptContent to handle action_items as string Co-Authored-By: Claude Opus 4.6 * fix(fireflies): correct tool types from live API validation - FirefliesSpeaker.id: string -> number - FirefliesSentence.speaker_id: string -> number - FirefliesSpeakerAnalytics.speaker_id: string -> number - FirefliesSummary.action_items: string[] -> string - FirefliesSummary.outline: string[] -> string - FirefliesSummary.shorthand_bullet: string[] -> string - FirefliesSummary.bullet_gist: string[] -> string - FirefliesSummary.topics_discussed: string[] -> string Co-Authored-By: Claude Opus 4.6 --------- Co-authored-by: Claude Opus 4.6 * feat(knowledge): add connector tools and expand document metadata (#3452) * feat(knowledge): add connector tools and expand document metadata * fix(knowledge): address PR review feedback on new tools * fix(knowledge): remove unused params from get_document transform * refactor, improvement * fix: correct knowledge block canonical pair pattern and subblock migration - Rename manualDocumentId to documentId (advanced subblock ID should match canonicalParamId, consistent with airtable/gmail patterns) - Fix documentSelector.dependsOn to reference knowledgeBaseSelector (basic depends on basic, not advanced) - Remove unnecessary documentId migration (ID unchanged from main) Co-Authored-By: Claude Opus 4.6 * lint * fix: resolve post-merge test and lint failures - airtable: sync tableSelector condition with tableId (add getSchema) - backfillCanonicalModes test: add documentId mode to prevent false backfill - schedule PUT test: use invalid action string now that disable is valid - schedule execute tests: add ne mock, sourceType field, use mockReturnValueOnce for two db.update calls - knowledge tools: fix biome formatting (single-line arrow functions) Co-Authored-By: Claude Opus 4.6 * Fixes * Fixes * Clean vfs * Fix * Fix lint * fix(connectors): add rate limiting, concurrency controls, and 
bug fixes (#3457) * fix(connectors): add rate limiting, concurrency controls, and bug fixes across knowledge connectors - Add Retry-After header support to fetchWithRetry for all 18 connectors - Batch concurrent API calls (concurrency 5) in Dropbox, Google Docs, Google Drive, OneDrive, SharePoint - Batch concurrent API calls (concurrency 3) in Notion to match 3 req/s limit - Cache GitHub tree in syncContext to avoid re-fetching on every pagination page - Batch GitHub blob fetches with concurrency 5 - Fix GitHub base64 decoding: atob() → Buffer.from() for UTF-8 safety - Fix HubSpot OAuth scope: 'tickets' → 'crm.objects.tickets.read' (v3 API) - Fix HubSpot syncContext key: totalFetched → totalDocsFetched for consistency - Add jitter to nextSyncAt (10% of interval, capped at 5min) to prevent thundering herd - Fix Date consistency in connector DELETE route * fix(connectors): address PR review feedback on retry and SharePoint batching - Remove 120s cap on Retry-After — pass all values through to retry loop - Add maxDelayMs guard: if Retry-After exceeds maxDelayMs, throw immediately instead of hammering with shorter intervals (addresses validate timeout concern) - Add early exit in SharePoint batch loop when maxFiles limit is reached to avoid unnecessary API calls * fix(connectors): cap Retry-After at maxDelayMs instead of aborting Match Google Cloud SDK behavior: when Retry-After exceeds maxDelayMs, cap the wait to maxDelayMs and log a warning, rather than throwing immediately. This ensures retries are bounded in duration while still respecting server guidance within the configured limit. * fix(connectors): add early-exit guard to Dropbox, Google Docs, OneDrive batch loops Match the SharePoint fix — skip remaining batches once maxFiles limit is reached to avoid unnecessary API calls. 
* improvement(turbo): align turborepo config with best practices (#3458) * improvement(turbo): align turborepo config with best practices * fix(turbo): address PR review feedback * fix(turbo): add lint:check task for read-only lint+format CI checks lint:check previously delegated to format:check which only checked formatting. Now it runs biome check (no --write) which enforces both lint rules and formatting without mutating files. * upgrade turbo * improvement(perf): apply react and js performance optimizations across codebase (#3459) * improvement(perf): apply react and js performance optimizations across codebase - Parallelize independent DB queries with Promise.all in API routes - Defer PostHog and OneDollarStats via dynamic import() to reduce bundle size - Use functional setState in countdown timers to prevent stale closures - Replace O(n*m) .filter().find() with Set-based O(n) lookups in undo-redo - Use .toSorted() instead of .sort() for immutable state operations - Use lazy initializers for useState(new Set()) across 20 components - Remove useMemo wrapping trivially cheap expressions (typeof, ternary, template strings) - Add passive: true to scroll event listener * fix(perf): address PR review feedback - Extract IIFE Set patterns to named consts for readability in use-undo-redo - Hoist Set construction above loops in BATCH_UPDATE_PARENT cases - Add .catch() error handler to PostHog dynamic import - Convert session-provider posthog import to dynamic import() to complete bundle split * fix(analytics): add .catch() to onedollarstats dynamic import * improvement(resource): tables, files * improvement(resources): all outer page structure complete * refactor(queries): comprehensive TanStack Query best practices audit (#3460) * refactor: comprehensive TanStack Query best practices audit and migration - Add AbortSignal forwarding to all 41 queryFn implementations for proper request cancellation - Migrate manual fetch patterns to useMutation hooks (useResetPassword, 
useRedeemReferralCode, usePurchaseCredits, useImportWorkflow, useOpenBillingPortal, useAllowedMcpDomains) - Migrate standalone hooks to TanStack Query (use-next-available-slot, use-mcp-server-test, use-webhook-management, use-referral-attribution) - Fix query key factories: add missing `all` keys, replace inline keys with factory methods - Fix optimistic mutations: use onSettled instead of onSuccess for cache reconciliation - Replace overly broad cache invalidations with targeted key invalidation - Remove keepPreviousData from static-key queries where it provides no benefit - Add staleTime to queries missing explicit cache duration - Fix `any` type in UpdateSettingParams with proper GeneralSettings typing - Remove dead code: loadingWebhooks/checkedWebhooks from subblock store, unused helper functions - Update settings components (general, debug, referral-code, credit-balance, subscription, mcp) to use mutation state instead of manual useState for loading/error/success Co-Authored-By: Claude Opus 4.6 * fix: remove unstable mutation object from useCallback deps openBillingPortal mutation object is not referentially stable, but .mutate() is stable in TanStack Query v5. Remove from deps to prevent unnecessary handleBadgeClick recreations. Co-Authored-By: Claude Opus 4.6 * fix: add missing byWorkflows invalidation to useUpdateTemplate The onSettled handler was missing the byWorkflows() invalidation that was dropped during the onSuccess→onSettled migration. Without this, the deploy modal (useTemplateByWorkflow) would show stale data after a template update. 
Co-Authored-By: Claude Opus 4.6 * docs: add TanStack Query best practices to CLAUDE.md and cursor rules Add comprehensive React Query best practices covering: - Hierarchical query key factories with intermediate plural keys - AbortSignal forwarding in all queryFn implementations - Targeted cache invalidation over broad .all invalidation - onSettled for optimistic mutation cache reconciliation - keepPreviousData only on variable-key queries - No manual fetch in components rule - Stable mutation references in useCallback deps Co-Authored-By: Claude Opus 4.6 * fix: address PR review feedback - Fix syncedRef regression in use-webhook-management: only set syncedRef.current=true when webhook is found, so re-sync works after webhook creation (e.g., post-deploy) - Remove redundant detail(id) invalidation from useUpdateTemplate onSettled since onSuccess already populates cache via setQueryData Co-Authored-By: Claude Opus 4.6 * fix: address second round of PR review feedback - Reset syncedRef when blockId changes in use-webhook-management so component reuse with a different block syncs the new webhook - Add response.ok check in postAttribution so non-2xx responses throw and trigger TanStack Query retry logic Co-Authored-By: Claude Opus 4.6 * fix: use lists() prefix invalidation in useCreateWorkspaceCredential Use workspaceCredentialKeys.lists() instead of .list(workspaceId) so filtered list queries are also invalidated on credential creation, matching the pattern used by update and delete mutations. 
Co-Authored-By: Claude Opus 4.6 * fix: address third round of PR review feedback - Add nullish coalescing fallback for bonusAmount in referral-code to prevent rendering "undefined" when server omits the field - Reset syncedRef when queryEnabled becomes false so webhook data re-syncs when the query is re-enabled without component remount Co-Authored-By: Claude Opus 4.6 * fix: address fourth round of PR review feedback - Add AbortSignal to testMcpServerConnection for consistency - Wrap handleTestConnection in try/catch for mutateAsync error handling - Replace broad subscriptionKeys.all with targeted users()/usage() invalidation - Add intermediate users() key to subscription key factory for prefix matching - Add comment documenting syncedRef null-webhook behavior - Fix api-keys.ts silent error swallowing on non-ok responses - Move deployments.ts cache invalidation from onSuccess to onSettled Co-Authored-By: Claude Opus 4.6 * fix: achieve full TanStack Query best practices compliance - Add intermediate plural keys to api-keys, deployments, and schedules key factories for prefix-based invalidation support - Change copilot-keys from refetchQueries to invalidateQueries - Add signal parameter to organization.ts fetch functions (better-auth client does not support AbortSignal, documented accordingly) - Move useCreateMcpServer invalidation from onSuccess to onSettled Co-Authored-By: Claude Opus 4.6 --------- Co-authored-by: Claude Opus 4.6 * ran lint * Fix tables row count * Update mothership to match copilot in logs * improvement(resource): layout * fix(knowledge): compute KB tokenCount from documents instead of stale column (#3463) The knowledge_base.token_count column was initialized to 0 and never updated. Replace with COALESCE(SUM(document.token_count), 0) in all read queries, which already JOIN on documents with GROUP BY. 
* improvement(resources): layout and items * feat(knowledge): add v1 knowledge base API, Obsidian/Evernote connectors, and docs (#3465) * feat(knowledge): add v1 knowledge base API, Obsidian/Evernote connectors, and docs - Add v1 REST API for knowledge bases (CRUD, document management, vector search) - Add Obsidian and Evernote knowledge base connectors - Add file type validation to v1 file and document upload endpoints - Update OpenAPI spec with knowledge base endpoints and schemas - Add connectors documentation page - Apply query hook formatting improvements * fix(knowledge): address PR review feedback - Remove validateFileType from v1/files route (general file upload, not document-only) - Reject tag filters when searching multiple KBs (tag defs are KB-specific) - Cache tag definitions to avoid duplicate getDocumentTagDefinitions call - Fix Obsidian connector silent empty results when syncContext is undefined * improvement(connectors): add syncContext to getDocument, clean up caching - Update docs to say 20+ connectors - Add syncContext param to ConnectorConfig.getDocument interface - Use syncContext in Evernote getDocument to cache tag/notebook maps - Replace index-based cache check with Map keyed by KB ID in search route * fix(knowledge): address second round of PR review feedback - Fix Zod .default('text') overriding tag definition's actual fieldType - Fix encodeURIComponent breaking multi-level folder paths in Obsidian - Use 413 instead of 400 for file-too-large in document upload - Add knowledge-bases to API reference docs navigation Co-Authored-By: Claude Opus 4.6 * fix(knowledge): prevent cross-workspace KB access in search Filter accessible KBs by matching workspaceId from the request, preventing users from querying KBs in other workspaces they have access to but didn't specify. 
Co-Authored-By: Claude Opus 4.6 * fix(knowledge): audit resourceId, SSRF protection, recursion depth limit - Fix recordAudit using knowledgeBaseId instead of newDocument.id - Add SSRF validation to Obsidian connector (reject private/loopback URLs) - Add max recursion depth (20) to listVaultFiles Co-Authored-By: Claude Opus 4.6 * fix(obsidian): remove SSRF check that blocks localhost usage The Obsidian connector is designed to connect to the Local REST API plugin running on localhost (127.0.0.1:27124). The SSRF check was incorrectly blocking this primary use case. Co-Authored-By: Claude Opus 4.6 --------- Co-authored-by: Claude Opus 4.6 * improvement(resources): segmented API * fix(execution): ensure background tasks await post-execution DB status updates (#3466) The fire-and-forget IIFE in execution-core.ts for post-execution logging could be abandoned when trigger.dev tasks exit, leaving executions permanently stuck in "running" status. Store the promise on LoggingSession so background tasks can optionally await it before returning. 
* improvement(resource): sorting and icons * fix(resource): sorting * improvement(settings): fix mcp modal, add option to edit JSON and add Sim as an MCP client (#3467) * improvement(settings): fix mcp modal, add option to edit JSON and add Sim as an MCP client * added docs link in sidebar * ack comments * ack comments * fixed error msg * feat(mothership): billing (#3464) * Billing update * more billing improvements * credits UI * credit purchase safety * progress * ui improvements * fix cancel sub * fix types * fix daily refresh for teams * make max features differentiated * address bugbot comments * address greptile comments * revert isHosted * address more comments * fix org refresh bar * fix ui rounding * fix minor rounding * fix upgrade issue for legacy plans * fix formatPlanName * fix email display names * fix legacy team reference bugs * referral bonus in credits * fix org upgrade bug * improve logs * respect toggle for paid users * fix landing page pro features and usage limit checks * fixed query and usage * add unit test * address more comments * enterprise guard * fix limits bug * pass period start/end for overage * fix(sidebar): restore drag-and-drop for workflows and folders (#3470) * fix(sidebar): restore drag-and-drop for workflows and folders Made-with: Cursor * update docs, unrelated * improvement(tables): consolidation * feat(schedules): add schedule creator modal for standalone jobs Add modal to create standalone scheduled jobs from the Schedules page. Includes POST API endpoint, useCreateSchedule mutation hook, and full modal with schedule type selection, timezone, lifecycle, and live preview. 
Co-Authored-By: Claude Opus 4.6 * feat(schedules): add edit support with context menu for standalone jobs * style(schedules): apply linter formatting * improvement: tables, favicon * feat(files): inline file viewer with text editing (#3475) * feat(files): add inline file viewer with text editing and create file modal Add file preview/edit functionality to the workspace files page. Text files (md, json, txt, yaml, etc.) open in an editable textarea with Cmd/Ctrl+S save. PDFs render in an iframe. New file button creates empty .md files via a modal. Uses ResourceHeader breadcrumbs and ResourceOptionsBar for save/download/delete. Co-Authored-By: Claude Opus 4.6 * improvement(files): add UX polish, PR review fixes, and context menu - Add unsaved changes guard modal (matching credentials manager pattern) - Add delete confirmation modal for both viewer and context menu - Add save status feedback (Save → Saving... → Saved) - Add right-click context menu with Open, Download, Delete actions - Add 50MB file size limit on content update API - Add storage quota check before content updates - Add response.ok guard on download to prevent corrupt files - Add skeleton loading for pending file selection (prevents flicker) - Fix updateContent in handleSave dependency array Co-Authored-By: Claude Opus 4.6 * fix(files): propagate save errors and remove redundant sizeDiff - Remove try/catch in TextEditor.handleSave so errors propagate to parent, which correctly shows save failure status - Remove redundant inner sizeDiff declaration that shadowed outer scope Co-Authored-By: Claude Opus 4.6 * fix(files): remove unused textareaRef Co-Authored-By: Claude Opus 4.6 * fix(files): move Cmd+S to parent, add save error feedback, hide save for non-text files - Move Cmd+S keyboard handler from TextEditor to Files so it goes through the parent handleSave with proper status management - Add 'error' save status with red "Save failed" label that auto-resets - Only show Save button for text-editable 
file types (md, txt, json, etc.) Co-Authored-By: Claude Opus 4.6 * improvement(files): add save tooltip, deduplicate text-editable extensions - Add Tooltip on Save button showing Cmd+S / Ctrl+S shortcut - Export TEXT_EDITABLE_EXTENSIONS from file-viewer and reuse in files.tsx instead of duplicating the list inline Co-Authored-By: Claude Opus 4.6 * refactor: extract isMacPlatform to shared utility Move isMacPlatform() from global-commands-provider.tsx to lib/core/utils/platform.ts so it can be reused by files.tsx tooltip without duplication. Co-Authored-By: Claude Opus 4.6 * refactor(files): deduplicate delete modal, use shared formatFileSize - Extract DeleteConfirmModal component to eliminate duplicate modal markup between viewer and list modes - Replace local formatFileSize with shared utility from file-utils.ts Co-Authored-By: Claude Opus 4.6 * fix(files): fix a11y label lint error and remove mutation object from useCallback deps Co-Authored-By: Claude Opus 4.6 * fix(files): add isDirty guard on handleSave, return proper HTTP status codes Prevents "Saving → Saved" flash when pressing Cmd+S with no changes. Returns 404 for file-not-found and 402 for quota-exceeded instead of 500. 
Co-Authored-By: Claude Opus 4.6 * fix(files): reset isDirty/saveStatus on delete and discard, remove deprecated navigator.platform - Clear isDirty and saveStatus when deleting the currently-viewed file to prevent spurious beforeunload prompts - Reset saveStatus on discard to prevent stale "Save failed" when opening another file - Remove deprecated navigator.platform, userAgent fallback covers all cases Co-Authored-By: Claude Opus 4.6 * fix(files): prevent concurrent saves on rapid Cmd+S, add YAML MIME types - Add saveStatus === 'saving' guard to handleSave to prevent duplicate concurrent PUT requests from rapid keyboard shortcuts - Add yaml/yml MIME type mappings to getMimeTypeFromExtension Co-Authored-By: Claude Opus 4.6 * refactor(files): reuse shared extension constants, parallelize cancelQueries - Replace hand-rolled SUPPORTED_EXTENSIONS with composition from existing SUPPORTED_DOCUMENT/AUDIO/VIDEO_EXTENSIONS in validation.ts - Parallelize sequential cancelQueries calls in delete mutation onMutate Co-Authored-By: Claude Opus 4.6 * fix(files): guard handleCreate against duplicate calls while pending Co-Authored-By: Claude Opus 4.6 * fix(files): show upload progress on the Upload button, not New file Co-Authored-By: Claude Opus 4.6 * fix(files): use ref-based guard for create pending state to avoid stale closure The uploadFile.isPending check was stale because the mutation object is excluded from useCallback deps (per codebase convention). Using a ref ensures the guard works correctly across rapid Enter key presses. 
Co-Authored-By: Claude Opus 4.6 * cleanup(files): use shared icon import, remove no-op props, wrap handler in useCallback Co-Authored-By: Claude Opus 4.6 --------- Co-authored-by: Claude Opus 4.6 * improvement: tables, dropdown * improvement(docs): align sidebar method badges and polish API reference styling (#3484) * improvement(docs): align sidebar method badges and polish API reference styling * fix(docs): revert className prop on DocsPage for CI compatibility * fix(docs): restore oneOf schema for delete rows and use rem units in CSS * fix(docs): replace :has() selectors with direct className for reliable prod layout The API docs layout was intermittently narrow in production because CSS :has(.api-page-header) selectors are unreliable in Tailwind v4 production builds. Apply className="openapi-page" directly to DocsPage and replace all 64 :has() selectors with .openapi-page class targeting. Co-Authored-By: Claude Opus 4.6 * fix(docs): bypass TypeScript check for className prop on DocsPage Use spread with type assertion to pass className to DocsPage, working around a CI type resolution issue where the prop exists at runtime but is not recognized by TypeScript in the Vercel build environment. Co-Authored-By: Claude Opus 4.6 * fix(docs): use inline style tag for grid layout, revert CSS to :has() selectors The className prop on DocsPage doesn't exist in the fumadocs-ui version resolved on Vercel, so .openapi-page was never applied and all 64 CSS rules broke. Revert to :has(.api-page-header) selectors for styling and use an inline ' - const file = new File([maliciousContent], 'malicious.html', { type: 'text/html' }) + const htmlContent = '

Hello World

' + const file = new File([htmlContent], 'document.html', { type: 'text/html' }) formData.append('file', file) formData.append('context', 'workspace') formData.append('workspaceId', 'test-workspace-id') @@ -436,35 +436,14 @@ describe('File Upload Security Tests', () => { const response = await POST(req as unknown as NextRequest) - expect(response.status).toBe(400) - const data = await response.json() - expect(data.message).toContain("File type 'html' is not allowed") - }) - - it('should reject HTML files to prevent XSS', async () => { - const formData = new FormData() - const maliciousContent = '' - const file = new File([maliciousContent], 'malicious.html', { type: 'text/html' }) - formData.append('file', file) - formData.append('context', 'workspace') - formData.append('workspaceId', 'test-workspace-id') - - const req = new Request('http://localhost/api/files/upload', { - method: 'POST', - body: formData, - }) - - const response = await POST(req as unknown as NextRequest) - - expect(response.status).toBe(400) - const data = await response.json() - expect(data.message).toContain("File type 'html' is not allowed") + expect(response.status).toBe(200) }) - it('should reject SVG files to prevent XSS', async () => { + it('should accept SVG files (supported image type)', async () => { const formData = new FormData() - const maliciousSvg = '' - const file = new File([maliciousSvg], 'malicious.svg', { type: 'image/svg+xml' }) + const svgContent = + '' + const file = new File([svgContent], 'image.svg', { type: 'image/svg+xml' }) formData.append('file', file) formData.append('context', 'workspace') formData.append('workspaceId', 'test-workspace-id') @@ -476,9 +455,7 @@ describe('File Upload Security Tests', () => { const response = await POST(req as unknown as NextRequest) - expect(response.status).toBe(400) - const data = await response.json() - expect(data.message).toContain("File type 'svg' is not allowed") + expect(response.status).toBe(200) }) it('should reject 
JavaScript files', async () => { @@ -526,8 +503,8 @@ describe('File Upload Security Tests', () => { const validFile = new File(['valid content'], 'valid.pdf', { type: 'application/pdf' }) formData.append('file', validFile) - const invalidFile = new File([''], 'malicious.html', { - type: 'text/html', + const invalidFile = new File(['binary content'], 'malicious.exe', { + type: 'application/x-msdownload', }) formData.append('file', invalidFile) formData.append('context', 'workspace') @@ -542,7 +519,7 @@ describe('File Upload Security Tests', () => { expect(response.status).toBe(400) const data = await response.json() - expect(data.message).toContain("File type 'html' is not allowed") + expect(data.message).toContain("File type 'exe' is not allowed") }) }) diff --git a/apps/sim/app/api/files/upload/route.ts b/apps/sim/app/api/files/upload/route.ts index f227dd7420..cd2baf45d9 100644 --- a/apps/sim/app/api/files/upload/route.ts +++ b/apps/sim/app/api/files/upload/route.ts @@ -4,8 +4,13 @@ import { sanitizeFileName } from '@/executor/constants' import '@/lib/uploads/core/setup.server' import { getSession } from '@/lib/auth' import type { StorageContext } from '@/lib/uploads/config' -import { isImageFileType } from '@/lib/uploads/utils/file-utils' -import { validateFileType } from '@/lib/uploads/utils/validation' +import { isImageFileType, resolveFileType } from '@/lib/uploads/utils/file-utils' +import { + SUPPORTED_AUDIO_EXTENSIONS, + SUPPORTED_DOCUMENT_EXTENSIONS, + SUPPORTED_VIDEO_EXTENSIONS, + validateFileType, +} from '@/lib/uploads/utils/validation' import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils' import { createErrorResponse, @@ -13,38 +18,13 @@ import { InvalidRequestError, } from '@/app/api/files/utils' -const ALLOWED_EXTENSIONS = new Set([ - // Documents - 'pdf', - 'doc', - 'docx', - 'txt', - 'md', - 'csv', - 'xlsx', - 'xls', - 'json', - 'yaml', - 'yml', - // Images - 'png', - 'jpg', - 'jpeg', - 'gif', - // Audio - 'mp3', - 'm4a', - 
'wav', - 'webm', - 'ogg', - 'flac', - 'aac', - 'opus', - // Video - 'mp4', - 'mov', - 'avi', - 'mkv', +const IMAGE_EXTENSIONS = ['png', 'jpg', 'jpeg', 'gif', 'webp', 'svg'] as const + +const ALLOWED_EXTENSIONS = new Set([ + ...SUPPORTED_DOCUMENT_EXTENSIONS, + ...IMAGE_EXTENSIONS, + ...SUPPORTED_AUDIO_EXTENSIONS, + ...SUPPORTED_VIDEO_EXTENSIONS, ]) function validateFileExtension(filename: string): boolean { @@ -251,9 +231,19 @@ export async function POST(request: NextRequest) { } } - // Handle image-only contexts (copilot, chat, profile-pictures) + // Handle copilot, chat, profile-pictures contexts if (context === 'copilot' || context === 'chat' || context === 'profile-pictures') { - if (!isImageFileType(file.type)) { + if (context === 'copilot') { + const { isSupportedFileType: isCopilotSupported } = await import( + '@/lib/uploads/contexts/copilot/copilot-file-manager' + ) + const resolvedType = resolveFileType(file) + if (!isImageFileType(resolvedType) && !isCopilotSupported(resolvedType)) { + throw new InvalidRequestError( + 'Unsupported file type. Allowed: images, PDF, and text files (TXT, CSV, MD, HTML, JSON, XML).' 
+ ) + } + } else if (!isImageFileType(file.type)) { throw new InvalidRequestError( `Only image files (JPEG, PNG, GIF, WebP, SVG) are allowed for ${context} uploads` ) diff --git a/apps/sim/app/api/files/utils.test.ts b/apps/sim/app/api/files/utils.test.ts index a31c7ca42f..58d1791f92 100644 --- a/apps/sim/app/api/files/utils.test.ts +++ b/apps/sim/app/api/files/utils.test.ts @@ -170,9 +170,7 @@ describe('extractFilename', () => { 'inline; filename="safe-image.png"' ) expect(response.headers.get('X-Content-Type-Options')).toBe('nosniff') - expect(response.headers.get('Content-Security-Policy')).toBe( - "default-src 'none'; style-src 'unsafe-inline'; sandbox;" - ) + expect(response.headers.get('Content-Security-Policy')).toBeNull() }) it('should serve PDFs inline safely', () => { @@ -203,33 +201,31 @@ describe('extractFilename', () => { expect(response.headers.get('X-Content-Type-Options')).toBe('nosniff') }) - it('should force attachment for SVG files to prevent XSS', () => { + it('should serve SVG files inline with CSP sandbox protection', () => { const response = createFileResponse({ buffer: Buffer.from( '' ), contentType: 'image/svg+xml', - filename: 'malicious.svg', + filename: 'image.svg', }) expect(response.status).toBe(200) - expect(response.headers.get('Content-Type')).toBe('application/octet-stream') - expect(response.headers.get('Content-Disposition')).toBe( - 'attachment; filename="malicious.svg"' + expect(response.headers.get('Content-Type')).toBe('image/svg+xml') + expect(response.headers.get('Content-Disposition')).toBe('inline; filename="image.svg"') + expect(response.headers.get('Content-Security-Policy')).toBe( + "default-src 'none'; style-src 'unsafe-inline'; sandbox;" ) }) - it('should override dangerous content types to safe alternatives', () => { + it('should not apply CSP sandbox to non-SVG files', () => { const response = createFileResponse({ - buffer: Buffer.from('safe content'), - contentType: 'image/svg+xml', - filename: 'image.png', // 
Extension doesn't match content-type + buffer: Buffer.from('hello'), + contentType: 'text/plain', + filename: 'readme.txt', }) - expect(response.status).toBe(200) - // Should override SVG content type to plain text for safety - expect(response.headers.get('Content-Type')).toBe('text/plain') - expect(response.headers.get('Content-Disposition')).toBe('inline; filename="image.png"') + expect(response.headers.get('Content-Security-Policy')).toBeNull() }) it('should force attachment for JavaScript files', () => { @@ -302,15 +298,22 @@ describe('extractFilename', () => { }) describe('Content Security Policy', () => { - it('should include CSP header in all responses', () => { - const response = createFileResponse({ + it('should include CSP header only for SVG responses', () => { + const svgResponse = createFileResponse({ + buffer: Buffer.from(''), + contentType: 'image/svg+xml', + filename: 'icon.svg', + }) + expect(svgResponse.headers.get('Content-Security-Policy')).toBe( + "default-src 'none'; style-src 'unsafe-inline'; sandbox;" + ) + + const txtResponse = createFileResponse({ buffer: Buffer.from('test'), contentType: 'text/plain', filename: 'test.txt', }) - - const csp = response.headers.get('Content-Security-Policy') - expect(csp).toBe("default-src 'none'; style-src 'unsafe-inline'; sandbox;") + expect(txtResponse.headers.get('Content-Security-Policy')).toBeNull() }) it('should include X-Content-Type-Options header', () => { diff --git a/apps/sim/app/api/files/utils.ts b/apps/sim/app/api/files/utils.ts index 953c9b8989..a4831cdd3b 100644 --- a/apps/sim/app/api/files/utils.ts +++ b/apps/sim/app/api/files/utils.ts @@ -1,5 +1,5 @@ import { existsSync } from 'fs' -import { join, resolve, sep } from 'path' +import path from 'path' import { createLogger } from '@sim/logger' import { NextResponse } from 'next/server' import { UPLOAD_DIR } from '@/lib/uploads/config' @@ -21,6 +21,7 @@ export interface FileResponse { buffer: Buffer contentType: string filename: string + 
cacheControl?: string } export class FileNotFoundError extends Error { @@ -60,6 +61,8 @@ export const contentTypeMap: Record = { jpg: 'image/jpeg', jpeg: 'image/jpeg', gif: 'image/gif', + svg: 'image/svg+xml', + webp: 'image/webp', zip: 'application/zip', googleFolder: 'application/vnd.google-apps.folder', } @@ -76,6 +79,7 @@ export const binaryExtensions = [ 'jpg', 'jpeg', 'gif', + 'webp', 'pdf', ] @@ -155,7 +159,7 @@ function sanitizeFilename(filename: string): string { return sanitized }) - return sanitizedSegments.join(sep) + return sanitizedSegments.join(path.sep) } export function findLocalFile(filename: string): string | null { @@ -168,17 +172,18 @@ export function findLocalFile(filename: string): string | null { } const possiblePaths = [ - join(UPLOAD_DIR, sanitizedFilename), - join(process.cwd(), 'uploads', sanitizedFilename), + path.join(UPLOAD_DIR, sanitizedFilename), + path.join(process.cwd(), 'uploads', sanitizedFilename), ] - for (const path of possiblePaths) { - const resolvedPath = resolve(path) - const allowedDirs = [resolve(UPLOAD_DIR), resolve(process.cwd(), 'uploads')] + for (const filePath of possiblePaths) { + const resolvedPath = path.resolve(filePath) + const allowedDirs = [path.resolve(UPLOAD_DIR), path.resolve(process.cwd(), 'uploads')] // Must be within allowed directory but NOT the directory itself const isWithinAllowedDir = allowedDirs.some( - (allowedDir) => resolvedPath.startsWith(allowedDir + sep) && resolvedPath !== allowedDir + (allowedDir) => + resolvedPath.startsWith(allowedDir + path.sep) && resolvedPath !== allowedDir ) if (!isWithinAllowedDir) { @@ -202,13 +207,15 @@ const SAFE_INLINE_TYPES = new Set([ 'image/jpeg', 'image/jpg', 'image/gif', + 'image/svg+xml', + 'image/webp', 'application/pdf', 'text/plain', 'text/csv', 'application/json', ]) -const FORCE_ATTACHMENT_EXTENSIONS = new Set(['html', 'htm', 'svg', 'js', 'css', 'xml']) +const FORCE_ATTACHMENT_EXTENSIONS = new Set(['html', 'htm', 'js', 'css', 'xml']) function 
getSecureFileHeaders(filename: string, originalContentType: string) { const extension = filename.split('.').pop()?.toLowerCase() || '' @@ -222,7 +229,7 @@ function getSecureFileHeaders(filename: string, originalContentType: string) { let safeContentType = originalContentType - if (originalContentType === 'text/html' || originalContentType === 'image/svg+xml') { + if (originalContentType === 'text/html') { safeContentType = 'text/plain' } @@ -251,16 +258,18 @@ function encodeFilenameForHeader(storageKey: string): string { export function createFileResponse(file: FileResponse): NextResponse { const { contentType, disposition } = getSecureFileHeaders(file.filename, file.contentType) - return new NextResponse(file.buffer as BodyInit, { - status: 200, - headers: { - 'Content-Type': contentType, - 'Content-Disposition': `${disposition}; ${encodeFilenameForHeader(file.filename)}`, - 'Cache-Control': 'public, max-age=31536000', - 'X-Content-Type-Options': 'nosniff', - 'Content-Security-Policy': "default-src 'none'; style-src 'unsafe-inline'; sandbox;", - }, - }) + const headers: Record = { + 'Content-Type': contentType, + 'Content-Disposition': `${disposition}; ${encodeFilenameForHeader(file.filename)}`, + 'Cache-Control': file.cacheControl || 'public, max-age=31536000', + 'X-Content-Type-Options': 'nosniff', + } + + if (contentType === 'image/svg+xml') { + headers['Content-Security-Policy'] = "default-src 'none'; style-src 'unsafe-inline'; sandbox;" + } + + return new NextResponse(file.buffer as BodyInit, { status: 200, headers }) } export function createErrorResponse(error: Error, status = 500): NextResponse { diff --git a/apps/sim/app/api/folders/[id]/duplicate/route.ts b/apps/sim/app/api/folders/[id]/duplicate/route.ts index 54eafdf36f..e59cc943d1 100644 --- a/apps/sim/app/api/folders/[id]/duplicate/route.ts +++ b/apps/sim/app/api/folders/[id]/duplicate/route.ts @@ -17,6 +17,7 @@ const DuplicateRequestSchema = z.object({ workspaceId: z.string().optional(), parentId: 
z.string().nullable().optional(), color: z.string().optional(), + newId: z.string().uuid().optional(), }) // POST /api/folders/[id]/duplicate - Duplicate a folder with all its child folders and workflows @@ -33,7 +34,13 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id: try { const body = await req.json() - const { name, workspaceId, parentId, color } = DuplicateRequestSchema.parse(body) + const { + name, + workspaceId, + parentId, + color, + newId: clientNewId, + } = DuplicateRequestSchema.parse(body) logger.info(`[${requestId}] Duplicating folder ${sourceFolderId} for user ${session.user.id}`) @@ -60,7 +67,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id: const targetWorkspaceId = workspaceId || sourceFolder.workspaceId const { newFolderId, folderMapping } = await db.transaction(async (tx) => { - const newFolderId = crypto.randomUUID() + const newFolderId = clientNewId || crypto.randomUUID() const now = new Date() const targetParentId = parentId ?? 
sourceFolder.parentId diff --git a/apps/sim/app/api/folders/[id]/route.ts b/apps/sim/app/api/folders/[id]/route.ts index 96ab40c2f1..41b9a6276c 100644 --- a/apps/sim/app/api/folders/[id]/route.ts +++ b/apps/sim/app/api/folders/[id]/route.ts @@ -1,11 +1,12 @@ import { db } from '@sim/db' import { workflow, workflowFolder } from '@sim/db/schema' import { createLogger } from '@sim/logger' -import { and, eq } from 'drizzle-orm' +import { and, eq, isNull } from 'drizzle-orm' import { type NextRequest, NextResponse } from 'next/server' import { z } from 'zod' import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log' import { getSession } from '@/lib/auth' +import { archiveWorkflowsByIdsInWorkspace } from '@/lib/workflows/lifecycle' import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils' const logger = createLogger('FoldersIDAPI') @@ -151,7 +152,7 @@ export async function DELETE( const totalWorkflowsInWorkspace = await db .select({ id: workflow.id }) .from(workflow) - .where(eq(workflow.workspaceId, existingFolder.workspaceId)) + .where(and(eq(workflow.workspaceId, existingFolder.workspaceId), isNull(workflow.archivedAt))) if (workflowsInFolder > 0 && workflowsInFolder >= totalWorkflowsInWorkspace.length) { return NextResponse.json( @@ -222,12 +223,20 @@ async function deleteFolderRecursively( const workflowsInFolder = await db .select({ id: workflow.id }) .from(workflow) - .where(and(eq(workflow.folderId, folderId), eq(workflow.workspaceId, workspaceId))) + .where( + and( + eq(workflow.folderId, folderId), + eq(workflow.workspaceId, workspaceId), + isNull(workflow.archivedAt) + ) + ) if (workflowsInFolder.length > 0) { - await db - .delete(workflow) - .where(and(eq(workflow.folderId, folderId), eq(workflow.workspaceId, workspaceId))) + await archiveWorkflowsByIdsInWorkspace( + workspaceId, + workflowsInFolder.map((entry) => entry.id), + { requestId: `folder-${folderId}` } + ) stats.workflows += workflowsInFolder.length } @@ 
-252,7 +261,13 @@ async function countWorkflowsInFolderRecursively( const workflowsInFolder = await db .select({ id: workflow.id }) .from(workflow) - .where(and(eq(workflow.folderId, folderId), eq(workflow.workspaceId, workspaceId))) + .where( + and( + eq(workflow.folderId, folderId), + eq(workflow.workspaceId, workspaceId), + isNull(workflow.archivedAt) + ) + ) count += workflowsInFolder.length diff --git a/apps/sim/app/api/folders/route.test.ts b/apps/sim/app/api/folders/route.test.ts index 5fa3a70901..b31b527a74 100644 --- a/apps/sim/app/api/folders/route.test.ts +++ b/apps/sim/app/api/folders/route.test.ts @@ -455,7 +455,7 @@ describe('Folders API Route', () => { expect(response.status).toBe(400) const data = await response.json() - expect(data).toHaveProperty('error', 'Name and workspace ID are required') + expect(data).toHaveProperty('error', 'Invalid request data') } }) diff --git a/apps/sim/app/api/folders/route.ts b/apps/sim/app/api/folders/route.ts index 835231d31f..2ae6d1673a 100644 --- a/apps/sim/app/api/folders/route.ts +++ b/apps/sim/app/api/folders/route.ts @@ -3,12 +3,22 @@ import { workflow, workflowFolder } from '@sim/db/schema' import { createLogger } from '@sim/logger' import { and, asc, eq, isNull, min } from 'drizzle-orm' import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log' import { getSession } from '@/lib/auth' import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils' const logger = createLogger('FoldersAPI') +const CreateFolderSchema = z.object({ + id: z.string().uuid().optional(), + name: z.string().min(1, 'Name is required'), + workspaceId: z.string().min(1, 'Workspace ID is required'), + parentId: z.string().optional(), + color: z.string().optional(), + sortOrder: z.number().int().optional(), +}) + // GET - Fetch folders for a workspace export async function GET(request: NextRequest) { try { @@ -59,13 +69,15 
@@ export async function POST(request: NextRequest) { } const body = await request.json() - const { name, workspaceId, parentId, color, sortOrder: providedSortOrder } = body - - if (!name || !workspaceId) { - return NextResponse.json({ error: 'Name and workspace ID are required' }, { status: 400 }) - } + const { + id: clientId, + name, + workspaceId, + parentId, + color, + sortOrder: providedSortOrder, + } = CreateFolderSchema.parse(body) - // Check if user has workspace permissions (at least 'write' access to create folders) const workspacePermission = await getUserEntityPermissions( session.user.id, 'workspace', @@ -79,8 +91,7 @@ export async function POST(request: NextRequest) { ) } - // Generate a new ID - const id = crypto.randomUUID() + const id = clientId || crypto.randomUUID() const newFolder = await db.transaction(async (tx) => { let sortOrder: number @@ -150,6 +161,14 @@ export async function POST(request: NextRequest) { return NextResponse.json({ folder: newFolder }) } catch (error) { + if (error instanceof z.ZodError) { + logger.warn('Invalid folder creation data', { errors: error.errors }) + return NextResponse.json( + { error: 'Invalid request data', details: error.errors }, + { status: 400 } + ) + } + logger.error('Error creating folder:', { error }) return NextResponse.json({ error: 'Internal server error' }, { status: 500 }) } diff --git a/apps/sim/app/api/form/[identifier]/route.ts b/apps/sim/app/api/form/[identifier]/route.ts index d6d4f019e4..986a77610d 100644 --- a/apps/sim/app/api/form/[identifier]/route.ts +++ b/apps/sim/app/api/form/[identifier]/route.ts @@ -2,7 +2,7 @@ import { randomUUID } from 'crypto' import { db } from '@sim/db' import { form, workflow, workflowBlocks } from '@sim/db/schema' import { createLogger } from '@sim/logger' -import { eq } from 'drizzle-orm' +import { and, eq, isNull } from 'drizzle-orm' import { type NextRequest, NextResponse } from 'next/server' import { z } from 'zod' import { addCorsHeaders, 
validateAuthToken } from '@/lib/core/security/deployment' @@ -91,7 +91,7 @@ export async function POST( customizations: form.customizations, }) .from(form) - .where(eq(form.identifier, identifier)) + .where(and(eq(form.identifier, identifier), isNull(form.archivedAt))) .limit(1) if (deploymentResult.length === 0) { @@ -107,7 +107,7 @@ export async function POST( const [workflowRecord] = await db .select({ workspaceId: workflow.workspaceId }) .from(workflow) - .where(eq(workflow.id, deployment.workflowId)) + .where(and(eq(workflow.id, deployment.workflowId), isNull(workflow.archivedAt))) .limit(1) const workspaceId = workflowRecord?.workspaceId @@ -312,7 +312,7 @@ export async function GET( showBranding: form.showBranding, }) .from(form) - .where(eq(form.identifier, identifier)) + .where(and(eq(form.identifier, identifier), isNull(form.archivedAt))) .limit(1) if (deploymentResult.length === 0) { diff --git a/apps/sim/app/api/form/manage/[id]/route.ts b/apps/sim/app/api/form/manage/[id]/route.ts index e64e52fb1e..577363b8d9 100644 --- a/apps/sim/app/api/form/manage/[id]/route.ts +++ b/apps/sim/app/api/form/manage/[id]/route.ts @@ -1,7 +1,7 @@ import { db } from '@sim/db' import { form } from '@sim/db/schema' import { createLogger } from '@sim/logger' -import { eq } from 'drizzle-orm' +import { and, eq, isNull } from 'drizzle-orm' import type { NextRequest } from 'next/server' import { z } from 'zod' import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log' @@ -134,7 +134,7 @@ export async function PATCH(request: NextRequest, { params }: { params: Promise< const existingIdentifier = await db .select() .from(form) - .where(eq(form.identifier, identifier)) + .where(and(eq(form.identifier, identifier), isNull(form.archivedAt))) .limit(1) if (existingIdentifier.length > 0) { @@ -241,7 +241,7 @@ export async function DELETE( return createErrorResponse('Form not found or access denied', 404) } - await db.update(form).set({ isActive: false, updatedAt: new 
Date() }).where(eq(form.id, id)) + await db.delete(form).where(eq(form.id, id)) logger.info(`Form ${id} deleted (soft delete)`) diff --git a/apps/sim/app/api/form/route.ts b/apps/sim/app/api/form/route.ts index 4ebb577f1e..3becf417a4 100644 --- a/apps/sim/app/api/form/route.ts +++ b/apps/sim/app/api/form/route.ts @@ -1,7 +1,7 @@ import { db } from '@sim/db' import { form } from '@sim/db/schema' import { createLogger } from '@sim/logger' -import { eq } from 'drizzle-orm' +import { and, eq, isNull } from 'drizzle-orm' import type { NextRequest } from 'next/server' import { v4 as uuidv4 } from 'uuid' import { z } from 'zod' @@ -73,7 +73,10 @@ export async function GET(request: NextRequest) { return createErrorResponse('Unauthorized', 401) } - const deployments = await db.select().from(form).where(eq(form.userId, session.user.id)) + const deployments = await db + .select() + .from(form) + .where(and(eq(form.userId, session.user.id), isNull(form.archivedAt))) return createSuccessResponse({ deployments }) } catch (error: any) { @@ -118,21 +121,20 @@ export async function POST(request: NextRequest) { ) } - const existingIdentifier = await db - .select() - .from(form) - .where(eq(form.identifier, identifier)) - .limit(1) + // Check identifier availability and workflow access in parallel + const [existingIdentifier, { hasAccess, workflow: workflowRecord }] = await Promise.all([ + db + .select() + .from(form) + .where(and(eq(form.identifier, identifier), isNull(form.archivedAt))) + .limit(1), + checkWorkflowAccessForFormCreation(workflowId, session.user.id), + ]) if (existingIdentifier.length > 0) { return createErrorResponse('Identifier already in use', 400) } - const { hasAccess, workflow: workflowRecord } = await checkWorkflowAccessForFormCreation( - workflowId, - session.user.id - ) - if (!hasAccess || !workflowRecord) { return createErrorResponse('Workflow not found or access denied', 404) } diff --git a/apps/sim/app/api/form/utils.ts b/apps/sim/app/api/form/utils.ts 
index e39d210ac3..9f4bafd05a 100644 --- a/apps/sim/app/api/form/utils.ts +++ b/apps/sim/app/api/form/utils.ts @@ -1,7 +1,7 @@ import { db } from '@sim/db' import { form, workflow } from '@sim/db/schema' import { createLogger } from '@sim/logger' -import { eq } from 'drizzle-orm' +import { and, eq, isNull } from 'drizzle-orm' import type { NextRequest, NextResponse } from 'next/server' import { isEmailAllowed, @@ -57,7 +57,7 @@ export async function checkFormAccess( .select({ form: form, workflowWorkspaceId: workflow.workspaceId }) .from(form) .innerJoin(workflow, eq(form.workflowId, workflow.id)) - .where(eq(form.id, formId)) + .where(and(eq(form.id, formId), isNull(form.archivedAt))) .limit(1) if (formData.length === 0) { diff --git a/apps/sim/app/api/form/validate/route.ts b/apps/sim/app/api/form/validate/route.ts index 8352149fd9..0b2b8a076e 100644 --- a/apps/sim/app/api/form/validate/route.ts +++ b/apps/sim/app/api/form/validate/route.ts @@ -1,7 +1,7 @@ import { db } from '@sim/db' import { form } from '@sim/db/schema' import { createLogger } from '@sim/logger' -import { eq } from 'drizzle-orm' +import { and, eq, isNull } from 'drizzle-orm' import type { NextRequest } from 'next/server' import { z } from 'zod' import { getSession } from '@/lib/auth' @@ -50,7 +50,7 @@ export async function GET(request: NextRequest) { const existingForm = await db .select({ id: form.id }) .from(form) - .where(eq(form.identifier, validatedIdentifier)) + .where(and(eq(form.identifier, validatedIdentifier), isNull(form.archivedAt))) .limit(1) const isAvailable = existingForm.length === 0 diff --git a/apps/sim/app/api/function/execute/route.ts b/apps/sim/app/api/function/execute/route.ts index 441bf788d9..24e992401b 100644 --- a/apps/sim/app/api/function/execute/route.ts +++ b/apps/sim/app/api/function/execute/route.ts @@ -610,6 +610,7 @@ export async function POST(req: NextRequest) { workflowVariables = {}, workflowId, isCustomTool = false, + _sandboxFiles, } = body const 
executionParams = { ...params } @@ -722,6 +723,7 @@ export async function POST(req: NextRequest) { code: codeForE2B, language: CodeLanguage.JavaScript, timeoutMs: timeout, + sandboxFiles: _sandboxFiles, }) const executionTime = Date.now() - execStart stdout += e2bStdout @@ -785,6 +787,7 @@ export async function POST(req: NextRequest) { code: codeForE2B, language: CodeLanguage.Python, timeoutMs: timeout, + sandboxFiles: _sandboxFiles, }) const executionTime = Date.now() - execStart stdout += e2bStdout diff --git a/apps/sim/app/api/jobs/[jobId]/route.ts b/apps/sim/app/api/jobs/[jobId]/route.ts index 14be54facb..cb8a43a80d 100644 --- a/apps/sim/app/api/jobs/[jobId]/route.ts +++ b/apps/sim/app/api/jobs/[jobId]/route.ts @@ -40,6 +40,14 @@ export async function GET( logger.warn(`[${requestId}] Access denied to workflow ${job.metadata.workflowId}`) return createErrorResponse('Access denied', 403) } + + if (authResult.apiKeyType === 'workspace' && authResult.workspaceId) { + const { getWorkflowById } = await import('@/lib/workflows/utils') + const workflow = await getWorkflowById(job.metadata.workflowId as string) + if (!workflow?.workspaceId || workflow.workspaceId !== authResult.workspaceId) { + return createErrorResponse('API key is not authorized for this workspace', 403) + } + } } else if (job.metadata?.userId && job.metadata.userId !== authenticatedUserId) { logger.warn(`[${requestId}] Access denied to user ${job.metadata.userId}`) return createErrorResponse('Access denied', 403) diff --git a/apps/sim/app/api/knowledge/[id]/connectors/[connectorId]/documents/route.test.ts b/apps/sim/app/api/knowledge/[id]/connectors/[connectorId]/documents/route.test.ts new file mode 100644 index 0000000000..4c34f2e5ff --- /dev/null +++ b/apps/sim/app/api/knowledge/[id]/connectors/[connectorId]/documents/route.test.ts @@ -0,0 +1,202 @@ +/** + * @vitest-environment node + */ +import { createMockRequest } from '@sim/testing' +import { beforeEach, describe, expect, it, vi } from 
'vitest' + +const { mockCheckSession, mockCheckAccess, mockCheckWriteAccess, mockDbChain } = vi.hoisted(() => { + const chain = { + select: vi.fn().mockReturnThis(), + from: vi.fn().mockReturnThis(), + where: vi.fn().mockReturnThis(), + orderBy: vi.fn().mockResolvedValue([]), + limit: vi.fn().mockResolvedValue([]), + update: vi.fn().mockReturnThis(), + set: vi.fn().mockReturnThis(), + returning: vi.fn().mockResolvedValue([]), + } + return { + mockCheckSession: vi.fn(), + mockCheckAccess: vi.fn(), + mockCheckWriteAccess: vi.fn(), + mockDbChain: chain, + } +}) + +vi.mock('@sim/db', () => ({ db: mockDbChain })) +vi.mock('@sim/db/schema', () => ({ + document: { + id: 'id', + connectorId: 'connectorId', + deletedAt: 'deletedAt', + filename: 'filename', + externalId: 'externalId', + sourceUrl: 'sourceUrl', + enabled: 'enabled', + userExcluded: 'userExcluded', + uploadedAt: 'uploadedAt', + processingStatus: 'processingStatus', + }, + knowledgeConnector: { + id: 'id', + knowledgeBaseId: 'knowledgeBaseId', + deletedAt: 'deletedAt', + }, +})) +vi.mock('@/app/api/knowledge/utils', () => ({ + checkKnowledgeBaseAccess: mockCheckAccess, + checkKnowledgeBaseWriteAccess: mockCheckWriteAccess, +})) +vi.mock('@/lib/auth/hybrid', () => ({ + checkSessionOrInternalAuth: mockCheckSession, +})) +vi.mock('@/lib/core/utils/request', () => ({ + generateRequestId: vi.fn().mockReturnValue('test-req-id'), +})) + +import { GET, PATCH } from '@/app/api/knowledge/[id]/connectors/[connectorId]/documents/route' + +describe('Connector Documents API Route', () => { + const mockParams = Promise.resolve({ id: 'kb-123', connectorId: 'conn-456' }) + + beforeEach(() => { + vi.clearAllMocks() + mockDbChain.select.mockReturnThis() + mockDbChain.from.mockReturnThis() + mockDbChain.where.mockReturnThis() + mockDbChain.orderBy.mockResolvedValue([]) + mockDbChain.limit.mockResolvedValue([]) + mockDbChain.update.mockReturnThis() + mockDbChain.set.mockReturnThis() + mockDbChain.returning.mockResolvedValue([]) + 
}) + + describe('GET', () => { + it('returns 401 when unauthenticated', async () => { + mockCheckSession.mockResolvedValue({ success: false, userId: null }) + + const req = createMockRequest('GET') + const response = await GET(req as never, { params: mockParams }) + + expect(response.status).toBe(401) + }) + + it('returns 404 when connector not found', async () => { + mockCheckSession.mockResolvedValue({ success: true, userId: 'user-1' }) + mockCheckAccess.mockResolvedValue({ hasAccess: true }) + mockDbChain.limit.mockResolvedValueOnce([]) + + const req = createMockRequest('GET') + const response = await GET(req as never, { params: mockParams }) + + expect(response.status).toBe(404) + }) + + it('returns documents list on success', async () => { + mockCheckSession.mockResolvedValue({ success: true, userId: 'user-1' }) + mockCheckAccess.mockResolvedValue({ hasAccess: true }) + + const doc = { id: 'doc-1', filename: 'test.txt', userExcluded: false } + mockDbChain.limit.mockResolvedValueOnce([{ id: 'conn-456' }]) + mockDbChain.orderBy.mockResolvedValueOnce([doc]) + + const url = 'http://localhost/api/knowledge/kb-123/connectors/conn-456/documents' + const req = createMockRequest('GET', undefined, undefined, url) + Object.assign(req, { nextUrl: new URL(url) }) + const response = await GET(req as never, { params: mockParams }) + const data = await response.json() + + expect(response.status).toBe(200) + expect(data.data.documents).toHaveLength(1) + expect(data.data.counts.active).toBe(1) + expect(data.data.counts.excluded).toBe(0) + }) + + it('includes excluded documents when includeExcluded=true', async () => { + mockCheckSession.mockResolvedValue({ success: true, userId: 'user-1' }) + mockCheckAccess.mockResolvedValue({ hasAccess: true }) + + mockDbChain.limit.mockResolvedValueOnce([{ id: 'conn-456' }]) + mockDbChain.orderBy + .mockResolvedValueOnce([{ id: 'doc-1', userExcluded: false }]) + .mockResolvedValueOnce([{ id: 'doc-2', userExcluded: true }]) + + const url = + 
'http://localhost/api/knowledge/kb-123/connectors/conn-456/documents?includeExcluded=true' + const req = createMockRequest('GET', undefined, undefined, url) + Object.assign(req, { nextUrl: new URL(url) }) + const response = await GET(req as never, { params: mockParams }) + const data = await response.json() + + expect(response.status).toBe(200) + expect(data.data.documents).toHaveLength(2) + expect(data.data.counts.active).toBe(1) + expect(data.data.counts.excluded).toBe(1) + }) + }) + + describe('PATCH', () => { + it('returns 401 when unauthenticated', async () => { + mockCheckSession.mockResolvedValue({ success: false, userId: null }) + + const req = createMockRequest('PATCH', { operation: 'restore', documentIds: ['doc-1'] }) + const response = await PATCH(req as never, { params: mockParams }) + + expect(response.status).toBe(401) + }) + + it('returns 400 for invalid body', async () => { + mockCheckSession.mockResolvedValue({ success: true, userId: 'user-1' }) + mockCheckWriteAccess.mockResolvedValue({ hasAccess: true }) + mockDbChain.limit.mockResolvedValueOnce([{ id: 'conn-456' }]) + + const req = createMockRequest('PATCH', { documentIds: [] }) + const response = await PATCH(req as never, { params: mockParams }) + + expect(response.status).toBe(400) + }) + + it('returns 404 when connector not found', async () => { + mockCheckSession.mockResolvedValue({ success: true, userId: 'user-1' }) + mockCheckWriteAccess.mockResolvedValue({ hasAccess: true }) + mockDbChain.limit.mockResolvedValueOnce([]) + + const req = createMockRequest('PATCH', { operation: 'restore', documentIds: ['doc-1'] }) + const response = await PATCH(req as never, { params: mockParams }) + + expect(response.status).toBe(404) + }) + + it('returns success for restore operation', async () => { + mockCheckSession.mockResolvedValue({ success: true, userId: 'user-1' }) + mockCheckWriteAccess.mockResolvedValue({ hasAccess: true }) + mockDbChain.limit.mockResolvedValueOnce([{ id: 'conn-456' }]) + 
mockDbChain.returning.mockResolvedValueOnce([{ id: 'doc-1' }]) + + const req = createMockRequest('PATCH', { operation: 'restore', documentIds: ['doc-1'] }) + const response = await PATCH(req as never, { params: mockParams }) + const data = await response.json() + + expect(response.status).toBe(200) + expect(data.data.restoredCount).toBe(1) + }) + + it('returns success for exclude operation', async () => { + mockCheckSession.mockResolvedValue({ success: true, userId: 'user-1' }) + mockCheckWriteAccess.mockResolvedValue({ hasAccess: true }) + mockDbChain.limit.mockResolvedValueOnce([{ id: 'conn-456' }]) + mockDbChain.returning.mockResolvedValueOnce([{ id: 'doc-2' }, { id: 'doc-3' }]) + + const req = createMockRequest('PATCH', { + operation: 'exclude', + documentIds: ['doc-2', 'doc-3'], + }) + const response = await PATCH(req as never, { params: mockParams }) + const data = await response.json() + + expect(response.status).toBe(200) + expect(data.data.excludedCount).toBe(2) + expect(data.data.documentIds).toEqual(['doc-2', 'doc-3']) + }) + }) +}) diff --git a/apps/sim/app/api/knowledge/[id]/connectors/[connectorId]/documents/route.ts b/apps/sim/app/api/knowledge/[id]/connectors/[connectorId]/documents/route.ts new file mode 100644 index 0000000000..0b5e64c528 --- /dev/null +++ b/apps/sim/app/api/knowledge/[id]/connectors/[connectorId]/documents/route.ts @@ -0,0 +1,217 @@ +import { db } from '@sim/db' +import { document, knowledgeConnector } from '@sim/db/schema' +import { createLogger } from '@sim/logger' +import { and, eq, inArray, isNull } from 'drizzle-orm' +import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' +import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid' +import { generateRequestId } from '@/lib/core/utils/request' +import { checkKnowledgeBaseAccess, checkKnowledgeBaseWriteAccess } from '@/app/api/knowledge/utils' + +const logger = createLogger('ConnectorDocumentsAPI') + +type RouteParams = { params: Promise<{ id: 
string; connectorId: string }> } + +/** + * GET /api/knowledge/[id]/connectors/[connectorId]/documents + * Returns documents for a connector, optionally including user-excluded ones. + */ +export async function GET(request: NextRequest, { params }: RouteParams) { + const requestId = generateRequestId() + const { id: knowledgeBaseId, connectorId } = await params + + try { + const auth = await checkSessionOrInternalAuth(request, { requireWorkflowId: false }) + if (!auth.success || !auth.userId) { + return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) + } + + const accessCheck = await checkKnowledgeBaseAccess(knowledgeBaseId, auth.userId) + if (!accessCheck.hasAccess) { + const status = 'notFound' in accessCheck && accessCheck.notFound ? 404 : 401 + return NextResponse.json({ error: status === 404 ? 'Not found' : 'Unauthorized' }, { status }) + } + + const connectorRows = await db + .select({ id: knowledgeConnector.id }) + .from(knowledgeConnector) + .where( + and( + eq(knowledgeConnector.id, connectorId), + eq(knowledgeConnector.knowledgeBaseId, knowledgeBaseId), + isNull(knowledgeConnector.archivedAt), + isNull(knowledgeConnector.deletedAt) + ) + ) + .limit(1) + + if (connectorRows.length === 0) { + return NextResponse.json({ error: 'Connector not found' }, { status: 404 }) + } + + const includeExcluded = request.nextUrl.searchParams.get('includeExcluded') === 'true' + + const activeDocs = await db + .select({ + id: document.id, + filename: document.filename, + externalId: document.externalId, + sourceUrl: document.sourceUrl, + enabled: document.enabled, + userExcluded: document.userExcluded, + uploadedAt: document.uploadedAt, + processingStatus: document.processingStatus, + }) + .from(document) + .where( + and( + eq(document.connectorId, connectorId), + isNull(document.archivedAt), + isNull(document.deletedAt), + eq(document.userExcluded, false) + ) + ) + .orderBy(document.filename) + + const excludedDocs = includeExcluded + ? 
await db + .select({ + id: document.id, + filename: document.filename, + externalId: document.externalId, + sourceUrl: document.sourceUrl, + enabled: document.enabled, + userExcluded: document.userExcluded, + uploadedAt: document.uploadedAt, + processingStatus: document.processingStatus, + }) + .from(document) + .where( + and( + eq(document.connectorId, connectorId), + eq(document.userExcluded, true), + isNull(document.archivedAt), + isNull(document.deletedAt) + ) + ) + .orderBy(document.filename) + : [] + + const docs = [...activeDocs, ...excludedDocs] + const activeCount = activeDocs.length + const excludedCount = excludedDocs.length + + return NextResponse.json({ + success: true, + data: { + documents: docs, + counts: { active: activeCount, excluded: excludedCount }, + }, + }) + } catch (error) { + logger.error(`[${requestId}] Error fetching connector documents`, error) + return NextResponse.json({ error: 'Internal server error' }, { status: 500 }) + } +} + +const PatchSchema = z.object({ + operation: z.enum(['restore', 'exclude']), + documentIds: z.array(z.string()).min(1), +}) + +/** + * PATCH /api/knowledge/[id]/connectors/[connectorId]/documents + * Restore or exclude connector documents. + */ +export async function PATCH(request: NextRequest, { params }: RouteParams) { + const requestId = generateRequestId() + const { id: knowledgeBaseId, connectorId } = await params + + try { + const auth = await checkSessionOrInternalAuth(request, { requireWorkflowId: false }) + if (!auth.success || !auth.userId) { + return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) + } + + const writeCheck = await checkKnowledgeBaseWriteAccess(knowledgeBaseId, auth.userId) + if (!writeCheck.hasAccess) { + const status = 'notFound' in writeCheck && writeCheck.notFound ? 404 : 401 + return NextResponse.json({ error: status === 404 ? 
'Not found' : 'Unauthorized' }, { status }) + } + + const connectorRows = await db + .select({ id: knowledgeConnector.id }) + .from(knowledgeConnector) + .where( + and( + eq(knowledgeConnector.id, connectorId), + eq(knowledgeConnector.knowledgeBaseId, knowledgeBaseId), + isNull(knowledgeConnector.archivedAt), + isNull(knowledgeConnector.deletedAt) + ) + ) + .limit(1) + + if (connectorRows.length === 0) { + return NextResponse.json({ error: 'Connector not found' }, { status: 404 }) + } + + const body = await request.json() + const parsed = PatchSchema.safeParse(body) + if (!parsed.success) { + return NextResponse.json( + { error: 'Invalid request', details: parsed.error.flatten() }, + { status: 400 } + ) + } + + const { operation, documentIds } = parsed.data + + if (operation === 'restore') { + const updated = await db + .update(document) + .set({ userExcluded: false, enabled: true }) + .where( + and( + eq(document.connectorId, connectorId), + inArray(document.id, documentIds), + eq(document.userExcluded, true), + isNull(document.archivedAt), + isNull(document.deletedAt) + ) + ) + .returning({ id: document.id }) + + logger.info(`[${requestId}] Restored ${updated.length} excluded documents`, { connectorId }) + + return NextResponse.json({ + success: true, + data: { restoredCount: updated.length, documentIds: updated.map((d) => d.id) }, + }) + } + + const updated = await db + .update(document) + .set({ userExcluded: true, enabled: false }) + .where( + and( + eq(document.connectorId, connectorId), + inArray(document.id, documentIds), + eq(document.userExcluded, false), + isNull(document.archivedAt), + isNull(document.deletedAt) + ) + ) + .returning({ id: document.id }) + + logger.info(`[${requestId}] Excluded ${updated.length} documents`, { connectorId }) + + return NextResponse.json({ + success: true, + data: { excludedCount: updated.length, documentIds: updated.map((d) => d.id) }, + }) + } catch (error) { + logger.error(`[${requestId}] Error updating connector 
documents`, error) + return NextResponse.json({ error: 'Internal server error' }, { status: 500 }) + } +} diff --git a/apps/sim/app/api/knowledge/[id]/connectors/[connectorId]/route.test.ts b/apps/sim/app/api/knowledge/[id]/connectors/[connectorId]/route.test.ts new file mode 100644 index 0000000000..c39c7866b6 --- /dev/null +++ b/apps/sim/app/api/knowledge/[id]/connectors/[connectorId]/route.test.ts @@ -0,0 +1,230 @@ +/** + * @vitest-environment node + */ +import { createMockRequest } from '@sim/testing' +import { beforeEach, describe, expect, it, vi } from 'vitest' + +const { mockCheckSession, mockCheckAccess, mockCheckWriteAccess, mockDbChain, mockValidateConfig } = + vi.hoisted(() => { + const chain = { + select: vi.fn().mockReturnThis(), + from: vi.fn().mockReturnThis(), + where: vi.fn().mockReturnThis(), + orderBy: vi.fn().mockReturnThis(), + limit: vi.fn().mockResolvedValue([]), + execute: vi.fn().mockResolvedValue(undefined), + transaction: vi.fn(), + insert: vi.fn().mockReturnThis(), + values: vi.fn().mockResolvedValue(undefined), + update: vi.fn().mockReturnThis(), + delete: vi.fn().mockReturnThis(), + set: vi.fn().mockReturnThis(), + returning: vi.fn().mockResolvedValue([]), + } + return { + mockCheckSession: vi.fn(), + mockCheckAccess: vi.fn(), + mockCheckWriteAccess: vi.fn(), + mockDbChain: chain, + mockValidateConfig: vi.fn(), + } + }) + +vi.mock('@sim/db', () => ({ db: mockDbChain })) +vi.mock('@sim/db/schema', () => ({ + document: { + id: 'id', + connectorId: 'connectorId', + fileUrl: 'fileUrl', + archivedAt: 'archivedAt', + deletedAt: 'deletedAt', + }, + embedding: { documentId: 'documentId' }, + knowledgeBase: { id: 'id', userId: 'userId' }, + knowledgeConnector: { + id: 'id', + knowledgeBaseId: 'knowledgeBaseId', + archivedAt: 'archivedAt', + deletedAt: 'deletedAt', + connectorType: 'connectorType', + credentialId: 'credentialId', + }, + knowledgeConnectorSyncLog: { connectorId: 'connectorId', startedAt: 'startedAt' }, +})) 
+vi.mock('@/app/api/knowledge/utils', () => ({ + checkKnowledgeBaseAccess: mockCheckAccess, + checkKnowledgeBaseWriteAccess: mockCheckWriteAccess, +})) +vi.mock('@/lib/auth/hybrid', () => ({ + checkSessionOrInternalAuth: mockCheckSession, +})) +vi.mock('@/lib/core/utils/request', () => ({ + generateRequestId: vi.fn().mockReturnValue('test-req-id'), +})) +vi.mock('@/app/api/auth/oauth/utils', () => ({ + refreshAccessTokenIfNeeded: vi.fn(), +})) +vi.mock('@/connectors/registry', () => ({ + CONNECTOR_REGISTRY: { + jira: { validateConfig: mockValidateConfig }, + }, +})) +vi.mock('@/lib/knowledge/tags/service', () => ({ + cleanupUnusedTagDefinitions: vi.fn().mockResolvedValue(undefined), +})) +vi.mock('@/lib/knowledge/documents/service', () => ({ + deleteDocumentStorageFiles: vi.fn().mockResolvedValue(undefined), +})) + +import { DELETE, GET, PATCH } from '@/app/api/knowledge/[id]/connectors/[connectorId]/route' + +describe('Knowledge Connector By ID API Route', () => { + const mockParams = Promise.resolve({ id: 'kb-123', connectorId: 'conn-456' }) + + beforeEach(() => { + vi.clearAllMocks() + mockDbChain.select.mockReturnThis() + mockDbChain.from.mockReturnThis() + mockDbChain.where.mockReturnThis() + mockDbChain.orderBy.mockReturnThis() + mockDbChain.limit.mockResolvedValue([]) + mockDbChain.execute.mockResolvedValue(undefined) + mockDbChain.transaction.mockImplementation( + async (callback: (tx: typeof mockDbChain) => unknown) => callback(mockDbChain) + ) + mockDbChain.update.mockReturnThis() + mockDbChain.delete.mockReturnThis() + mockDbChain.set.mockReturnThis() + mockDbChain.returning.mockResolvedValue([]) + }) + + describe('GET', () => { + it('returns 401 when unauthenticated', async () => { + mockCheckSession.mockResolvedValue({ success: false, userId: null }) + + const req = createMockRequest('GET') + const response = await GET(req, { params: mockParams }) + + expect(response.status).toBe(401) + }) + + it('returns 404 when KB not found', async () => { + 
mockCheckSession.mockResolvedValue({ success: true, userId: 'user-1' }) + mockCheckAccess.mockResolvedValue({ hasAccess: false, notFound: true }) + + const req = createMockRequest('GET') + const response = await GET(req, { params: mockParams }) + + expect(response.status).toBe(404) + }) + + it('returns 404 when connector not found', async () => { + mockCheckSession.mockResolvedValue({ success: true, userId: 'user-1' }) + mockCheckAccess.mockResolvedValue({ hasAccess: true }) + mockDbChain.limit.mockResolvedValueOnce([]) + + const req = createMockRequest('GET') + const response = await GET(req, { params: mockParams }) + + expect(response.status).toBe(404) + }) + + it('returns connector with sync logs on success', async () => { + mockCheckSession.mockResolvedValue({ success: true, userId: 'user-1' }) + mockCheckAccess.mockResolvedValue({ hasAccess: true }) + + const mockConnector = { id: 'conn-456', connectorType: 'jira', status: 'active' } + const mockLogs = [{ id: 'log-1', status: 'completed' }] + + mockDbChain.limit.mockResolvedValueOnce([mockConnector]).mockResolvedValueOnce(mockLogs) + + const req = createMockRequest('GET') + const response = await GET(req, { params: mockParams }) + const data = await response.json() + + expect(response.status).toBe(200) + expect(data.success).toBe(true) + expect(data.data.id).toBe('conn-456') + expect(data.data.syncLogs).toHaveLength(1) + }) + }) + + describe('PATCH', () => { + it('returns 401 when unauthenticated', async () => { + mockCheckSession.mockResolvedValue({ success: false, userId: null }) + + const req = createMockRequest('PATCH', { status: 'paused' }) + const response = await PATCH(req, { params: mockParams }) + + expect(response.status).toBe(401) + }) + + it('returns 400 for invalid body', async () => { + mockCheckSession.mockResolvedValue({ success: true, userId: 'user-1' }) + mockCheckWriteAccess.mockResolvedValue({ hasAccess: true }) + + const req = createMockRequest('PATCH', { syncIntervalMinutes: 'not a 
number' }) + const response = await PATCH(req, { params: mockParams }) + const data = await response.json() + + expect(response.status).toBe(400) + expect(data.error).toBe('Invalid request') + }) + + it('returns 404 when connector not found during sourceConfig validation', async () => { + mockCheckSession.mockResolvedValue({ success: true, userId: 'user-1' }) + mockCheckWriteAccess.mockResolvedValue({ hasAccess: true }) + mockDbChain.limit.mockResolvedValueOnce([]) + + const req = createMockRequest('PATCH', { sourceConfig: { project: 'NEW' } }) + const response = await PATCH(req, { params: mockParams }) + + expect(response.status).toBe(404) + }) + + it('returns 200 and updates status', async () => { + mockCheckSession.mockResolvedValue({ success: true, userId: 'user-1' }) + mockCheckWriteAccess.mockResolvedValue({ hasAccess: true }) + + const updatedConnector = { id: 'conn-456', status: 'paused', syncIntervalMinutes: 120 } + mockDbChain.limit.mockResolvedValueOnce([updatedConnector]) + + const req = createMockRequest('PATCH', { status: 'paused', syncIntervalMinutes: 120 }) + const response = await PATCH(req, { params: mockParams }) + const data = await response.json() + + expect(response.status).toBe(200) + expect(data.success).toBe(true) + expect(data.data.status).toBe('paused') + }) + }) + + describe('DELETE', () => { + it('returns 401 when unauthenticated', async () => { + mockCheckSession.mockResolvedValue({ success: false, userId: null }) + + const req = createMockRequest('DELETE') + const response = await DELETE(req, { params: mockParams }) + + expect(response.status).toBe(401) + }) + + it('returns 200 on successful hard-delete', async () => { + mockCheckSession.mockResolvedValue({ success: true, userId: 'user-1' }) + mockCheckWriteAccess.mockResolvedValue({ hasAccess: true }) + mockDbChain.where + .mockReturnValueOnce(mockDbChain) + .mockResolvedValueOnce([{ id: 'doc-1', fileUrl: '/api/uploads/test.txt' }]) + .mockReturnValueOnce(mockDbChain) + 
mockDbChain.limit.mockResolvedValueOnce([{ id: 'conn-456' }]) + mockDbChain.returning.mockResolvedValueOnce([{ id: 'conn-456' }]) + + const req = createMockRequest('DELETE') + const response = await DELETE(req, { params: mockParams }) + const data = await response.json() + + expect(response.status).toBe(200) + expect(data.success).toBe(true) + }) + }) +}) diff --git a/apps/sim/app/api/knowledge/[id]/connectors/[connectorId]/route.ts b/apps/sim/app/api/knowledge/[id]/connectors/[connectorId]/route.ts new file mode 100644 index 0000000000..cfdca60afa --- /dev/null +++ b/apps/sim/app/api/knowledge/[id]/connectors/[connectorId]/route.ts @@ -0,0 +1,331 @@ +import { db } from '@sim/db' +import { + document, + embedding, + knowledgeBase, + knowledgeConnector, + knowledgeConnectorSyncLog, +} from '@sim/db/schema' +import { createLogger } from '@sim/logger' +import { and, desc, eq, inArray, isNull, sql } from 'drizzle-orm' +import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' +import { decryptApiKey } from '@/lib/api-key/crypto' +import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid' +import { generateRequestId } from '@/lib/core/utils/request' +import { deleteDocumentStorageFiles } from '@/lib/knowledge/documents/service' +import { cleanupUnusedTagDefinitions } from '@/lib/knowledge/tags/service' +import { refreshAccessTokenIfNeeded } from '@/app/api/auth/oauth/utils' +import { checkKnowledgeBaseAccess, checkKnowledgeBaseWriteAccess } from '@/app/api/knowledge/utils' +import { CONNECTOR_REGISTRY } from '@/connectors/registry' + +const logger = createLogger('KnowledgeConnectorByIdAPI') + +type RouteParams = { params: Promise<{ id: string; connectorId: string }> } + +const UpdateConnectorSchema = z.object({ + sourceConfig: z.record(z.unknown()).optional(), + syncIntervalMinutes: z.number().int().min(0).optional(), + status: z.enum(['active', 'paused']).optional(), +}) + +/** + * GET /api/knowledge/[id]/connectors/[connectorId] - Get 
connector details with recent sync logs + */ +export async function GET(request: NextRequest, { params }: RouteParams) { + const requestId = generateRequestId() + const { id: knowledgeBaseId, connectorId } = await params + + try { + const auth = await checkSessionOrInternalAuth(request, { requireWorkflowId: false }) + if (!auth.success || !auth.userId) { + return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) + } + + const accessCheck = await checkKnowledgeBaseAccess(knowledgeBaseId, auth.userId) + if (!accessCheck.hasAccess) { + const status = 'notFound' in accessCheck && accessCheck.notFound ? 404 : 401 + return NextResponse.json({ error: status === 404 ? 'Not found' : 'Unauthorized' }, { status }) + } + + const connectorRows = await db + .select() + .from(knowledgeConnector) + .where( + and( + eq(knowledgeConnector.id, connectorId), + eq(knowledgeConnector.knowledgeBaseId, knowledgeBaseId), + isNull(knowledgeConnector.archivedAt), + isNull(knowledgeConnector.deletedAt) + ) + ) + .limit(1) + + if (connectorRows.length === 0) { + return NextResponse.json({ error: 'Connector not found' }, { status: 404 }) + } + + const syncLogs = await db + .select() + .from(knowledgeConnectorSyncLog) + .where(eq(knowledgeConnectorSyncLog.connectorId, connectorId)) + .orderBy(desc(knowledgeConnectorSyncLog.startedAt)) + .limit(10) + + const { encryptedApiKey: _, ...connectorData } = connectorRows[0] + return NextResponse.json({ + success: true, + data: { + ...connectorData, + syncLogs, + }, + }) + } catch (error) { + logger.error(`[${requestId}] Error fetching connector`, error) + return NextResponse.json({ error: 'Internal server error' }, { status: 500 }) + } +} + +/** + * PATCH /api/knowledge/[id]/connectors/[connectorId] - Update a connector + */ +export async function PATCH(request: NextRequest, { params }: RouteParams) { + const requestId = generateRequestId() + const { id: knowledgeBaseId, connectorId } = await params + + try { + const auth = await 
checkSessionOrInternalAuth(request, { requireWorkflowId: false }) + if (!auth.success || !auth.userId) { + return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) + } + + const writeCheck = await checkKnowledgeBaseWriteAccess(knowledgeBaseId, auth.userId) + if (!writeCheck.hasAccess) { + const status = 'notFound' in writeCheck && writeCheck.notFound ? 404 : 401 + return NextResponse.json({ error: status === 404 ? 'Not found' : 'Unauthorized' }, { status }) + } + + const body = await request.json() + const parsed = UpdateConnectorSchema.safeParse(body) + if (!parsed.success) { + return NextResponse.json( + { error: 'Invalid request', details: parsed.error.flatten() }, + { status: 400 } + ) + } + + if (parsed.data.sourceConfig !== undefined) { + const existingRows = await db + .select() + .from(knowledgeConnector) + .where( + and( + eq(knowledgeConnector.id, connectorId), + eq(knowledgeConnector.knowledgeBaseId, knowledgeBaseId), + isNull(knowledgeConnector.archivedAt), + isNull(knowledgeConnector.deletedAt) + ) + ) + .limit(1) + + if (existingRows.length === 0) { + return NextResponse.json({ error: 'Connector not found' }, { status: 404 }) + } + + const existing = existingRows[0] + const connectorConfig = CONNECTOR_REGISTRY[existing.connectorType] + + if (!connectorConfig) { + return NextResponse.json( + { error: `Unknown connector type: ${existing.connectorType}` }, + { status: 400 } + ) + } + + const kbRows = await db + .select({ userId: knowledgeBase.userId }) + .from(knowledgeBase) + .where(eq(knowledgeBase.id, knowledgeBaseId)) + .limit(1) + + if (kbRows.length === 0) { + return NextResponse.json({ error: 'Knowledge base not found' }, { status: 404 }) + } + + let accessToken: string | null = null + if (connectorConfig.auth.mode === 'apiKey') { + if (!existing.encryptedApiKey) { + return NextResponse.json( + { error: 'API key not found. Please reconfigure the connector.' 
},
+ { status: 400 }
+ )
+ }
+ accessToken = (await decryptApiKey(existing.encryptedApiKey)).decrypted
+ } else {
+ if (!existing.credentialId) {
+ return NextResponse.json(
+ { error: 'OAuth credential not found. Please reconfigure the connector.' },
+ { status: 400 }
+ )
+ }
+ accessToken = await refreshAccessTokenIfNeeded(
+ existing.credentialId,
+ kbRows[0].userId,
+ `patch-${connectorId}`
+ )
+ }
+
+ if (!accessToken) {
+ return NextResponse.json(
+ { error: 'Failed to refresh access token. Please reconnect your account.' },
+ { status: 401 }
+ )
+ }
+
+ const validation = await connectorConfig.validateConfig(accessToken, parsed.data.sourceConfig)
+ if (!validation.valid) {
+ return NextResponse.json(
+ { error: validation.error || 'Invalid source configuration' },
+ { status: 400 }
+ )
+ }
+ }
+
+ const updates: Record<string, unknown> = { updatedAt: new Date() }
+ if (parsed.data.sourceConfig !== undefined) {
+ updates.sourceConfig = parsed.data.sourceConfig
+ }
+ if (parsed.data.syncIntervalMinutes !== undefined) {
+ updates.syncIntervalMinutes = parsed.data.syncIntervalMinutes
+ if (parsed.data.syncIntervalMinutes > 0) {
+ updates.nextSyncAt = new Date(Date.now() + parsed.data.syncIntervalMinutes * 60 * 1000)
+ } else {
+ updates.nextSyncAt = null
+ }
+ }
+ if (parsed.data.status !== undefined) {
+ updates.status = parsed.data.status
+ }
+
+ await db
+ .update(knowledgeConnector)
+ .set(updates)
+ .where(
+ and(
+ eq(knowledgeConnector.id, connectorId),
+ eq(knowledgeConnector.knowledgeBaseId, knowledgeBaseId),
+ isNull(knowledgeConnector.archivedAt),
+ isNull(knowledgeConnector.deletedAt)
+ )
+ )
+
+ const updated = await db
+ .select()
+ .from(knowledgeConnector)
+ .where(
+ and(
+ eq(knowledgeConnector.id, connectorId),
+ eq(knowledgeConnector.knowledgeBaseId, knowledgeBaseId),
+ isNull(knowledgeConnector.archivedAt),
+ isNull(knowledgeConnector.deletedAt)
+ )
+ )
+ .limit(1)
+
+ const { encryptedApiKey: __, ...updatedData } = updated[0]
+ return NextResponse.json({
success: true, data: updatedData }) + } catch (error) { + logger.error(`[${requestId}] Error updating connector`, error) + return NextResponse.json({ error: 'Internal server error' }, { status: 500 }) + } +} + +/** + * DELETE /api/knowledge/[id]/connectors/[connectorId] - Hard-delete a connector + */ +export async function DELETE(request: NextRequest, { params }: RouteParams) { + const requestId = generateRequestId() + const { id: knowledgeBaseId, connectorId } = await params + + try { + const auth = await checkSessionOrInternalAuth(request, { requireWorkflowId: false }) + if (!auth.success || !auth.userId) { + return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) + } + + const writeCheck = await checkKnowledgeBaseWriteAccess(knowledgeBaseId, auth.userId) + if (!writeCheck.hasAccess) { + const status = 'notFound' in writeCheck && writeCheck.notFound ? 404 : 401 + return NextResponse.json({ error: status === 404 ? 'Not found' : 'Unauthorized' }, { status }) + } + + const existingConnector = await db + .select({ id: knowledgeConnector.id }) + .from(knowledgeConnector) + .where( + and( + eq(knowledgeConnector.id, connectorId), + eq(knowledgeConnector.knowledgeBaseId, knowledgeBaseId), + isNull(knowledgeConnector.archivedAt), + isNull(knowledgeConnector.deletedAt) + ) + ) + .limit(1) + + if (existingConnector.length === 0) { + return NextResponse.json({ error: 'Connector not found' }, { status: 404 }) + } + + const connectorDocuments = await db.transaction(async (tx) => { + await tx.execute(sql`SELECT 1 FROM knowledge_connector WHERE id = ${connectorId} FOR UPDATE`) + + const docs = await tx + .select({ id: document.id, fileUrl: document.fileUrl }) + .from(document) + .where( + and( + eq(document.connectorId, connectorId), + isNull(document.archivedAt), + isNull(document.deletedAt) + ) + ) + + const documentIds = docs.map((doc) => doc.id) + if (documentIds.length > 0) { + await tx.delete(embedding).where(inArray(embedding.documentId, documentIds)) + 
await tx.delete(document).where(inArray(document.id, documentIds)) + } + + const deletedConnectors = await tx + .delete(knowledgeConnector) + .where( + and( + eq(knowledgeConnector.id, connectorId), + eq(knowledgeConnector.knowledgeBaseId, knowledgeBaseId), + isNull(knowledgeConnector.archivedAt), + isNull(knowledgeConnector.deletedAt) + ) + ) + .returning({ id: knowledgeConnector.id }) + + if (deletedConnectors.length === 0) { + throw new Error('Connector not found') + } + + return docs + }) + + await deleteDocumentStorageFiles(connectorDocuments, requestId) + + await cleanupUnusedTagDefinitions(knowledgeBaseId, requestId).catch((error) => { + logger.warn(`[${requestId}] Failed to cleanup tag definitions`, error) + }) + + logger.info(`[${requestId}] Hard-deleted connector ${connectorId} and its documents`) + + return NextResponse.json({ success: true }) + } catch (error) { + logger.error(`[${requestId}] Error deleting connector`, error) + return NextResponse.json({ error: 'Internal server error' }, { status: 500 }) + } +} diff --git a/apps/sim/app/api/knowledge/[id]/connectors/[connectorId]/sync/route.test.ts b/apps/sim/app/api/knowledge/[id]/connectors/[connectorId]/sync/route.test.ts new file mode 100644 index 0000000000..12a873a6f4 --- /dev/null +++ b/apps/sim/app/api/knowledge/[id]/connectors/[connectorId]/sync/route.test.ts @@ -0,0 +1,107 @@ +/** + * @vitest-environment node + */ +import { createMockRequest } from '@sim/testing' +import { beforeEach, describe, expect, it, vi } from 'vitest' + +const { mockCheckSession, mockCheckWriteAccess, mockDispatchSync, mockDbChain } = vi.hoisted(() => { + const chain = { + select: vi.fn().mockReturnThis(), + from: vi.fn().mockReturnThis(), + where: vi.fn().mockReturnThis(), + orderBy: vi.fn().mockResolvedValue([]), + limit: vi.fn().mockResolvedValue([]), + update: vi.fn().mockReturnThis(), + set: vi.fn().mockReturnThis(), + } + return { + mockCheckSession: vi.fn(), + mockCheckWriteAccess: vi.fn(), + mockDispatchSync: 
vi.fn().mockResolvedValue(undefined), + mockDbChain: chain, + } +}) + +vi.mock('@sim/db', () => ({ db: mockDbChain })) +vi.mock('@sim/db/schema', () => ({ + knowledgeConnector: { + id: 'id', + knowledgeBaseId: 'knowledgeBaseId', + deletedAt: 'deletedAt', + status: 'status', + }, +})) +vi.mock('@/app/api/knowledge/utils', () => ({ + checkKnowledgeBaseWriteAccess: mockCheckWriteAccess, +})) +vi.mock('@/lib/auth/hybrid', () => ({ + checkSessionOrInternalAuth: mockCheckSession, +})) +vi.mock('@/lib/core/utils/request', () => ({ + generateRequestId: vi.fn().mockReturnValue('test-req-id'), +})) +vi.mock('@/lib/knowledge/connectors/sync-engine', () => ({ + dispatchSync: mockDispatchSync, +})) + +import { POST } from '@/app/api/knowledge/[id]/connectors/[connectorId]/sync/route' + +describe('Connector Manual Sync API Route', () => { + const mockParams = Promise.resolve({ id: 'kb-123', connectorId: 'conn-456' }) + + beforeEach(() => { + vi.clearAllMocks() + mockDbChain.select.mockReturnThis() + mockDbChain.from.mockReturnThis() + mockDbChain.where.mockReturnThis() + mockDbChain.orderBy.mockResolvedValue([]) + mockDbChain.limit.mockResolvedValue([]) + mockDbChain.update.mockReturnThis() + mockDbChain.set.mockReturnThis() + }) + + it('returns 401 when unauthenticated', async () => { + mockCheckSession.mockResolvedValue({ success: false, userId: null }) + + const req = createMockRequest('POST') + const response = await POST(req as never, { params: mockParams }) + + expect(response.status).toBe(401) + }) + + it('returns 404 when connector not found', async () => { + mockCheckSession.mockResolvedValue({ success: true, userId: 'user-1' }) + mockCheckWriteAccess.mockResolvedValue({ hasAccess: true }) + mockDbChain.limit.mockResolvedValueOnce([]) + + const req = createMockRequest('POST') + const response = await POST(req as never, { params: mockParams }) + + expect(response.status).toBe(404) + }) + + it('returns 409 when connector is syncing', async () => { + 
mockCheckSession.mockResolvedValue({ success: true, userId: 'user-1' }) + mockCheckWriteAccess.mockResolvedValue({ hasAccess: true }) + mockDbChain.limit.mockResolvedValueOnce([{ id: 'conn-456', status: 'syncing' }]) + + const req = createMockRequest('POST') + const response = await POST(req as never, { params: mockParams }) + + expect(response.status).toBe(409) + }) + + it('dispatches sync on valid request', async () => { + mockCheckSession.mockResolvedValue({ success: true, userId: 'user-1' }) + mockCheckWriteAccess.mockResolvedValue({ hasAccess: true }) + mockDbChain.limit.mockResolvedValueOnce([{ id: 'conn-456', status: 'active' }]) + + const req = createMockRequest('POST') + const response = await POST(req as never, { params: mockParams }) + const data = await response.json() + + expect(response.status).toBe(200) + expect(data.success).toBe(true) + expect(mockDispatchSync).toHaveBeenCalledWith('conn-456', { requestId: 'test-req-id' }) + }) +}) diff --git a/apps/sim/app/api/knowledge/[id]/connectors/[connectorId]/sync/route.ts b/apps/sim/app/api/knowledge/[id]/connectors/[connectorId]/sync/route.ts new file mode 100644 index 0000000000..e6aae66eb2 --- /dev/null +++ b/apps/sim/app/api/knowledge/[id]/connectors/[connectorId]/sync/route.ts @@ -0,0 +1,72 @@ +import { db } from '@sim/db' +import { knowledgeConnector } from '@sim/db/schema' +import { createLogger } from '@sim/logger' +import { and, eq, isNull } from 'drizzle-orm' +import { type NextRequest, NextResponse } from 'next/server' +import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid' +import { generateRequestId } from '@/lib/core/utils/request' +import { dispatchSync } from '@/lib/knowledge/connectors/sync-engine' +import { checkKnowledgeBaseWriteAccess } from '@/app/api/knowledge/utils' + +const logger = createLogger('ConnectorManualSyncAPI') + +type RouteParams = { params: Promise<{ id: string; connectorId: string }> } + +/** + * POST /api/knowledge/[id]/connectors/[connectorId]/sync - Trigger a 
manual sync + */ +export async function POST(request: NextRequest, { params }: RouteParams) { + const requestId = generateRequestId() + const { id: knowledgeBaseId, connectorId } = await params + + try { + const auth = await checkSessionOrInternalAuth(request, { requireWorkflowId: false }) + if (!auth.success || !auth.userId) { + return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) + } + + const writeCheck = await checkKnowledgeBaseWriteAccess(knowledgeBaseId, auth.userId) + if (!writeCheck.hasAccess) { + const status = 'notFound' in writeCheck && writeCheck.notFound ? 404 : 401 + return NextResponse.json({ error: status === 404 ? 'Not found' : 'Unauthorized' }, { status }) + } + + const connectorRows = await db + .select() + .from(knowledgeConnector) + .where( + and( + eq(knowledgeConnector.id, connectorId), + eq(knowledgeConnector.knowledgeBaseId, knowledgeBaseId), + isNull(knowledgeConnector.archivedAt), + isNull(knowledgeConnector.deletedAt) + ) + ) + .limit(1) + + if (connectorRows.length === 0) { + return NextResponse.json({ error: 'Connector not found' }, { status: 404 }) + } + + if (connectorRows[0].status === 'syncing') { + return NextResponse.json({ error: 'Sync already in progress' }, { status: 409 }) + } + + logger.info(`[${requestId}] Manual sync triggered for connector ${connectorId}`) + + dispatchSync(connectorId, { requestId }).catch((error) => { + logger.error( + `[${requestId}] Failed to dispatch manual sync for connector ${connectorId}`, + error + ) + }) + + return NextResponse.json({ + success: true, + message: 'Sync triggered', + }) + } catch (error) { + logger.error(`[${requestId}] Error triggering manual sync`, error) + return NextResponse.json({ error: 'Internal server error' }, { status: 500 }) + } +} diff --git a/apps/sim/app/api/knowledge/[id]/connectors/route.ts b/apps/sim/app/api/knowledge/[id]/connectors/route.ts new file mode 100644 index 0000000000..c28cea60e4 --- /dev/null +++ 
b/apps/sim/app/api/knowledge/[id]/connectors/route.ts @@ -0,0 +1,251 @@ +import { db } from '@sim/db' +import { knowledgeBase, knowledgeBaseTagDefinitions, knowledgeConnector } from '@sim/db/schema' +import { createLogger } from '@sim/logger' +import { and, desc, eq, isNull, sql } from 'drizzle-orm' +import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' +import { encryptApiKey } from '@/lib/api-key/crypto' +import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid' +import { generateRequestId } from '@/lib/core/utils/request' +import { dispatchSync } from '@/lib/knowledge/connectors/sync-engine' +import { allocateTagSlots } from '@/lib/knowledge/constants' +import { createTagDefinition } from '@/lib/knowledge/tags/service' +import { getCredential } from '@/app/api/auth/oauth/utils' +import { checkKnowledgeBaseAccess, checkKnowledgeBaseWriteAccess } from '@/app/api/knowledge/utils' +import { CONNECTOR_REGISTRY } from '@/connectors/registry' + +const logger = createLogger('KnowledgeConnectorsAPI') + +const CreateConnectorSchema = z.object({ + connectorType: z.string().min(1), + credentialId: z.string().min(1).optional(), + apiKey: z.string().min(1).optional(), + sourceConfig: z.record(z.unknown()), + syncIntervalMinutes: z.number().int().min(0).default(1440), +}) + +/** + * GET /api/knowledge/[id]/connectors - List connectors for a knowledge base + */ +export async function GET(request: NextRequest, { params }: { params: Promise<{ id: string }> }) { + const requestId = generateRequestId() + const { id: knowledgeBaseId } = await params + + try { + const auth = await checkSessionOrInternalAuth(request, { requireWorkflowId: false }) + if (!auth.success || !auth.userId) { + return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) + } + + const accessCheck = await checkKnowledgeBaseAccess(knowledgeBaseId, auth.userId) + if (!accessCheck.hasAccess) { + const status = 'notFound' in accessCheck && accessCheck.notFound ? 
404 : 401 + return NextResponse.json({ error: status === 404 ? 'Not found' : 'Unauthorized' }, { status }) + } + + const connectors = await db + .select() + .from(knowledgeConnector) + .where( + and( + eq(knowledgeConnector.knowledgeBaseId, knowledgeBaseId), + isNull(knowledgeConnector.archivedAt), + isNull(knowledgeConnector.deletedAt) + ) + ) + .orderBy(desc(knowledgeConnector.createdAt)) + + return NextResponse.json({ + success: true, + data: connectors.map(({ encryptedApiKey: _, ...rest }) => rest), + }) + } catch (error) { + logger.error(`[${requestId}] Error listing connectors`, error) + return NextResponse.json({ error: 'Internal server error' }, { status: 500 }) + } +} + +/** + * POST /api/knowledge/[id]/connectors - Create a new connector + */ +export async function POST(request: NextRequest, { params }: { params: Promise<{ id: string }> }) { + const requestId = generateRequestId() + const { id: knowledgeBaseId } = await params + + try { + const auth = await checkSessionOrInternalAuth(request, { requireWorkflowId: false }) + if (!auth.success || !auth.userId) { + return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) + } + + const writeCheck = await checkKnowledgeBaseWriteAccess(knowledgeBaseId, auth.userId) + if (!writeCheck.hasAccess) { + const status = 'notFound' in writeCheck && writeCheck.notFound ? 404 : 401 + return NextResponse.json({ error: status === 404 ? 
'Not found' : 'Unauthorized' }, { status }) + } + + const body = await request.json() + const parsed = CreateConnectorSchema.safeParse(body) + if (!parsed.success) { + return NextResponse.json( + { error: 'Invalid request', details: parsed.error.flatten() }, + { status: 400 } + ) + } + + const { connectorType, credentialId, apiKey, sourceConfig, syncIntervalMinutes } = parsed.data + + const connectorConfig = CONNECTOR_REGISTRY[connectorType] + if (!connectorConfig) { + return NextResponse.json( + { error: `Unknown connector type: ${connectorType}` }, + { status: 400 } + ) + } + + let resolvedCredentialId: string | null = null + let resolvedEncryptedApiKey: string | null = null + let accessToken: string + + if (connectorConfig.auth.mode === 'apiKey') { + if (!apiKey) { + return NextResponse.json({ error: 'API key is required' }, { status: 400 }) + } + accessToken = apiKey + } else { + if (!credentialId) { + return NextResponse.json({ error: 'Credential is required' }, { status: 400 }) + } + + const credential = await getCredential(requestId, credentialId, auth.userId) + if (!credential) { + return NextResponse.json({ error: 'Credential not found' }, { status: 400 }) + } + + if (!credential.accessToken) { + return NextResponse.json( + { error: 'Credential has no access token. Please reconnect your account.' 
}, + { status: 400 } + ) + } + + accessToken = credential.accessToken + resolvedCredentialId = credentialId + } + + const validation = await connectorConfig.validateConfig(accessToken, sourceConfig) + if (!validation.valid) { + return NextResponse.json( + { error: validation.error || 'Invalid source configuration' }, + { status: 400 } + ) + } + + let finalSourceConfig: Record = { ...sourceConfig } + + if (connectorConfig.auth.mode === 'apiKey' && apiKey) { + const { encrypted } = await encryptApiKey(apiKey) + resolvedEncryptedApiKey = encrypted + } + + const tagSlotMapping: Record = {} + + if (connectorConfig.tagDefinitions?.length) { + const disabledIds = new Set((sourceConfig.disabledTagIds as string[] | undefined) ?? []) + const enabledDefs = connectorConfig.tagDefinitions.filter((td) => !disabledIds.has(td.id)) + + const existingDefs = await db + .select({ tagSlot: knowledgeBaseTagDefinitions.tagSlot }) + .from(knowledgeBaseTagDefinitions) + .where(eq(knowledgeBaseTagDefinitions.knowledgeBaseId, knowledgeBaseId)) + + const usedSlots = new Set(existingDefs.map((d) => d.tagSlot)) + const { mapping, skipped: skippedTags } = allocateTagSlots(enabledDefs, usedSlots) + Object.assign(tagSlotMapping, mapping) + + for (const name of skippedTags) { + logger.warn(`[${requestId}] No available slots for "${name}"`) + } + + if (skippedTags.length > 0 && Object.keys(tagSlotMapping).length === 0) { + return NextResponse.json( + { error: `No available tag slots. Could not assign: ${skippedTags.join(', ')}` }, + { status: 422 } + ) + } + + finalSourceConfig = { ...finalSourceConfig, tagSlotMapping } + } + + const now = new Date() + const connectorId = crypto.randomUUID() + const nextSyncAt = + syncIntervalMinutes > 0 ? 
new Date(now.getTime() + syncIntervalMinutes * 60 * 1000) : null + + await db.transaction(async (tx) => { + await tx.execute(sql`SELECT 1 FROM knowledge_base WHERE id = ${knowledgeBaseId} FOR UPDATE`) + + const activeKb = await tx + .select({ id: knowledgeBase.id }) + .from(knowledgeBase) + .where(and(eq(knowledgeBase.id, knowledgeBaseId), isNull(knowledgeBase.deletedAt))) + .limit(1) + + if (activeKb.length === 0) { + throw new Error('Knowledge base not found') + } + + for (const [semanticId, slot] of Object.entries(tagSlotMapping)) { + const td = connectorConfig.tagDefinitions!.find((d) => d.id === semanticId)! + await createTagDefinition( + { + knowledgeBaseId, + tagSlot: slot, + displayName: td.displayName, + fieldType: td.fieldType, + }, + requestId, + tx + ) + } + + await tx.insert(knowledgeConnector).values({ + id: connectorId, + knowledgeBaseId, + connectorType, + credentialId: resolvedCredentialId, + encryptedApiKey: resolvedEncryptedApiKey, + sourceConfig: finalSourceConfig, + syncIntervalMinutes, + status: 'active', + nextSyncAt, + createdAt: now, + updatedAt: now, + }) + }) + + logger.info(`[${requestId}] Created connector ${connectorId} for KB ${knowledgeBaseId}`) + + dispatchSync(connectorId, { requestId }).catch((error) => { + logger.error( + `[${requestId}] Failed to dispatch initial sync for connector ${connectorId}`, + error + ) + }) + + const created = await db + .select() + .from(knowledgeConnector) + .where(eq(knowledgeConnector.id, connectorId)) + .limit(1) + + const { encryptedApiKey: _, ...createdData } = created[0] + return NextResponse.json({ success: true, data: createdData }, { status: 201 }) + } catch (error) { + if (error instanceof Error && error.message === 'Knowledge base not found') { + return NextResponse.json({ error: 'Not found' }, { status: 404 }) + } + logger.error(`[${requestId}] Error creating connector`, error) + return NextResponse.json({ error: 'Internal server error' }, { status: 500 }) + } +} diff --git 
a/apps/sim/app/api/knowledge/[id]/documents/[documentId]/chunks/[chunkId]/route.ts b/apps/sim/app/api/knowledge/[id]/documents/[documentId]/chunks/[chunkId]/route.ts index 08c02d508b..aae5097404 100644 --- a/apps/sim/app/api/knowledge/[id]/documents/[documentId]/chunks/[chunkId]/route.ts +++ b/apps/sim/app/api/knowledge/[id]/documents/[documentId]/chunks/[chunkId]/route.ts @@ -95,6 +95,16 @@ export async function PUT( return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) } + if (accessCheck.document?.connectorId) { + logger.warn( + `[${requestId}] User ${session.user.id} attempted to update chunk on connector-synced document: Doc=${documentId}` + ) + return NextResponse.json( + { error: 'Chunks from connector-synced documents are read-only' }, + { status: 403 } + ) + } + const body = await req.json() try { @@ -167,6 +177,16 @@ export async function DELETE( return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) } + if (accessCheck.document?.connectorId) { + logger.warn( + `[${requestId}] User ${session.user.id} attempted to delete chunk on connector-synced document: Doc=${documentId}` + ) + return NextResponse.json( + { error: 'Chunks from connector-synced documents are read-only' }, + { status: 403 } + ) + } + await deleteChunk(chunkId, documentId, requestId) logger.info( diff --git a/apps/sim/app/api/knowledge/[id]/documents/[documentId]/chunks/route.ts b/apps/sim/app/api/knowledge/[id]/documents/[documentId]/chunks/route.ts index c7979d41b0..762f9be66c 100644 --- a/apps/sim/app/api/knowledge/[id]/documents/[documentId]/chunks/route.ts +++ b/apps/sim/app/api/knowledge/[id]/documents/[documentId]/chunks/route.ts @@ -158,6 +158,16 @@ export async function POST( return NextResponse.json({ error: 'Document not found' }, { status: 404 }) } + if (doc.connectorId) { + logger.warn( + `[${requestId}] User ${userId} attempted to create chunk on connector-synced document: Doc=${documentId}` + ) + return NextResponse.json( + { error: 'Chunks from 
connector-synced documents are read-only' }, + { status: 403 } + ) + } + // Allow manual chunk creation even if document is not fully processed // but it should exist and not be in failed state if (doc.processingStatus === 'failed') { @@ -283,6 +293,16 @@ export async function PATCH( return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) } + if (accessCheck.document?.connectorId) { + logger.warn( + `[${requestId}] User ${userId} attempted batch chunk operation on connector-synced document: Doc=${documentId}` + ) + return NextResponse.json( + { error: 'Chunks from connector-synced documents are read-only' }, + { status: 403 } + ) + } + const body = await req.json() try { diff --git a/apps/sim/app/api/knowledge/[id]/documents/route.ts b/apps/sim/app/api/knowledge/[id]/documents/route.ts index d8ac7324cf..18f7af35ac 100644 --- a/apps/sim/app/api/knowledge/[id]/documents/route.ts +++ b/apps/sim/app/api/knowledge/[id]/documents/route.ts @@ -13,6 +13,7 @@ import { getDocuments, getProcessingConfig, processDocumentsWithQueue, + type TagFilterCondition, } from '@/lib/knowledge/documents/service' import type { DocumentSortField, SortOrder } from '@/lib/knowledge/documents/types' import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils' @@ -131,6 +132,21 @@ export async function GET(req: NextRequest, { params }: { params: Promise<{ id: ? 
(sortOrderParam as SortOrder) : undefined + let tagFilters: TagFilterCondition[] | undefined + const tagFiltersParam = url.searchParams.get('tagFilters') + if (tagFiltersParam) { + try { + const parsed = JSON.parse(tagFiltersParam) + if (Array.isArray(parsed)) { + tagFilters = parsed.filter( + (f: TagFilterCondition) => f.tagSlot && f.operator && f.value !== undefined + ) + } + } catch { + logger.warn(`[${requestId}] Invalid tagFilters param`) + } + } + const result = await getDocuments( knowledgeBaseId, { @@ -140,6 +156,7 @@ export async function GET(req: NextRequest, { params }: { params: Promise<{ id: offset, ...(sortBy && { sortBy }), ...(sortOrder && { sortOrder }), + tagFilters, }, requestId ) @@ -351,8 +368,12 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id: const errorMessage = error instanceof Error ? error.message : 'Failed to create document' const isStorageLimitError = errorMessage.includes('Storage limit exceeded') || errorMessage.includes('storage limit') + const isMissingKnowledgeBase = errorMessage === 'Knowledge base not found' - return NextResponse.json({ error: errorMessage }, { status: isStorageLimitError ? 413 : 500 }) + return NextResponse.json( + { error: errorMessage }, + { status: isMissingKnowledgeBase ? 404 : isStorageLimitError ? 413 : 500 } + ) } } diff --git a/apps/sim/app/api/knowledge/[id]/next-available-slot/route.ts b/apps/sim/app/api/knowledge/[id]/next-available-slot/route.ts index b328b7d5b6..54318d6f60 100644 --- a/apps/sim/app/api/knowledge/[id]/next-available-slot/route.ts +++ b/apps/sim/app/api/knowledge/[id]/next-available-slot/route.ts @@ -30,7 +30,10 @@ export async function GET(req: NextRequest, { params }: { params: Promise<{ id: const accessCheck = await checkKnowledgeBaseAccess(knowledgeBaseId, session.user.id) if (!accessCheck.hasAccess) { - return NextResponse.json({ error: 'Forbidden' }, { status: 403 }) + return NextResponse.json( + { error: accessCheck.notFound ? 
'Not found' : 'Forbidden' }, + { status: accessCheck.notFound ? 404 : 403 } + ) } // Get existing definitions once and reuse diff --git a/apps/sim/app/api/knowledge/[id]/restore/route.ts b/apps/sim/app/api/knowledge/[id]/restore/route.ts new file mode 100644 index 0000000000..d202168532 --- /dev/null +++ b/apps/sim/app/api/knowledge/[id]/restore/route.ts @@ -0,0 +1,58 @@ +import { db } from '@sim/db' +import { knowledgeBase } from '@sim/db/schema' +import { createLogger } from '@sim/logger' +import { eq } from 'drizzle-orm' +import { type NextRequest, NextResponse } from 'next/server' +import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid' +import { generateRequestId } from '@/lib/core/utils/request' +import { restoreKnowledgeBase } from '@/lib/knowledge/service' +import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils' + +const logger = createLogger('RestoreKnowledgeBaseAPI') + +export async function POST(request: NextRequest, { params }: { params: Promise<{ id: string }> }) { + const requestId = generateRequestId() + const { id } = await params + + try { + const auth = await checkSessionOrInternalAuth(request, { requireWorkflowId: false }) + if (!auth.success || !auth.userId) { + return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) + } + + const [kb] = await db + .select({ + id: knowledgeBase.id, + workspaceId: knowledgeBase.workspaceId, + userId: knowledgeBase.userId, + }) + .from(knowledgeBase) + .where(eq(knowledgeBase.id, id)) + .limit(1) + + if (!kb) { + return NextResponse.json({ error: 'Knowledge base not found' }, { status: 404 }) + } + + if (kb.workspaceId) { + const permission = await getUserEntityPermissions(auth.userId, 'workspace', kb.workspaceId) + if (permission !== 'admin' && permission !== 'write') { + return NextResponse.json({ error: 'Insufficient permissions' }, { status: 403 }) + } + } else if (kb.userId !== auth.userId) { + return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) + } + 
+ await restoreKnowledgeBase(id, requestId) + + logger.info(`[${requestId}] Restored knowledge base ${id}`) + + return NextResponse.json({ success: true }) + } catch (error) { + logger.error(`[${requestId}] Error restoring knowledge base ${id}`, error) + return NextResponse.json( + { error: error instanceof Error ? error.message : 'Internal server error' }, + { status: 500 } + ) + } +} diff --git a/apps/sim/app/api/knowledge/[id]/tag-definitions/[tagId]/route.ts b/apps/sim/app/api/knowledge/[id]/tag-definitions/[tagId]/route.ts index a141461ec0..08b56be3e2 100644 --- a/apps/sim/app/api/knowledge/[id]/tag-definitions/[tagId]/route.ts +++ b/apps/sim/app/api/knowledge/[id]/tag-definitions/[tagId]/route.ts @@ -1,9 +1,9 @@ import { randomUUID } from 'crypto' import { createLogger } from '@sim/logger' import { type NextRequest, NextResponse } from 'next/server' -import { getSession } from '@/lib/auth' +import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid' import { deleteTagDefinition } from '@/lib/knowledge/tags/service' -import { checkKnowledgeBaseAccess } from '@/app/api/knowledge/utils' +import { checkKnowledgeBaseWriteAccess } from '@/app/api/knowledge/utils' export const dynamic = 'force-dynamic' @@ -22,17 +22,20 @@ export async function DELETE( `[${requestId}] Deleting tag definition ${tagId} from knowledge base ${knowledgeBaseId}` ) - const session = await getSession() - if (!session?.user?.id) { + const auth = await checkSessionOrInternalAuth(req, { requireWorkflowId: false }) + if (!auth.success || !auth.userId) { return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) } - const accessCheck = await checkKnowledgeBaseAccess(knowledgeBaseId, session.user.id) + const accessCheck = await checkKnowledgeBaseWriteAccess(knowledgeBaseId, auth.userId) if (!accessCheck.hasAccess) { - return NextResponse.json({ error: 'Forbidden' }, { status: 403 }) + return NextResponse.json( + { error: accessCheck.notFound ? 
'Not found' : 'Forbidden' }, + { status: accessCheck.notFound ? 404 : 403 } + ) } - const deletedTag = await deleteTagDefinition(tagId, requestId) + const deletedTag = await deleteTagDefinition(knowledgeBaseId, tagId, requestId) return NextResponse.json({ success: true, diff --git a/apps/sim/app/api/knowledge/[id]/tag-definitions/route.ts b/apps/sim/app/api/knowledge/[id]/tag-definitions/route.ts index ad1c7f4ddd..f4e75b0f13 100644 --- a/apps/sim/app/api/knowledge/[id]/tag-definitions/route.ts +++ b/apps/sim/app/api/knowledge/[id]/tag-definitions/route.ts @@ -5,7 +5,7 @@ import { z } from 'zod' import { AuthType, checkSessionOrInternalAuth } from '@/lib/auth/hybrid' import { SUPPORTED_FIELD_TYPES } from '@/lib/knowledge/constants' import { createTagDefinition, getTagDefinitions } from '@/lib/knowledge/tags/service' -import { checkKnowledgeBaseAccess } from '@/app/api/knowledge/utils' +import { checkKnowledgeBaseWriteAccess } from '@/app/api/knowledge/utils' export const dynamic = 'force-dynamic' @@ -26,9 +26,12 @@ export async function GET(req: NextRequest, { params }: { params: Promise<{ id: // For session auth, verify KB access. Internal JWT is trusted. if (auth.authType === AuthType.SESSION && auth.userId) { - const accessCheck = await checkKnowledgeBaseAccess(knowledgeBaseId, auth.userId) + const accessCheck = await checkKnowledgeBaseWriteAccess(knowledgeBaseId, auth.userId) if (!accessCheck.hasAccess) { - return NextResponse.json({ error: 'Forbidden' }, { status: 403 }) + return NextResponse.json( + { error: accessCheck.notFound ? 'Not found' : 'Forbidden' }, + { status: accessCheck.notFound ? 404 : 403 } + ) } } @@ -63,9 +66,12 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id: // For session auth, verify KB access. Internal JWT is trusted. 
if (auth.authType === AuthType.SESSION && auth.userId) { - const accessCheck = await checkKnowledgeBaseAccess(knowledgeBaseId, auth.userId) + const accessCheck = await checkKnowledgeBaseWriteAccess(knowledgeBaseId, auth.userId) if (!accessCheck.hasAccess) { - return NextResponse.json({ error: 'Forbidden' }, { status: 403 }) + return NextResponse.json( + { error: accessCheck.notFound ? 'Not found' : 'Forbidden' }, + { status: accessCheck.notFound ? 404 : 403 } + ) } } diff --git a/apps/sim/app/api/knowledge/[id]/tag-usage/route.ts b/apps/sim/app/api/knowledge/[id]/tag-usage/route.ts index 788ae89758..8b311143ff 100644 --- a/apps/sim/app/api/knowledge/[id]/tag-usage/route.ts +++ b/apps/sim/app/api/knowledge/[id]/tag-usage/route.ts @@ -24,7 +24,10 @@ export async function GET(req: NextRequest, { params }: { params: Promise<{ id: const accessCheck = await checkKnowledgeBaseAccess(knowledgeBaseId, session.user.id) if (!accessCheck.hasAccess) { - return NextResponse.json({ error: 'Forbidden' }, { status: 403 }) + return NextResponse.json( + { error: accessCheck.notFound ? 'Not found' : 'Forbidden' }, + { status: accessCheck.notFound ? 
404 : 403 } + ) } const usageStats = await getTagUsage(knowledgeBaseId, requestId) diff --git a/apps/sim/app/api/knowledge/connectors/sync/route.ts b/apps/sim/app/api/knowledge/connectors/sync/route.ts new file mode 100644 index 0000000000..dfddc72e44 --- /dev/null +++ b/apps/sim/app/api/knowledge/connectors/sync/route.ts @@ -0,0 +1,71 @@ +import { db } from '@sim/db' +import { knowledgeBase, knowledgeConnector } from '@sim/db/schema' +import { createLogger } from '@sim/logger' +import { and, eq, inArray, isNull, lte } from 'drizzle-orm' +import { type NextRequest, NextResponse } from 'next/server' +import { verifyCronAuth } from '@/lib/auth/internal' +import { generateRequestId } from '@/lib/core/utils/request' +import { dispatchSync } from '@/lib/knowledge/connectors/sync-engine' + +export const dynamic = 'force-dynamic' + +const logger = createLogger('ConnectorSyncSchedulerAPI') + +/** + * Cron endpoint that checks for connectors due for sync and dispatches sync jobs. + * Should be called every 5 minutes by an external cron service. 
+ */ +export async function GET(request: NextRequest) { + const requestId = generateRequestId() + logger.info(`[${requestId}] Connector sync scheduler triggered`) + + const authError = verifyCronAuth(request, 'Connector sync scheduler') + if (authError) { + return authError + } + + try { + const now = new Date() + + const dueConnectors = await db + .select({ + id: knowledgeConnector.id, + }) + .from(knowledgeConnector) + .innerJoin(knowledgeBase, eq(knowledgeConnector.knowledgeBaseId, knowledgeBase.id)) + .where( + and( + inArray(knowledgeConnector.status, ['active', 'error']), + lte(knowledgeConnector.nextSyncAt, now), + isNull(knowledgeConnector.archivedAt), + isNull(knowledgeConnector.deletedAt), + isNull(knowledgeBase.deletedAt) + ) + ) + + logger.info(`[${requestId}] Found ${dueConnectors.length} connectors due for sync`) + + if (dueConnectors.length === 0) { + return NextResponse.json({ + success: true, + message: 'No connectors due for sync', + count: 0, + }) + } + + for (const connector of dueConnectors) { + dispatchSync(connector.id, { requestId }).catch((error) => { + logger.error(`[${requestId}] Failed to dispatch sync for connector ${connector.id}`, error) + }) + } + + return NextResponse.json({ + success: true, + message: `Dispatched ${dueConnectors.length} connector sync(s)`, + count: dueConnectors.length, + }) + } catch (error) { + logger.error(`[${requestId}] Connector sync scheduler error`, error) + return NextResponse.json({ error: 'Internal server error' }, { status: 500 }) + } +} diff --git a/apps/sim/app/api/knowledge/route.ts b/apps/sim/app/api/knowledge/route.ts index f266d90d8d..d6a80bab11 100644 --- a/apps/sim/app/api/knowledge/route.ts +++ b/apps/sim/app/api/knowledge/route.ts @@ -5,7 +5,11 @@ import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log' import { getSession } from '@/lib/auth' import { PlatformEvents } from '@/lib/core/telemetry' import { generateRequestId } from '@/lib/core/utils/request' -import { 
createKnowledgeBase, getKnowledgeBases } from '@/lib/knowledge/service' +import { + createKnowledgeBase, + getKnowledgeBases, + type KnowledgeBaseScope, +} from '@/lib/knowledge/service' const logger = createLogger('KnowledgeBaseAPI') @@ -61,8 +65,12 @@ export async function GET(req: NextRequest) { const { searchParams } = new URL(req.url) const workspaceId = searchParams.get('workspaceId') + const scope = (searchParams.get('scope') ?? 'active') as KnowledgeBaseScope + if (!['active', 'archived', 'all'].includes(scope)) { + return NextResponse.json({ error: 'Invalid scope' }, { status: 400 }) + } - const knowledgeBasesWithCounts = await getKnowledgeBases(session.user.id, workspaceId) + const knowledgeBasesWithCounts = await getKnowledgeBases(session.user.id, workspaceId, scope) return NextResponse.json({ success: true, diff --git a/apps/sim/app/api/knowledge/search/route.test.ts b/apps/sim/app/api/knowledge/search/route.test.ts index d736edc44e..30027bca10 100644 --- a/apps/sim/app/api/knowledge/search/route.test.ts +++ b/apps/sim/app/api/knowledge/search/route.test.ts @@ -5,12 +5,7 @@ * * @vitest-environment node */ -import { - createEnvMock, - createMockRequest, - mockKnowledgeSchemas, - requestUtilsMock, -} from '@sim/testing' +import { createEnvMock, createMockRequest, requestUtilsMock } from '@sim/testing' import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest' const { @@ -61,7 +56,74 @@ vi.mock('drizzle-orm', () => ({ })), })) -mockKnowledgeSchemas() +vi.mock('@sim/db/schema', () => ({ + knowledgeBase: { + id: 'kb_id', + userId: 'user_id', + name: 'kb_name', + description: 'description', + tokenCount: 'token_count', + embeddingModel: 'embedding_model', + embeddingDimension: 'embedding_dimension', + chunkingConfig: 'chunking_config', + workspaceId: 'workspace_id', + createdAt: 'created_at', + updatedAt: 'updated_at', + deletedAt: 'deleted_at', + }, + document: { + id: 'doc_id', + knowledgeBaseId: 'kb_id', + filename: 'filename', + fileUrl: 
'file_url', + fileSize: 'file_size', + mimeType: 'mime_type', + chunkCount: 'chunk_count', + tokenCount: 'token_count', + characterCount: 'character_count', + processingStatus: 'processing_status', + processingStartedAt: 'processing_started_at', + processingCompletedAt: 'processing_completed_at', + processingError: 'processing_error', + enabled: 'enabled', + tag1: 'tag1', + tag2: 'tag2', + tag3: 'tag3', + tag4: 'tag4', + tag5: 'tag5', + tag6: 'tag6', + tag7: 'tag7', + uploadedAt: 'uploaded_at', + deletedAt: 'deleted_at', + }, + embedding: { + id: 'embedding_id', + documentId: 'doc_id', + knowledgeBaseId: 'kb_id', + chunkIndex: 'chunk_index', + content: 'content', + embedding: 'embedding', + tokenCount: 'token_count', + characterCount: 'character_count', + tag1: 'tag1', + tag2: 'tag2', + tag3: 'tag3', + tag4: 'tag4', + tag5: 'tag5', + tag6: 'tag6', + tag7: 'tag7', + createdAt: 'created_at', + }, + permissions: { + id: 'permission_id', + userId: 'user_id', + entityType: 'entity_type', + entityId: 'entity_id', + permissionType: 'permission_type', + createdAt: 'created_at', + updatedAt: 'updated_at', + }, +})) vi.mock('@sim/db', () => ({ db: mockDbChain, diff --git a/apps/sim/app/api/knowledge/search/route.ts b/apps/sim/app/api/knowledge/search/route.ts index 686f7c19cc..348a60ec71 100644 --- a/apps/sim/app/api/knowledge/search/route.ts +++ b/apps/sim/app/api/knowledge/search/route.ts @@ -28,7 +28,7 @@ const logger = createLogger('VectorSearchAPI') const StructuredTagFilterSchema = z.object({ tagName: z.string(), tagSlot: z.string().optional(), - fieldType: z.enum(['text', 'number', 'date', 'boolean']).default('text'), + fieldType: z.enum(['text', 'number', 'date', 'boolean']).optional(), operator: z.string().default('eq'), value: z.union([z.string(), z.number(), z.boolean()]), valueTo: z.union([z.string(), z.number()]).optional(), @@ -117,17 +117,56 @@ export async function POST(request: NextRequest) { // Handle tag filters if (validatedData.tagFilters && 
accessibleKbIds.length > 0) { - const kbId = accessibleKbIds[0] - const tagDefs = await getDocumentTagDefinitions(kbId) + const kbTagDefs = await Promise.all( + accessibleKbIds.map(async (kbId) => ({ + kbId, + tagDefs: await getDocumentTagDefinitions(kbId), + })) + ) - // Create mapping from display name to tag slot and fieldType const displayNameToTagDef: Record = {} - tagDefs.forEach((def) => { - displayNameToTagDef[def.displayName] = { - tagSlot: def.tagSlot, - fieldType: def.fieldType, + for (const { kbId, tagDefs } of kbTagDefs) { + const perKbMap = new Map( + tagDefs.map((def) => [ + def.displayName, + { tagSlot: def.tagSlot, fieldType: def.fieldType }, + ]) + ) + + for (const filter of validatedData.tagFilters) { + const current = perKbMap.get(filter.tagName) + if (!current) { + if (accessibleKbIds.length > 1) { + return NextResponse.json( + { + error: `Tag "${filter.tagName}" does not exist in all selected knowledge bases. Search those knowledge bases separately.`, + }, + { status: 400 } + ) + } + continue + } + + const existing = displayNameToTagDef[filter.tagName] + if ( + existing && + (existing.tagSlot !== current.tagSlot || existing.fieldType !== current.fieldType) + ) { + return NextResponse.json( + { + error: `Tag "${filter.tagName}" is not mapped consistently across the selected knowledge bases. Search those knowledge bases separately.`, + }, + { status: 400 } + ) + } + + displayNameToTagDef[filter.tagName] = current } - }) + + logger.debug(`[${requestId}] Loaded tag definitions for KB ${kbId}`, { + tagCount: tagDefs.length, + }) + } // Validate all tag filters first const undefinedTags: string[] = [] @@ -171,8 +210,8 @@ export async function POST(request: NextRequest) { // Build structured filters with validated data structuredFilters = validatedData.tagFilters.map((filter) => { const tagDef = displayNameToTagDef[filter.tagName]! 
- const tagSlot = filter.tagSlot || tagDef.tagSlot - const fieldType = filter.fieldType || tagDef.fieldType + const tagSlot = tagDef.tagSlot + const fieldType = tagDef.fieldType logger.debug( `[${requestId}] Structured filter: ${filter.tagName} -> ${tagSlot} (${fieldType}) ${filter.operator} ${filter.value}` @@ -212,6 +251,28 @@ export async function POST(request: NextRequest) { ) } + if (workflowId) { + const authorization = await authorizeWorkflowByWorkspacePermission({ + workflowId, + userId, + action: 'read', + }) + const workflowWorkspaceId = authorization.workflow?.workspaceId ?? null + if ( + workflowWorkspaceId && + accessChecks.some( + (accessCheck) => + accessCheck?.hasAccess && + accessCheck.knowledgeBase?.workspaceId !== workflowWorkspaceId + ) + ) { + return NextResponse.json( + { error: 'Knowledge base does not belong to the workflow workspace' }, + { status: 400 } + ) + } + } + let results: SearchResult[] const hasFilters = structuredFilters && structuredFilters.length > 0 diff --git a/apps/sim/app/api/knowledge/search/utils.ts b/apps/sim/app/api/knowledge/search/utils.ts index dc112fe24a..8ca7e7c438 100644 --- a/apps/sim/app/api/knowledge/search/utils.ts +++ b/apps/sim/app/api/knowledge/search/utils.ts @@ -17,7 +17,14 @@ export async function getDocumentNamesByIds( filename: document.filename, }) .from(document) - .where(and(inArray(document.id, uniqueIds), isNull(document.deletedAt))) + .where( + and( + inArray(document.id, uniqueIds), + eq(document.userExcluded, false), + isNull(document.archivedAt), + isNull(document.deletedAt) + ) + ) const documentNameMap: Record = {} documents.forEach((doc) => { @@ -313,6 +320,10 @@ async function executeTagFilterQuery( and( eq(embedding.knowledgeBaseId, knowledgeBaseIds[0]), eq(embedding.enabled, true), + eq(document.enabled, true), + eq(document.processingStatus, 'completed'), + eq(document.userExcluded, false), + isNull(document.archivedAt), isNull(document.deletedAt), ...tagFilterConditions ) @@ -326,6 
+337,10 @@ async function executeTagFilterQuery( and( inArray(embedding.knowledgeBaseId, knowledgeBaseIds), eq(embedding.enabled, true), + eq(document.enabled, true), + eq(document.processingStatus, 'completed'), + eq(document.userExcluded, false), + isNull(document.archivedAt), isNull(document.deletedAt), ...tagFilterConditions ) @@ -353,6 +368,10 @@ async function executeVectorSearchOnIds( .where( and( inArray(embedding.id, embeddingIds), + eq(document.enabled, true), + eq(document.processingStatus, 'completed'), + eq(document.userExcluded, false), + isNull(document.archivedAt), isNull(document.deletedAt), sql`${embedding.embedding} <=> ${queryVector}::vector < ${distanceThreshold}` ) @@ -384,6 +403,10 @@ export async function handleTagOnlySearch(params: SearchParams): Promise ${queryVector}::vector < ${distanceThreshold}` ) @@ -455,6 +486,10 @@ export async function handleVectorOnlySearch(params: SearchParams): Promise ${queryVector}::vector < ${distanceThreshold}` ) diff --git a/apps/sim/app/api/knowledge/utils.test.ts b/apps/sim/app/api/knowledge/utils.test.ts index 0e8debe701..7e87035a08 100644 --- a/apps/sim/app/api/knowledge/utils.test.ts +++ b/apps/sim/app/api/knowledge/utils.test.ts @@ -144,6 +144,18 @@ vi.mock('@sim/db', () => { }), transaction: vi.fn(async (fn: any) => { await fn({ + select: () => ({ + from: () => ({ + innerJoin: () => ({ + where: () => ({ + limit: () => Promise.resolve([{ id: 'doc1' }]), + }), + }), + where: () => ({ + limit: () => Promise.resolve([{}]), + }), + }), + }), delete: () => ({ where: () => Promise.resolve(), }), diff --git a/apps/sim/app/api/knowledge/utils.ts b/apps/sim/app/api/knowledge/utils.ts index 7a2f82d071..60042ccccf 100644 --- a/apps/sim/app/api/knowledge/utils.ts +++ b/apps/sim/app/api/knowledge/utils.ts @@ -56,6 +56,10 @@ export interface DocumentData { boolean1?: boolean | null boolean2?: boolean | null boolean3?: boolean | null + // Connector fields + connectorId?: string | null + sourceUrl?: string | null + 
externalId?: string | null } export interface EmbeddingData { @@ -283,9 +287,21 @@ export async function checkDocumentWriteAccess( boolean1: document.boolean1, boolean2: document.boolean2, boolean3: document.boolean3, + // Connector fields + connectorId: document.connectorId, + sourceUrl: document.sourceUrl, + externalId: document.externalId, }) .from(document) - .where(and(eq(document.id, documentId), isNull(document.deletedAt))) + .where( + and( + eq(document.id, documentId), + eq(document.knowledgeBaseId, knowledgeBaseId), + eq(document.userExcluded, false), + isNull(document.archivedAt), + isNull(document.deletedAt) + ) + ) .limit(1) if (doc.length === 0) { @@ -325,6 +341,8 @@ export async function checkDocumentAccess( and( eq(document.id, documentId), eq(document.knowledgeBaseId, knowledgeBaseId), + eq(document.userExcluded, false), + isNull(document.archivedAt), isNull(document.deletedAt) ) ) @@ -368,6 +386,8 @@ export async function checkChunkAccess( and( eq(document.id, documentId), eq(document.knowledgeBaseId, knowledgeBaseId), + eq(document.userExcluded, false), + isNull(document.archivedAt), isNull(document.deletedAt) ) ) diff --git a/apps/sim/app/api/logs/[id]/route.ts b/apps/sim/app/api/logs/[id]/route.ts index 179655ae53..494c250415 100644 --- a/apps/sim/app/api/logs/[id]/route.ts +++ b/apps/sim/app/api/logs/[id]/route.ts @@ -1,5 +1,6 @@ import { db } from '@sim/db' import { + jobExecutionLogs, permissions, workflow, workflowDeploymentVersion, @@ -74,8 +75,64 @@ export async function GET(_request: NextRequest, { params }: { params: Promise<{ .limit(1) const log = rows[0] + + // Fallback: check job_execution_logs if (!log) { - return NextResponse.json({ error: 'Not found' }, { status: 404 }) + const jobRows = await db + .select({ + id: jobExecutionLogs.id, + executionId: jobExecutionLogs.executionId, + level: jobExecutionLogs.level, + status: jobExecutionLogs.status, + trigger: jobExecutionLogs.trigger, + startedAt: jobExecutionLogs.startedAt, + 
endedAt: jobExecutionLogs.endedAt, + totalDurationMs: jobExecutionLogs.totalDurationMs, + executionData: jobExecutionLogs.executionData, + cost: jobExecutionLogs.cost, + createdAt: jobExecutionLogs.createdAt, + }) + .from(jobExecutionLogs) + .innerJoin( + permissions, + and( + eq(permissions.entityType, 'workspace'), + eq(permissions.entityId, jobExecutionLogs.workspaceId), + eq(permissions.userId, userId) + ) + ) + .where(eq(jobExecutionLogs.id, id)) + .limit(1) + + const jobLog = jobRows[0] + if (!jobLog) { + return NextResponse.json({ error: 'Not found' }, { status: 404 }) + } + + const execData = jobLog.executionData as Record | null + const response = { + id: jobLog.id, + workflowId: null, + executionId: jobLog.executionId, + deploymentVersionId: null, + deploymentVersion: null, + deploymentVersionName: null, + level: jobLog.level, + status: jobLog.status, + duration: jobLog.totalDurationMs ? `${jobLog.totalDurationMs}ms` : null, + trigger: jobLog.trigger, + createdAt: jobLog.startedAt.toISOString(), + workflow: null, + jobTitle: (execData?.trigger?.source as string) || null, + executionData: { + totalDuration: jobLog.totalDurationMs, + ...execData, + enhanced: true, + }, + cost: jobLog.cost as any, + } + + return NextResponse.json({ data: response }) } const workflowSummary = log.workflowId diff --git a/apps/sim/app/api/logs/execution/[executionId]/route.ts b/apps/sim/app/api/logs/execution/[executionId]/route.ts index 90e0747b00..4e6495b4df 100644 --- a/apps/sim/app/api/logs/execution/[executionId]/route.ts +++ b/apps/sim/app/api/logs/execution/[executionId]/route.ts @@ -1,5 +1,6 @@ import { db } from '@sim/db' import { + jobExecutionLogs, permissions, workflow, workflowExecutionLogs, @@ -60,9 +61,49 @@ export async function GET( .where(eq(workflowExecutionLogs.executionId, executionId)) .limit(1) + // Fallback: check job_execution_logs if (!workflowLog) { - logger.warn(`[${requestId}] Execution not found or access denied: ${executionId}`) - return 
NextResponse.json({ error: 'Workflow execution not found' }, { status: 404 }) + const [jobLog] = await db + .select({ + id: jobExecutionLogs.id, + executionId: jobExecutionLogs.executionId, + trigger: jobExecutionLogs.trigger, + startedAt: jobExecutionLogs.startedAt, + endedAt: jobExecutionLogs.endedAt, + totalDurationMs: jobExecutionLogs.totalDurationMs, + cost: jobExecutionLogs.cost, + executionData: jobExecutionLogs.executionData, + }) + .from(jobExecutionLogs) + .innerJoin( + permissions, + and( + eq(permissions.entityType, 'workspace'), + eq(permissions.entityId, jobExecutionLogs.workspaceId), + eq(permissions.userId, authenticatedUserId) + ) + ) + .where(eq(jobExecutionLogs.executionId, executionId)) + .limit(1) + + if (!jobLog) { + logger.warn(`[${requestId}] Execution not found or access denied: ${executionId}`) + return NextResponse.json({ error: 'Workflow execution not found' }, { status: 404 }) + } + + return NextResponse.json({ + executionId, + workflowId: null, + workflowState: null, + childWorkflowSnapshots: {}, + executionMetadata: { + trigger: jobLog.trigger, + startedAt: jobLog.startedAt.toISOString(), + endedAt: jobLog.endedAt?.toISOString(), + totalDurationMs: jobLog.totalDurationMs, + cost: jobLog.cost || null, + }, + }) } const [snapshot] = await db diff --git a/apps/sim/app/api/logs/route.ts b/apps/sim/app/api/logs/route.ts index e7080c9877..f6f631415f 100644 --- a/apps/sim/app/api/logs/route.ts +++ b/apps/sim/app/api/logs/route.ts @@ -1,5 +1,6 @@ import { db } from '@sim/db' import { + jobExecutionLogs, pausedExecutions, permissions, workflow, @@ -7,7 +8,22 @@ import { workflowExecutionLogs, } from '@sim/db/schema' import { createLogger } from '@sim/logger' -import { and, desc, eq, isNotNull, isNull, or, type SQL, sql } from 'drizzle-orm' +import { + and, + desc, + eq, + gt, + gte, + inArray, + isNotNull, + isNull, + lt, + lte, + ne, + or, + type SQL, + sql, +} from 'drizzle-orm' import { type NextRequest, NextResponse } from 'next/server' 
import { z } from 'zod' import { getSession } from '@/lib/auth' @@ -177,13 +193,29 @@ export async function GET(request: NextRequest) { conditions = and(conditions, commonFilters) } - const logs = await baseQuery + // Workflow-specific filters exclude job logs entirely + const hasWorkflowSpecificFilters = !!( + params.workflowIds || + params.folderIds || + params.workflowName || + params.folderName + ) + // If triggers filter is set and doesn't include 'mothership', skip job logs + const triggersList = params.triggers?.split(',').filter(Boolean) || [] + const triggersExcludeJobs = + triggersList.length > 0 && + !triggersList.includes('all') && + !triggersList.includes('mothership') + const includeJobLogs = !hasWorkflowSpecificFilters && !triggersExcludeJobs + + const fetchSize = params.limit + params.offset + + const workflowLogs = await baseQuery .where(and(workspaceFilter, conditions)) .orderBy(desc(workflowExecutionLogs.startedAt)) - .limit(params.limit) - .offset(params.offset) + .limit(fetchSize) - const countQuery = db + const workflowCountQuery = db .select({ count: sql`count(*)` }) .from(workflowExecutionLogs) .leftJoin( @@ -201,10 +233,141 @@ export async function GET(request: NextRequest) { ) .where(and(eq(workflowExecutionLogs.workspaceId, params.workspaceId), conditions)) - const countResult = await countQuery + // Build job log filters (subset of filters that apply to job logs) + let jobLogs: Array<{ + id: string + executionId: string + level: string + status: string + trigger: string + startedAt: Date + endedAt: Date | null + totalDurationMs: number | null + executionData: unknown + cost: unknown + createdAt: Date + jobTitle: string | null + }> = [] + let jobCount = 0 + + if (includeJobLogs) { + const jobConditions: SQL[] = [eq(jobExecutionLogs.workspaceId, params.workspaceId)] + + // Permission check + jobConditions.push( + sql`EXISTS (SELECT 1 FROM ${permissions} WHERE ${permissions.entityType} = 'workspace' AND ${permissions.entityId} = 
${jobExecutionLogs.workspaceId} AND ${permissions.userId} = ${userId})` + ) - const count = countResult[0]?.count || 0 + // Level filter + if (params.level && params.level !== 'all') { + const levels = params.level.split(',').filter(Boolean) + const jobLevelConditions: SQL[] = [] + for (const level of levels) { + if (level === 'error') { + jobLevelConditions.push(eq(jobExecutionLogs.level, 'error')) + } else if (level === 'info') { + const c = and(eq(jobExecutionLogs.level, 'info'), isNotNull(jobExecutionLogs.endedAt)) + if (c) jobLevelConditions.push(c) + } + // 'running' and 'pending' don't apply to job logs (they complete synchronously) + } + if (jobLevelConditions.length > 0) { + jobConditions.push( + jobLevelConditions.length === 1 ? jobLevelConditions[0] : or(...jobLevelConditions)! + ) + } + } + // Trigger filter + if (triggersList.length > 0 && !triggersList.includes('all')) { + jobConditions.push(inArray(jobExecutionLogs.trigger, triggersList)) + } + + // Date filters + if (params.startDate) { + jobConditions.push(gte(jobExecutionLogs.startedAt, new Date(params.startDate))) + } + if (params.endDate) { + jobConditions.push(lte(jobExecutionLogs.startedAt, new Date(params.endDate))) + } + + // Search by executionId + if (params.search) { + jobConditions.push(sql`${jobExecutionLogs.executionId} ILIKE ${`%${params.search}%`}`) + } + if (params.executionId) { + jobConditions.push(eq(jobExecutionLogs.executionId, params.executionId)) + } + + // Cost filter + if (params.costOperator && params.costValue !== undefined) { + const costField = sql`(${jobExecutionLogs.cost}->>'total')::numeric` + const ops = { + '=': sql`=`, + '>': sql`>`, + '<': sql`<`, + '>=': sql`>=`, + '<=': sql`<=`, + '!=': sql`!=`, + } as const + jobConditions.push(sql`${costField} ${ops[params.costOperator]} ${params.costValue}`) + } + + // Duration filter + if (params.durationOperator && params.durationValue !== undefined) { + const durationOps: Record< + string, + (field: typeof 
jobExecutionLogs.totalDurationMs, val: number) => SQL | undefined + > = { + '=': (f, v) => eq(f, v), + '>': (f, v) => gt(f, v), + '<': (f, v) => lt(f, v), + '>=': (f, v) => gte(f, v), + '<=': (f, v) => lte(f, v), + '!=': (f, v) => ne(f, v), + } + const durationCond = durationOps[params.durationOperator]?.( + jobExecutionLogs.totalDurationMs, + params.durationValue + ) + if (durationCond) jobConditions.push(durationCond) + } + + const jobWhere = and(...jobConditions) + + const [jobLogResults, jobCountResult] = await Promise.all([ + db + .select({ + id: jobExecutionLogs.id, + executionId: jobExecutionLogs.executionId, + level: jobExecutionLogs.level, + status: jobExecutionLogs.status, + trigger: jobExecutionLogs.trigger, + startedAt: jobExecutionLogs.startedAt, + endedAt: jobExecutionLogs.endedAt, + totalDurationMs: jobExecutionLogs.totalDurationMs, + executionData: + params.details === 'full' ? jobExecutionLogs.executionData : sql`NULL`, + cost: jobExecutionLogs.cost, + createdAt: jobExecutionLogs.createdAt, + jobTitle: sql`${jobExecutionLogs.executionData}->'trigger'->>'source'`, + }) + .from(jobExecutionLogs) + .where(jobWhere) + .orderBy(desc(jobExecutionLogs.startedAt)) + .limit(fetchSize), + db.select({ count: sql`count(*)` }).from(jobExecutionLogs).where(jobWhere), + ]) + + jobLogs = jobLogResults as typeof jobLogs + jobCount = Number(jobCountResult[0]?.count || 0) + } + + const workflowCountResult = await workflowCountQuery + const workflowCount = Number(workflowCountResult[0]?.count || 0) + const totalCount = workflowCount + jobCount + + // Transform workflow logs to the unified shape const blockExecutionsByExecution: Record = {} const createTraceSpans = (blockExecutions: any[]) => { @@ -289,7 +452,7 @@ export async function GET(request: NextRequest) { } } - const enhancedLogs = logs.map((log) => { + const transformedWorkflowLogs = workflowLogs.map((log) => { const blockExecutions = blockExecutionsByExecution[log.executionId] || [] let traceSpans = [] @@ 
-367,13 +530,60 @@ export async function GET(request: NextRequest) { (log.pausedStatus && log.pausedStatus !== 'fully_resumed'), } }) + + // Transform job logs to the same shape + const transformedJobLogs = jobLogs.map((log) => { + const execData = log.executionData as any + const costSummary = (log.cost as any) || { total: 0 } + + return { + id: log.id, + workflowId: null as string | null, + executionId: log.executionId, + deploymentVersionId: null as string | null, + deploymentVersion: null as number | null, + deploymentVersionName: null as string | null, + level: log.level, + status: log.status, + duration: log.totalDurationMs ? `${log.totalDurationMs}ms` : null, + trigger: log.trigger, + createdAt: log.startedAt.toISOString(), + files: undefined as any, + workflow: null as any, + jobTitle: log.jobTitle, + pauseSummary: { + status: null as string | null, + total: 0, + resumed: 0, + }, + executionData: + params.details === 'full' && execData + ? { + totalDuration: log.totalDurationMs, + traceSpans: execData.traceSpans || [], + blockExecutions: [], + finalOutput: execData.finalOutput, + enhanced: true, + trigger: execData.trigger, + } + : undefined, + cost: params.details === 'full' ? 
costSummary : { total: costSummary?.total || 0 }, + hasPendingPause: false, + } + }) + + // Merge, sort by createdAt (which is startedAt ISO string) desc, paginate + const allLogs = [...transformedWorkflowLogs, ...transformedJobLogs] + .sort((a, b) => new Date(b.createdAt).getTime() - new Date(a.createdAt).getTime()) + .slice(params.offset, params.offset + params.limit) + return NextResponse.json( { - data: enhancedLogs, - total: Number(count), + data: allLogs, + total: totalCount, page: Math.floor(params.offset / params.limit) + 1, pageSize: params.limit, - totalPages: Math.ceil(Number(count) / params.limit), + totalPages: Math.ceil(totalCount / params.limit), }, { status: 200 } ) diff --git a/apps/sim/app/api/mcp/copilot/route.ts b/apps/sim/app/api/mcp/copilot/route.ts index 8fdb166ae4..f3c9551aa3 100644 --- a/apps/sim/app/api/mcp/copilot/route.ts +++ b/apps/sim/app/api/mcp/copilot/route.ts @@ -18,12 +18,7 @@ import { eq, sql } from 'drizzle-orm' import { type NextRequest, NextResponse } from 'next/server' import { validateOAuthAccessToken } from '@/lib/auth/oauth-token' import { getHighestPrioritySubscription } from '@/lib/billing/core/subscription' -import { - ORCHESTRATION_TIMEOUT_MS, - SIM_AGENT_API_URL, - SIM_AGENT_VERSION, -} from '@/lib/copilot/constants' -import { orchestrateCopilotStream } from '@/lib/copilot/orchestrator' +import { ORCHESTRATION_TIMEOUT_MS, SIM_AGENT_API_URL } from '@/lib/copilot/constants' import { orchestrateSubagentStream } from '@/lib/copilot/orchestrator/subagent' import { executeToolServerSide, @@ -33,10 +28,6 @@ import { DIRECT_TOOL_DEFS, SUBAGENT_TOOL_DEFS } from '@/lib/copilot/tools/mcp/de import { env } from '@/lib/core/config/env' import { RateLimiter } from '@/lib/core/rate-limiter' import { getBaseUrl } from '@/lib/core/utils/urls' -import { - authorizeWorkflowByWorkspacePermission, - resolveWorkflowIdForUser, -} from '@/lib/workflows/utils' const logger = createLogger('CopilotMcpAPI') const mcpRateLimiter = new 
RateLimiter() @@ -669,112 +660,12 @@ async function handleDirectToolCall( } } -/** - * Build mode uses the main chat orchestrator with the 'fast' command instead of - * the subagent endpoint. In Go, 'build' is not a registered subagent — it's a mode - * (ModeFast) on the main chat processor that bypasses subagent orchestration and - * executes all tools directly. - */ -async function handleBuildToolCall( - args: Record, - userId: string, - abortSignal?: AbortSignal -): Promise { - try { - const requestText = (args.request as string) || JSON.stringify(args) - const workflowId = args.workflowId as string | undefined - - const resolved = workflowId - ? await (async () => { - const authorization = await authorizeWorkflowByWorkspacePermission({ - workflowId, - userId, - action: 'read', - }) - return authorization.allowed ? { workflowId } : null - })() - : await resolveWorkflowIdForUser(userId) - - if (!resolved?.workflowId) { - return { - content: [ - { - type: 'text', - text: JSON.stringify( - { - success: false, - error: 'workflowId is required for build. 
Call create_workflow first.', - }, - null, - 2 - ), - }, - ], - isError: true, - } - } - - const chatId = randomUUID() - - const requestPayload = { - message: requestText, - workflowId: resolved.workflowId, - userId, - model: DEFAULT_COPILOT_MODEL, - mode: 'agent', - commands: ['fast'], - messageId: randomUUID(), - version: SIM_AGENT_VERSION, - headless: true, - chatId, - source: 'mcp', - } - - const result = await orchestrateCopilotStream(requestPayload, { - userId, - workflowId: resolved.workflowId, - chatId, - autoExecuteTools: true, - timeout: 300000, - interactive: false, - abortSignal, - }) - - const responseData = { - success: result.success, - content: result.content, - toolCalls: result.toolCalls, - error: result.error, - } - - return { - content: [{ type: 'text', text: JSON.stringify(responseData, null, 2) }], - isError: !result.success, - } - } catch (error) { - logger.error('Build tool call failed', { error }) - return { - content: [ - { - type: 'text', - text: `Build failed: ${error instanceof Error ? 
error.message : String(error)}`, - }, - ], - isError: true, - } - } -} - async function handleSubagentToolCall( toolDef: (typeof SUBAGENT_TOOL_DEFS)[number], args: Record, userId: string, abortSignal?: AbortSignal ): Promise { - if (toolDef.agentId === 'build') { - return handleBuildToolCall(args, userId, abortSignal) - } - try { const requestText = (args.request as string) || diff --git a/apps/sim/app/api/mcp/discover/route.ts b/apps/sim/app/api/mcp/discover/route.ts index 600e9362f6..c386c304cc 100644 --- a/apps/sim/app/api/mcp/discover/route.ts +++ b/apps/sim/app/api/mcp/discover/route.ts @@ -1,7 +1,7 @@ import { db } from '@sim/db' import { permissions, workflowMcpServer, workspace } from '@sim/db/schema' import { createLogger } from '@sim/logger' -import { and, eq, sql } from 'drizzle-orm' +import { and, eq, isNull, sql } from 'drizzle-orm' import { type NextRequest, NextResponse } from 'next/server' import { checkHybridAuth } from '@/lib/auth/hybrid' import { getBaseUrl } from '@/lib/core/utils/urls' @@ -26,12 +26,31 @@ export async function GET(request: NextRequest) { const userId = auth.userId + if (auth.apiKeyType === 'workspace' && !auth.workspaceId) { + return NextResponse.json( + { success: false, error: 'Workspace API key missing workspace scope' }, + { status: 403 } + ) + } + const userWorkspacePermissions = await db .select({ entityId: permissions.entityId }) .from(permissions) - .where(and(eq(permissions.userId, userId), eq(permissions.entityType, 'workspace'))) + .innerJoin(workspace, eq(permissions.entityId, workspace.id)) + .where( + and( + eq(permissions.userId, userId), + eq(permissions.entityType, 'workspace'), + isNull(workspace.archivedAt) + ) + ) - const workspaceIds = userWorkspacePermissions.map((w) => w.entityId) + const workspaceIds = + auth.apiKeyType === 'workspace' && auth.workspaceId + ? 
userWorkspacePermissions + .map((w) => w.entityId) + .filter((workspaceId) => workspaceId === auth.workspaceId) + : userWorkspacePermissions.map((w) => w.entityId) if (workspaceIds.length === 0) { return NextResponse.json({ success: true, servers: [] }) @@ -49,11 +68,18 @@ export async function GET(request: NextRequest) { SELECT COUNT(*)::int FROM "workflow_mcp_tool" WHERE "workflow_mcp_tool"."server_id" = "workflow_mcp_server"."id" + AND "workflow_mcp_tool"."archived_at" IS NULL )`.as('tool_count'), }) .from(workflowMcpServer) .leftJoin(workspace, eq(workflowMcpServer.workspaceId, workspace.id)) - .where(sql`${workflowMcpServer.workspaceId} IN ${workspaceIds}`) + .where( + and( + sql`${workflowMcpServer.workspaceId} IN ${workspaceIds}`, + isNull(workflowMcpServer.deletedAt), + isNull(workspace.archivedAt) + ) + ) .orderBy(workflowMcpServer.name) const baseUrl = getBaseUrl() diff --git a/apps/sim/app/api/mcp/events/route.test.ts b/apps/sim/app/api/mcp/events/route.test.ts index 2d5fd7bded..ca9be354ce 100644 --- a/apps/sim/app/api/mcp/events/route.test.ts +++ b/apps/sim/app/api/mcp/events/route.test.ts @@ -19,6 +19,37 @@ vi.mock('@/lib/workspaces/permissions/utils', () => ({ getUserEntityPermissions: mockGetUserEntityPermissions, })) +vi.mock('@/lib/events/sse-endpoint', () => ({ + createWorkspaceSSE: (_config: any) => { + return async (request: any) => { + const session = await mockGetSession() + if (!session?.user?.id) { + return new Response('Unauthorized', { status: 401 }) + } + const url = new URL(request.url) + const workspaceId = url.searchParams.get('workspaceId') + if (!workspaceId) { + return new Response('Missing workspaceId query parameter', { status: 400 }) + } + const permissions = await mockGetUserEntityPermissions( + session.user.id, + 'workspace', + workspaceId + ) + if (!permissions) { + return new Response('Access denied to workspace', { status: 403 }) + } + return new Response(new ReadableStream({ start() {} }), { + headers: { + 'Content-Type': 
'text/event-stream', + 'Cache-Control': 'no-cache', + Connection: 'keep-alive', + }, + }) + } + }, +})) + vi.mock('@/lib/mcp/connection-manager', () => ({ mcpConnectionManager: null, })) diff --git a/apps/sim/app/api/mcp/events/route.ts b/apps/sim/app/api/mcp/events/route.ts index fee4ca65fb..61c0f4c82a 100644 --- a/apps/sim/app/api/mcp/events/route.ts +++ b/apps/sim/app/api/mcp/events/route.ts @@ -8,66 +8,19 @@ * Auth is handled via session cookies (EventSource sends cookies automatically). */ -import { createLogger } from '@sim/logger' -import type { NextRequest } from 'next/server' -import { getSession } from '@/lib/auth' -import { SSE_HEADERS } from '@/lib/core/utils/sse' +import { createWorkspaceSSE } from '@/lib/events/sse-endpoint' import { mcpConnectionManager } from '@/lib/mcp/connection-manager' import { mcpPubSub } from '@/lib/mcp/pubsub' -import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils' - -const logger = createLogger('McpEventsSSE') export const dynamic = 'force-dynamic' -const HEARTBEAT_INTERVAL_MS = 30_000 - -export async function GET(request: NextRequest) { - const session = await getSession() - if (!session?.user?.id) { - return new Response('Unauthorized', { status: 401 }) - } - - const { searchParams } = new URL(request.url) - const workspaceId = searchParams.get('workspaceId') - if (!workspaceId) { - return new Response('Missing workspaceId query parameter', { status: 400 }) - } - - const permissions = await getUserEntityPermissions(session.user.id, 'workspace', workspaceId) - if (!permissions) { - return new Response('Access denied to workspace', { status: 403 }) - } - - const encoder = new TextEncoder() - const unsubscribers: Array<() => void> = [] - let cleaned = false - - const cleanup = () => { - if (cleaned) return - cleaned = true - for (const unsub of unsubscribers) { - unsub() - } - logger.info(`SSE connection closed for workspace ${workspaceId}`) - } - - const stream = new ReadableStream({ - 
start(controller) { - const send = (eventName: string, data: Record) => { - if (cleaned) return - try { - controller.enqueue( - encoder.encode(`event: ${eventName}\ndata: ${JSON.stringify(data)}\n\n`) - ) - } catch { - // Stream already closed - } - } - - // Subscribe to external MCP server tool changes - if (mcpConnectionManager) { - const unsub = mcpConnectionManager.subscribe((event) => { +export const GET = createWorkspaceSSE({ + label: 'mcp-events', + subscriptions: [ + { + subscribe: (workspaceId, send) => { + if (!mcpConnectionManager) return () => {} + return mcpConnectionManager.subscribe((event) => { if (event.workspaceId !== workspaceId) return send('tools_changed', { source: 'external', @@ -75,12 +28,12 @@ export async function GET(request: NextRequest) { timestamp: event.timestamp, }) }) - unsubscribers.push(unsub) - } - - // Subscribe to workflow CRUD tool changes - if (mcpPubSub) { - const unsub = mcpPubSub.onWorkflowToolsChanged((event) => { + }, + }, + { + subscribe: (workspaceId, send) => { + if (!mcpPubSub) return () => {} + return mcpPubSub.onWorkflowToolsChanged((event) => { if (event.workspaceId !== workspaceId) return send('tools_changed', { source: 'workflow', @@ -88,43 +41,7 @@ export async function GET(request: NextRequest) { timestamp: Date.now(), }) }) - unsubscribers.push(unsub) - } - - // Heartbeat to keep the connection alive - const heartbeat = setInterval(() => { - if (cleaned) { - clearInterval(heartbeat) - return - } - try { - controller.enqueue(encoder.encode(': heartbeat\n\n')) - } catch { - clearInterval(heartbeat) - } - }, HEARTBEAT_INTERVAL_MS) - unsubscribers.push(() => clearInterval(heartbeat)) - - // Cleanup when client disconnects - request.signal.addEventListener( - 'abort', - () => { - cleanup() - try { - controller.close() - } catch { - // Already closed - } - }, - { once: true } - ) - - logger.info(`SSE connection opened for workspace ${workspaceId}`) - }, - cancel() { - cleanup() + }, }, - }) - - return new 
Response(stream, { headers: SSE_HEADERS }) -} + ], +}) diff --git a/apps/sim/app/api/mcp/serve/[serverId]/route.test.ts b/apps/sim/app/api/mcp/serve/[serverId]/route.test.ts index 77dd1adebf..6113799bda 100644 --- a/apps/sim/app/api/mcp/serve/[serverId]/route.test.ts +++ b/apps/sim/app/api/mcp/serve/[serverId]/route.test.ts @@ -12,6 +12,7 @@ const { mockGenerateInternalToken, mockDbSelect, mockDbFrom, + mockDbInnerJoin, mockDbWhere, mockDbLimit, fetchMock, @@ -21,6 +22,7 @@ const { mockGenerateInternalToken: vi.fn(), mockDbSelect: vi.fn(), mockDbFrom: vi.fn(), + mockDbInnerJoin: vi.fn(), mockDbWhere: vi.fn(), mockDbLimit: vi.fn(), fetchMock: vi.fn(), @@ -29,6 +31,7 @@ const { vi.mock('drizzle-orm', () => ({ and: vi.fn(), eq: vi.fn(), + isNull: vi.fn(), })) vi.mock('@sim/db', () => ({ @@ -44,6 +47,7 @@ vi.mock('@sim/db/schema', () => ({ workspaceId: 'workspaceId', isPublic: 'isPublic', createdBy: 'createdBy', + deletedAt: 'deletedAt', }, workflowMcpTool: { serverId: 'serverId', @@ -51,10 +55,16 @@ vi.mock('@sim/db/schema', () => ({ toolDescription: 'toolDescription', parameterSchema: 'parameterSchema', workflowId: 'workflowId', + archivedAt: 'archivedAt', }, workflow: { id: 'id', isDeployed: 'isDeployed', + archivedAt: 'archivedAt', + }, + workspace: { + id: 'id', + archivedAt: 'archivedAt', }, })) @@ -89,7 +99,8 @@ describe('MCP Serve Route', () => { vi.clearAllMocks() mockDbSelect.mockReturnValue({ from: mockDbFrom }) - mockDbFrom.mockReturnValue({ where: mockDbWhere }) + mockDbFrom.mockReturnValue({ innerJoin: mockDbInnerJoin, where: mockDbWhere }) + mockDbInnerJoin.mockReturnValue({ where: mockDbWhere }) mockDbWhere.mockReturnValue({ limit: mockDbLimit }) vi.stubGlobal('fetch', fetchMock) diff --git a/apps/sim/app/api/mcp/serve/[serverId]/route.ts b/apps/sim/app/api/mcp/serve/[serverId]/route.ts index 29c7012097..0be8778bc5 100644 --- a/apps/sim/app/api/mcp/serve/[serverId]/route.ts +++ b/apps/sim/app/api/mcp/serve/[serverId]/route.ts @@ -15,9 +15,9 @@ import { 
type RequestId, } from '@modelcontextprotocol/sdk/types.js' import { db } from '@sim/db' -import { workflow, workflowMcpServer, workflowMcpTool } from '@sim/db/schema' +import { workflow, workflowMcpServer, workflowMcpTool, workspace } from '@sim/db/schema' import { createLogger } from '@sim/logger' -import { and, eq } from 'drizzle-orm' +import { and, eq, isNull } from 'drizzle-orm' import { type NextRequest, NextResponse } from 'next/server' import { type AuthResult, AuthType, checkHybridAuth } from '@/lib/auth/hybrid' import { generateInternalToken } from '@/lib/auth/internal' @@ -66,7 +66,14 @@ async function getServer(serverId: string) { createdBy: workflowMcpServer.createdBy, }) .from(workflowMcpServer) - .where(eq(workflowMcpServer.id, serverId)) + .innerJoin(workspace, eq(workflowMcpServer.workspaceId, workspace.id)) + .where( + and( + eq(workflowMcpServer.id, serverId), + isNull(workflowMcpServer.deletedAt), + isNull(workspace.archivedAt) + ) + ) .limit(1) return server @@ -87,6 +94,10 @@ export async function GET(request: NextRequest, { params }: { params: Promise { @@ -262,7 +277,13 @@ async function handleToolsCall( workflowId: workflowMcpTool.workflowId, }) .from(workflowMcpTool) - .where(and(eq(workflowMcpTool.serverId, serverId), eq(workflowMcpTool.toolName, params.name))) + .where( + and( + eq(workflowMcpTool.serverId, serverId), + eq(workflowMcpTool.toolName, params.name), + isNull(workflowMcpTool.archivedAt) + ) + ) .limit(1) if (!tool) { return NextResponse.json( @@ -276,7 +297,7 @@ async function handleToolsCall( const [wf] = await db .select({ isDeployed: workflow.isDeployed }) .from(workflow) - .where(eq(workflow.id, tool.workflowId)) + .where(and(eq(workflow.id, tool.workflowId), isNull(workflow.archivedAt))) .limit(1) if (!wf?.isDeployed) { diff --git a/apps/sim/app/api/mcp/servers/[id]/route.ts b/apps/sim/app/api/mcp/servers/[id]/route.ts index 19c2609ab5..597244a970 100644 --- a/apps/sim/app/api/mcp/servers/[id]/route.ts +++ 
b/apps/sim/app/api/mcp/servers/[id]/route.ts @@ -82,11 +82,16 @@ export const PATCH = withMcpAuth<{ id: string }>('write')( ) } - // Only clear cache if URL changed (requires re-discovery) - const urlChanged = body.url && currentServer?.url !== body.url - if (urlChanged) { + const shouldClearCache = + (body.url !== undefined && currentServer?.url !== body.url) || + body.enabled !== undefined || + body.headers !== undefined || + body.timeout !== undefined || + body.retries !== undefined + + if (shouldClearCache) { await mcpService.clearCache(workspaceId) - logger.info(`[${requestId}] Cleared cache due to URL change`) + logger.info(`[${requestId}] Cleared MCP cache after server lifecycle update`) } logger.info(`[${requestId}] Successfully updated MCP server: ${serverId}`) diff --git a/apps/sim/app/api/mcp/workflow-servers/[id]/route.ts b/apps/sim/app/api/mcp/workflow-servers/[id]/route.ts index 4890dbc8f4..f5ed5371e1 100644 --- a/apps/sim/app/api/mcp/workflow-servers/[id]/route.ts +++ b/apps/sim/app/api/mcp/workflow-servers/[id]/route.ts @@ -1,7 +1,7 @@ import { db } from '@sim/db' import { workflowMcpServer, workflowMcpTool } from '@sim/db/schema' import { createLogger } from '@sim/logger' -import { and, eq } from 'drizzle-orm' +import { and, eq, isNull } from 'drizzle-orm' import type { NextRequest } from 'next/server' import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log' import { getParsedBody, withMcpAuth } from '@/lib/mcp/middleware' @@ -39,7 +39,11 @@ export const GET = withMcpAuth('read')( }) .from(workflowMcpServer) .where( - and(eq(workflowMcpServer.id, serverId), eq(workflowMcpServer.workspaceId, workspaceId)) + and( + eq(workflowMcpServer.id, serverId), + eq(workflowMcpServer.workspaceId, workspaceId), + isNull(workflowMcpServer.deletedAt) + ) ) .limit(1) @@ -50,7 +54,7 @@ export const GET = withMcpAuth('read')( const tools = await db .select() .from(workflowMcpTool) - .where(eq(workflowMcpTool.serverId, serverId)) + 
.where(and(eq(workflowMcpTool.serverId, serverId), isNull(workflowMcpTool.archivedAt))) logger.info( `[${requestId}] Found workflow MCP server: ${server.name} with ${tools.length} tools` @@ -87,7 +91,11 @@ export const PATCH = withMcpAuth('write')( .select({ id: workflowMcpServer.id }) .from(workflowMcpServer) .where( - and(eq(workflowMcpServer.id, serverId), eq(workflowMcpServer.workspaceId, workspaceId)) + and( + eq(workflowMcpServer.id, serverId), + eq(workflowMcpServer.workspaceId, workspaceId), + isNull(workflowMcpServer.deletedAt) + ) ) .limit(1) @@ -112,7 +120,7 @@ export const PATCH = withMcpAuth('write')( const [updatedServer] = await db .update(workflowMcpServer) .set(updateData) - .where(eq(workflowMcpServer.id, serverId)) + .where(and(eq(workflowMcpServer.id, serverId), isNull(workflowMcpServer.deletedAt))) .returning() logger.info(`[${requestId}] Successfully updated workflow MCP server: ${serverId}`) diff --git a/apps/sim/app/api/mcp/workflow-servers/[id]/tools/[toolId]/route.ts b/apps/sim/app/api/mcp/workflow-servers/[id]/tools/[toolId]/route.ts index 9a2d374ed8..f54caf4703 100644 --- a/apps/sim/app/api/mcp/workflow-servers/[id]/tools/[toolId]/route.ts +++ b/apps/sim/app/api/mcp/workflow-servers/[id]/tools/[toolId]/route.ts @@ -1,7 +1,7 @@ import { db } from '@sim/db' import { workflowMcpServer, workflowMcpTool } from '@sim/db/schema' import { createLogger } from '@sim/logger' -import { and, eq } from 'drizzle-orm' +import { and, eq, isNull } from 'drizzle-orm' import type { NextRequest } from 'next/server' import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log' import { getParsedBody, withMcpAuth } from '@/lib/mcp/middleware' @@ -32,7 +32,11 @@ export const GET = withMcpAuth('read')( .select({ id: workflowMcpServer.id }) .from(workflowMcpServer) .where( - and(eq(workflowMcpServer.id, serverId), eq(workflowMcpServer.workspaceId, workspaceId)) + and( + eq(workflowMcpServer.id, serverId), + eq(workflowMcpServer.workspaceId, 
workspaceId), + isNull(workflowMcpServer.deletedAt) + ) ) .limit(1) @@ -43,7 +47,13 @@ export const GET = withMcpAuth('read')( const [tool] = await db .select() .from(workflowMcpTool) - .where(and(eq(workflowMcpTool.id, toolId), eq(workflowMcpTool.serverId, serverId))) + .where( + and( + eq(workflowMcpTool.id, toolId), + eq(workflowMcpTool.serverId, serverId), + isNull(workflowMcpTool.archivedAt) + ) + ) .limit(1) if (!tool) { @@ -81,7 +91,11 @@ export const PATCH = withMcpAuth('write')( .select({ id: workflowMcpServer.id }) .from(workflowMcpServer) .where( - and(eq(workflowMcpServer.id, serverId), eq(workflowMcpServer.workspaceId, workspaceId)) + and( + eq(workflowMcpServer.id, serverId), + eq(workflowMcpServer.workspaceId, workspaceId), + isNull(workflowMcpServer.deletedAt) + ) ) .limit(1) @@ -92,7 +106,13 @@ export const PATCH = withMcpAuth('write')( const [existingTool] = await db .select({ id: workflowMcpTool.id }) .from(workflowMcpTool) - .where(and(eq(workflowMcpTool.id, toolId), eq(workflowMcpTool.serverId, serverId))) + .where( + and( + eq(workflowMcpTool.id, toolId), + eq(workflowMcpTool.serverId, serverId), + isNull(workflowMcpTool.archivedAt) + ) + ) .limit(1) if (!existingTool) { @@ -166,7 +186,11 @@ export const DELETE = withMcpAuth('write')( .select({ id: workflowMcpServer.id }) .from(workflowMcpServer) .where( - and(eq(workflowMcpServer.id, serverId), eq(workflowMcpServer.workspaceId, workspaceId)) + and( + eq(workflowMcpServer.id, serverId), + eq(workflowMcpServer.workspaceId, workspaceId), + isNull(workflowMcpServer.deletedAt) + ) ) .limit(1) diff --git a/apps/sim/app/api/mcp/workflow-servers/[id]/tools/route.ts b/apps/sim/app/api/mcp/workflow-servers/[id]/tools/route.ts index bdd9139f93..b0887aef1f 100644 --- a/apps/sim/app/api/mcp/workflow-servers/[id]/tools/route.ts +++ b/apps/sim/app/api/mcp/workflow-servers/[id]/tools/route.ts @@ -1,7 +1,7 @@ import { db } from '@sim/db' import { workflow, workflowMcpServer, workflowMcpTool } from 
'@sim/db/schema' import { createLogger } from '@sim/logger' -import { and, eq } from 'drizzle-orm' +import { and, eq, isNull } from 'drizzle-orm' import type { NextRequest } from 'next/server' import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log' import { getParsedBody, withMcpAuth } from '@/lib/mcp/middleware' @@ -33,7 +33,11 @@ export const GET = withMcpAuth('read')( .select({ id: workflowMcpServer.id }) .from(workflowMcpServer) .where( - and(eq(workflowMcpServer.id, serverId), eq(workflowMcpServer.workspaceId, workspaceId)) + and( + eq(workflowMcpServer.id, serverId), + eq(workflowMcpServer.workspaceId, workspaceId), + isNull(workflowMcpServer.deletedAt) + ) ) .limit(1) @@ -56,8 +60,11 @@ export const GET = withMcpAuth('read')( isDeployed: workflow.isDeployed, }) .from(workflowMcpTool) - .leftJoin(workflow, eq(workflowMcpTool.workflowId, workflow.id)) - .where(eq(workflowMcpTool.serverId, serverId)) + .leftJoin( + workflow, + and(eq(workflowMcpTool.workflowId, workflow.id), isNull(workflow.archivedAt)) + ) + .where(and(eq(workflowMcpTool.serverId, serverId), isNull(workflowMcpTool.archivedAt))) logger.info(`[${requestId}] Found ${tools.length} tools for server ${serverId}`) @@ -102,7 +109,11 @@ export const POST = withMcpAuth('write')( .select({ id: workflowMcpServer.id }) .from(workflowMcpServer) .where( - and(eq(workflowMcpServer.id, serverId), eq(workflowMcpServer.workspaceId, workspaceId)) + and( + eq(workflowMcpServer.id, serverId), + eq(workflowMcpServer.workspaceId, workspaceId), + isNull(workflowMcpServer.deletedAt) + ) ) .limit(1) @@ -119,7 +130,7 @@ export const POST = withMcpAuth('write')( workspaceId: workflow.workspaceId, }) .from(workflow) - .where(eq(workflow.id, body.workflowId)) + .where(and(eq(workflow.id, body.workflowId), isNull(workflow.archivedAt))) .limit(1) if (!workflowRecord) { @@ -157,7 +168,8 @@ export const POST = withMcpAuth('write')( .where( and( eq(workflowMcpTool.serverId, serverId), - 
eq(workflowMcpTool.workflowId, body.workflowId) + eq(workflowMcpTool.workflowId, body.workflowId), + isNull(workflowMcpTool.archivedAt) ) ) .limit(1) diff --git a/apps/sim/app/api/mcp/workflow-servers/route.ts b/apps/sim/app/api/mcp/workflow-servers/route.ts index 2751594132..185c551b27 100644 --- a/apps/sim/app/api/mcp/workflow-servers/route.ts +++ b/apps/sim/app/api/mcp/workflow-servers/route.ts @@ -1,7 +1,7 @@ import { db } from '@sim/db' import { workflow, workflowMcpServer, workflowMcpTool } from '@sim/db/schema' import { createLogger } from '@sim/logger' -import { eq, inArray, sql } from 'drizzle-orm' +import { and, eq, inArray, isNull, sql } from 'drizzle-orm' import type { NextRequest } from 'next/server' import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log' import { getParsedBody, withMcpAuth } from '@/lib/mcp/middleware' @@ -37,10 +37,13 @@ export const GET = withMcpAuth('read')( SELECT COUNT(*)::int FROM "workflow_mcp_tool" WHERE "workflow_mcp_tool"."server_id" = "workflow_mcp_server"."id" + AND "workflow_mcp_tool"."archived_at" IS NULL )`.as('tool_count'), }) .from(workflowMcpServer) - .where(eq(workflowMcpServer.workspaceId, workspaceId)) + .where( + and(eq(workflowMcpServer.workspaceId, workspaceId), isNull(workflowMcpServer.deletedAt)) + ) const serverIds = servers.map((s) => s.id) const tools = @@ -51,7 +54,12 @@ export const GET = withMcpAuth('read')( toolName: workflowMcpTool.toolName, }) .from(workflowMcpTool) - .where(inArray(workflowMcpTool.serverId, serverIds)) + .where( + and( + inArray(workflowMcpTool.serverId, serverIds), + isNull(workflowMcpTool.archivedAt) + ) + ) : [] const toolNamesByServer: Record = {} @@ -133,7 +141,7 @@ export const POST = withMcpAuth('write')( workspaceId: workflow.workspaceId, }) .from(workflow) - .where(inArray(workflow.id, workflowIds)) + .where(and(inArray(workflow.id, workflowIds), isNull(workflow.archivedAt))) for (const workflowRecord of workflows) { if (workflowRecord.workspaceId !== 
workspaceId) { diff --git a/apps/sim/app/api/mothership/chat/route.ts b/apps/sim/app/api/mothership/chat/route.ts new file mode 100644 index 0000000000..b351ddfb73 --- /dev/null +++ b/apps/sim/app/api/mothership/chat/route.ts @@ -0,0 +1,345 @@ +import { db } from '@sim/db' +import { copilotChats } from '@sim/db/schema' +import { createLogger } from '@sim/logger' +import { eq, sql } from 'drizzle-orm' +import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' +import { getSession } from '@/lib/auth' +import { resolveOrCreateChat } from '@/lib/copilot/chat-lifecycle' +import { buildCopilotRequestPayload } from '@/lib/copilot/chat-payload' +import { createSSEStream, SSE_RESPONSE_HEADERS } from '@/lib/copilot/chat-streaming' +import type { OrchestratorResult } from '@/lib/copilot/orchestrator/types' +import { processContextsServer, resolveActiveResourceContext } from '@/lib/copilot/process-contents' +import { createRequestTracker, createUnauthorizedResponse } from '@/lib/copilot/request-helpers' +import { taskPubSub } from '@/lib/copilot/task-events' +import { generateWorkspaceContext } from '@/lib/copilot/workspace-context' +import { + assertActiveWorkspaceAccess, + getUserEntityPermissions, +} from '@/lib/workspaces/permissions/utils' + +const logger = createLogger('MothershipChatAPI') + +const FileAttachmentSchema = z.object({ + id: z.string(), + key: z.string(), + filename: z.string(), + media_type: z.string(), + size: z.number(), +}) + +const ResourceAttachmentSchema = z.object({ + type: z.enum(['workflow', 'table', 'file', 'knowledgebase']), + id: z.string().min(1), +}) + +const MothershipMessageSchema = z.object({ + message: z.string().min(1, 'Message is required'), + workspaceId: z.string().min(1, 'workspaceId is required'), + userMessageId: z.string().optional(), + chatId: z.string().optional(), + createNewChat: z.boolean().optional().default(false), + fileAttachments: z.array(FileAttachmentSchema).optional(), + userTimezone: 
z.string().optional(), + resourceAttachments: z.array(ResourceAttachmentSchema).optional(), + contexts: z + .array( + z.object({ + kind: z.enum([ + 'past_chat', + 'workflow', + 'current_workflow', + 'blocks', + 'logs', + 'workflow_block', + 'knowledge', + 'templates', + 'docs', + 'table', + 'file', + ]), + label: z.string(), + chatId: z.string().optional(), + workflowId: z.string().optional(), + knowledgeId: z.string().optional(), + blockId: z.string().optional(), + blockIds: z.array(z.string()).optional(), + templateId: z.string().optional(), + executionId: z.string().optional(), + tableId: z.string().optional(), + fileId: z.string().optional(), + }) + ) + .optional(), +}) + +/** + * POST /api/mothership/chat + * Workspace-scoped chat — no workflowId, proxies to Go /api/mothership. + */ +export async function POST(req: NextRequest) { + const tracker = createRequestTracker() + + try { + const session = await getSession() + if (!session?.user?.id) { + return createUnauthorizedResponse() + } + + const authenticatedUserId = session.user.id + const body = await req.json() + const { + message, + workspaceId, + userMessageId: providedMessageId, + chatId, + createNewChat, + fileAttachments, + contexts, + resourceAttachments, + userTimezone, + } = MothershipMessageSchema.parse(body) + + const userMessageId = providedMessageId || crypto.randomUUID() + + try { + await assertActiveWorkspaceAccess(workspaceId, authenticatedUserId) + } catch { + return NextResponse.json({ error: 'Workspace not found or access denied' }, { status: 403 }) + } + + let agentContexts: Array<{ type: string; content: string }> = [] + if (Array.isArray(contexts) && contexts.length > 0) { + try { + agentContexts = await processContextsServer( + contexts as any, + authenticatedUserId, + message, + workspaceId + ) + } catch (e) { + logger.error(`[${tracker.requestId}] Failed to process contexts`, e) + } + } + + if (Array.isArray(resourceAttachments) && resourceAttachments.length > 0) { + const results = 
await Promise.allSettled( + resourceAttachments.map((r) => + resolveActiveResourceContext(r.type, r.id, workspaceId, authenticatedUserId) + ) + ) + for (const result of results) { + if (result.status === 'fulfilled' && result.value) { + agentContexts.push(result.value) + } else if (result.status === 'rejected') { + logger.error( + `[${tracker.requestId}] Failed to resolve resource attachment`, + result.reason + ) + } + } + } + + let currentChat: any = null + let conversationHistory: any[] = [] + let actualChatId = chatId + + if (chatId || createNewChat) { + const chatResult = await resolveOrCreateChat({ + chatId, + userId: authenticatedUserId, + workspaceId, + model: 'claude-opus-4-5', + type: 'mothership', + }) + currentChat = chatResult.chat + actualChatId = chatResult.chatId || chatId + conversationHistory = Array.isArray(chatResult.conversationHistory) + ? chatResult.conversationHistory + : [] + + if (chatId && !currentChat) { + return NextResponse.json({ error: 'Chat not found' }, { status: 404 }) + } + } + + if (actualChatId) { + const userMsg = { + id: userMessageId, + role: 'user' as const, + content: message, + timestamp: new Date().toISOString(), + ...(fileAttachments && + fileAttachments.length > 0 && { + fileAttachments: fileAttachments.map((f) => ({ + id: f.id, + key: f.key, + filename: f.filename, + media_type: f.media_type, + size: f.size, + })), + }), + ...(contexts && + contexts.length > 0 && { + contexts: contexts.map((c) => ({ + kind: c.kind, + label: c.label, + ...(c.workflowId && { workflowId: c.workflowId }), + ...(c.knowledgeId && { knowledgeId: c.knowledgeId }), + ...(c.tableId && { tableId: c.tableId }), + ...(c.fileId && { fileId: c.fileId }), + })), + }), + } + + const [updated] = await db + .update(copilotChats) + .set({ + messages: sql`${copilotChats.messages} || ${JSON.stringify([userMsg])}::jsonb`, + conversationId: userMessageId, + updatedAt: new Date(), + }) + .where(eq(copilotChats.id, actualChatId)) + .returning({ messages: 
copilotChats.messages }) + + if (updated) { + const freshMessages: any[] = Array.isArray(updated.messages) ? updated.messages : [] + conversationHistory = freshMessages.filter((m: any) => m.id !== userMessageId) + taskPubSub?.publishStatusChanged({ workspaceId, chatId: actualChatId, type: 'started' }) + } + } + + const [workspaceContext, userPermission] = await Promise.all([ + generateWorkspaceContext(workspaceId, authenticatedUserId), + getUserEntityPermissions(authenticatedUserId, 'workspace', workspaceId).catch(() => null), + ]) + + const requestPayload = await buildCopilotRequestPayload( + { + message, + workspaceId, + userId: authenticatedUserId, + userMessageId, + mode: 'agent', + model: '', + contexts: agentContexts, + fileAttachments, + chatId: actualChatId, + userPermission: userPermission ?? undefined, + workspaceContext, + userTimezone, + }, + { selectedModel: '' } + ) + + const stream = createSSEStream({ + requestPayload, + userId: authenticatedUserId, + streamId: userMessageId, + chatId: actualChatId, + currentChat, + isNewChat: conversationHistory.length === 0, + message, + titleModel: 'claude-opus-4-5', + requestId: tracker.requestId, + workspaceId, + orchestrateOptions: { + userId: authenticatedUserId, + workspaceId, + chatId: actualChatId, + goRoute: '/api/mothership', + autoExecuteTools: true, + interactive: false, + onComplete: async (result: OrchestratorResult) => { + if (!actualChatId) return + + const assistantMessage: Record = { + id: crypto.randomUUID(), + role: 'assistant' as const, + content: result.content, + timestamp: new Date().toISOString(), + } + if (result.toolCalls.length > 0) { + assistantMessage.toolCalls = result.toolCalls + } + if (result.contentBlocks.length > 0) { + assistantMessage.contentBlocks = result.contentBlocks.map((block) => { + const stored: Record = { type: block.type } + if (block.content) stored.content = block.content + if (block.type === 'tool_call' && block.toolCall) { + stored.toolCall = { + id: 
block.toolCall.id, + name: block.toolCall.name, + state: + block.toolCall.result?.success !== undefined + ? block.toolCall.result.success + ? 'success' + : 'error' + : block.toolCall.status, + result: block.toolCall.result, + ...(block.calledBy ? { calledBy: block.calledBy } : {}), + } + } + return stored + }) + } + + try { + const [row] = await db + .select({ messages: copilotChats.messages }) + .from(copilotChats) + .where(eq(copilotChats.id, actualChatId)) + .limit(1) + + const msgs: any[] = Array.isArray(row?.messages) ? row.messages : [] + const userIdx = msgs.findIndex((m: any) => m.id === userMessageId) + const alreadyHasResponse = + userIdx >= 0 && + userIdx + 1 < msgs.length && + (msgs[userIdx + 1] as any)?.role === 'assistant' + + if (!alreadyHasResponse) { + await db + .update(copilotChats) + .set({ + messages: sql`${copilotChats.messages} || ${JSON.stringify([assistantMessage])}::jsonb`, + conversationId: sql`CASE WHEN ${copilotChats.conversationId} = ${userMessageId} THEN NULL ELSE ${copilotChats.conversationId} END`, + updatedAt: new Date(), + }) + .where(eq(copilotChats.id, actualChatId)) + + taskPubSub?.publishStatusChanged({ + workspaceId, + chatId: actualChatId, + type: 'completed', + }) + } + } catch (error) { + logger.error(`[${tracker.requestId}] Failed to persist chat messages`, { + chatId: actualChatId, + error: error instanceof Error ? error.message : 'Unknown error', + }) + } + }, + }, + }) + + return new Response(stream, { headers: SSE_RESPONSE_HEADERS }) + } catch (error) { + if (error instanceof z.ZodError) { + return NextResponse.json( + { error: 'Invalid request data', details: error.errors }, + { status: 400 } + ) + } + + logger.error(`[${tracker.requestId}] Error handling mothership chat:`, { + error: error instanceof Error ? error.message : 'Unknown error', + }) + + return NextResponse.json( + { error: error instanceof Error ? 
error.message : 'Internal server error' }, + { status: 500 } + ) + } +} diff --git a/apps/sim/app/api/mothership/chat/stop/route.ts b/apps/sim/app/api/mothership/chat/stop/route.ts new file mode 100644 index 0000000000..763ff9b2cf --- /dev/null +++ b/apps/sim/app/api/mothership/chat/stop/route.ts @@ -0,0 +1,110 @@ +import { db } from '@sim/db' +import { copilotChats } from '@sim/db/schema' +import { createLogger } from '@sim/logger' +import { and, eq, sql } from 'drizzle-orm' +import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' +import { getSession } from '@/lib/auth' +import { taskPubSub } from '@/lib/copilot/task-events' + +const logger = createLogger('MothershipChatStopAPI') + +const StoredToolCallSchema = z + .object({ + id: z.string().optional(), + name: z.string().optional(), + state: z.string().optional(), + params: z.record(z.unknown()).optional(), + result: z + .object({ + success: z.boolean(), + output: z.unknown().optional(), + error: z.string().optional(), + }) + .optional(), + display: z + .object({ + text: z.string().optional(), + }) + .optional(), + calledBy: z.string().optional(), + }) + .nullable() + +const ContentBlockSchema = z.object({ + type: z.string(), + content: z.string().optional(), + toolCall: StoredToolCallSchema.optional(), +}) + +const StopSchema = z.object({ + chatId: z.string(), + streamId: z.string(), + content: z.string(), + contentBlocks: z.array(ContentBlockSchema).optional(), +}) + +/** + * POST /api/mothership/chat/stop + * Persists partial assistant content when the user stops a stream mid-response. + * Clears conversationId so the server-side onComplete won't duplicate the message. 
+ */ +export async function POST(req: NextRequest) { + try { + const session = await getSession() + if (!session?.user?.id) { + return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) + } + + const { chatId, streamId, content, contentBlocks } = StopSchema.parse(await req.json()) + + const setClause: Record = { + conversationId: null, + updatedAt: new Date(), + } + + const hasContent = content.trim().length > 0 + const hasBlocks = Array.isArray(contentBlocks) && contentBlocks.length > 0 + + if (hasContent || hasBlocks) { + const assistantMessage: Record = { + id: crypto.randomUUID(), + role: 'assistant' as const, + content, + timestamp: new Date().toISOString(), + } + if (hasBlocks) { + assistantMessage.contentBlocks = contentBlocks + } + setClause.messages = sql`${copilotChats.messages} || ${JSON.stringify([assistantMessage])}::jsonb` + } + + const [updated] = await db + .update(copilotChats) + .set(setClause) + .where( + and( + eq(copilotChats.id, chatId), + eq(copilotChats.userId, session.user.id), + eq(copilotChats.conversationId, streamId) + ) + ) + .returning({ workspaceId: copilotChats.workspaceId }) + + if (updated?.workspaceId) { + taskPubSub?.publishStatusChanged({ + workspaceId: updated.workspaceId, + chatId, + type: 'completed', + }) + } + + return NextResponse.json({ success: true }) + } catch (error) { + if (error instanceof z.ZodError) { + return NextResponse.json({ error: 'Invalid request' }, { status: 400 }) + } + logger.error('Error stopping chat stream:', error) + return NextResponse.json({ error: 'Internal server error' }, { status: 500 }) + } +} diff --git a/apps/sim/app/api/mothership/chats/read/route.ts b/apps/sim/app/api/mothership/chats/read/route.ts new file mode 100644 index 0000000000..e75ffd28d3 --- /dev/null +++ b/apps/sim/app/api/mothership/chats/read/route.ts @@ -0,0 +1,43 @@ +import { db } from '@sim/db' +import { copilotChats } from '@sim/db/schema' +import { createLogger } from '@sim/logger' +import { and, eq, sql } 
from 'drizzle-orm' +import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' +import { + authenticateCopilotRequestSessionOnly, + createBadRequestResponse, + createInternalServerErrorResponse, + createUnauthorizedResponse, +} from '@/lib/copilot/request-helpers' + +const logger = createLogger('MarkTaskReadAPI') + +const MarkReadSchema = z.object({ + chatId: z.string().min(1), +}) + +export async function POST(request: NextRequest) { + try { + const { userId, isAuthenticated } = await authenticateCopilotRequestSessionOnly() + if (!isAuthenticated || !userId) { + return createUnauthorizedResponse() + } + + const body = await request.json() + const { chatId } = MarkReadSchema.parse(body) + + await db + .update(copilotChats) + .set({ lastSeenAt: sql`GREATEST(${copilotChats.updatedAt}, NOW())` }) + .where(and(eq(copilotChats.id, chatId), eq(copilotChats.userId, userId))) + + return NextResponse.json({ success: true }) + } catch (error) { + if (error instanceof z.ZodError) { + return createBadRequestResponse('chatId is required') + } + logger.error('Error marking task as read:', error) + return createInternalServerErrorResponse('Failed to mark task as read') + } +} diff --git a/apps/sim/app/api/mothership/chats/route.ts b/apps/sim/app/api/mothership/chats/route.ts new file mode 100644 index 0000000000..f9b4e1748c --- /dev/null +++ b/apps/sim/app/api/mothership/chats/route.ts @@ -0,0 +1,106 @@ +import { db } from '@sim/db' +import { copilotChats } from '@sim/db/schema' +import { createLogger } from '@sim/logger' +import { and, desc, eq } from 'drizzle-orm' +import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' +import { + authenticateCopilotRequestSessionOnly, + createBadRequestResponse, + createInternalServerErrorResponse, + createUnauthorizedResponse, +} from '@/lib/copilot/request-helpers' +import { taskPubSub } from '@/lib/copilot/task-events' +import { assertActiveWorkspaceAccess } from 
'@/lib/workspaces/permissions/utils' + +const logger = createLogger('MothershipChatsAPI') + +/** + * GET /api/mothership/chats?workspaceId=xxx + * Returns mothership (home) chats for the authenticated user in the given workspace. + */ +export async function GET(request: NextRequest) { + try { + const { userId, isAuthenticated } = await authenticateCopilotRequestSessionOnly() + if (!isAuthenticated || !userId) { + return createUnauthorizedResponse() + } + + const workspaceId = request.nextUrl.searchParams.get('workspaceId') + if (!workspaceId) { + return createBadRequestResponse('workspaceId is required') + } + + await assertActiveWorkspaceAccess(workspaceId, userId) + + const chats = await db + .select({ + id: copilotChats.id, + title: copilotChats.title, + updatedAt: copilotChats.updatedAt, + conversationId: copilotChats.conversationId, + lastSeenAt: copilotChats.lastSeenAt, + }) + .from(copilotChats) + .where( + and( + eq(copilotChats.userId, userId), + eq(copilotChats.workspaceId, workspaceId), + eq(copilotChats.type, 'mothership') + ) + ) + .orderBy(desc(copilotChats.updatedAt)) + + return NextResponse.json({ success: true, data: chats }) + } catch (error) { + logger.error('Error fetching mothership chats:', error) + return createInternalServerErrorResponse('Failed to fetch chats') + } +} + +const CreateChatSchema = z.object({ + workspaceId: z.string().min(1), +}) + +/** + * POST /api/mothership/chats + * Creates an empty mothership chat and returns its ID. 
+ */ +export async function POST(request: NextRequest) { + try { + const { userId, isAuthenticated } = await authenticateCopilotRequestSessionOnly() + if (!isAuthenticated || !userId) { + return createUnauthorizedResponse() + } + + const body = await request.json() + const { workspaceId } = CreateChatSchema.parse(body) + + await assertActiveWorkspaceAccess(workspaceId, userId) + + const now = new Date() + const [chat] = await db + .insert(copilotChats) + .values({ + userId, + workspaceId, + type: 'mothership', + title: null, + model: 'claude-opus-4-5', + messages: [], + updatedAt: now, + lastSeenAt: now, + }) + .returning({ id: copilotChats.id }) + + taskPubSub?.publishStatusChanged({ workspaceId, chatId: chat.id, type: 'created' }) + + return NextResponse.json({ success: true, id: chat.id }) + } catch (error) { + if (error instanceof z.ZodError) { + return createBadRequestResponse('workspaceId is required') + } + logger.error('Error creating mothership chat:', error) + return createInternalServerErrorResponse('Failed to create chat') + } +} diff --git a/apps/sim/app/api/mothership/events/route.ts b/apps/sim/app/api/mothership/events/route.ts new file mode 100644 index 0000000000..38abba7b33 --- /dev/null +++ b/apps/sim/app/api/mothership/events/route.ts @@ -0,0 +1,32 @@ +/** + * SSE endpoint for task status events. + * + * Pushes `task_status` events to the browser when tasks are + * started, completed, created, deleted, or renamed. + * + * Auth is handled via session cookies (EventSource sends cookies automatically). 
+ */ + +import { taskPubSub } from '@/lib/copilot/task-events' +import { createWorkspaceSSE } from '@/lib/events/sse-endpoint' + +export const dynamic = 'force-dynamic' + +export const GET = createWorkspaceSSE({ + label: 'mothership-events', + subscriptions: [ + { + subscribe: (workspaceId, send) => { + if (!taskPubSub) return () => {} + return taskPubSub.onStatusChanged((event) => { + if (event.workspaceId !== workspaceId) return + send('task_status', { + chatId: event.chatId, + type: event.type, + timestamp: Date.now(), + }) + }) + }, + }, + ], +}) diff --git a/apps/sim/app/api/mothership/execute/route.ts b/apps/sim/app/api/mothership/execute/route.ts new file mode 100644 index 0000000000..f7f2e72d71 --- /dev/null +++ b/apps/sim/app/api/mothership/execute/route.ts @@ -0,0 +1,126 @@ +import { createLogger } from '@sim/logger' +import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' +import { checkInternalAuth } from '@/lib/auth/hybrid' +import { buildIntegrationToolSchemas } from '@/lib/copilot/chat-payload' +import { orchestrateCopilotStream } from '@/lib/copilot/orchestrator' +import { generateWorkspaceContext } from '@/lib/copilot/workspace-context' +import { + assertActiveWorkspaceAccess, + getUserEntityPermissions, +} from '@/lib/workspaces/permissions/utils' + +const logger = createLogger('MothershipExecuteAPI') + +const MessageSchema = z.object({ + role: z.enum(['system', 'user', 'assistant']), + content: z.string(), +}) + +const ExecuteRequestSchema = z.object({ + messages: z.array(MessageSchema).min(1, 'At least one message is required'), + responseFormat: z.any().optional(), + workspaceId: z.string().min(1, 'workspaceId is required'), + userId: z.string().min(1, 'userId is required'), + chatId: z.string().optional(), +}) + +/** + * POST /api/mothership/execute + * + * Non-streaming endpoint for Mothership block execution within workflows. + * Called by the executor via internal JWT auth, not by the browser directly. 
+ * Consumes the Go SSE stream internally and returns a single JSON response. + */ +export async function POST(req: NextRequest) { + try { + const auth = await checkInternalAuth(req, { requireWorkflowId: false }) + if (!auth.success) { + return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 }) + } + + const body = await req.json() + const { messages, responseFormat, workspaceId, userId, chatId } = + ExecuteRequestSchema.parse(body) + + await assertActiveWorkspaceAccess(workspaceId, userId) + + const effectiveChatId = chatId || crypto.randomUUID() + const [workspaceContext, integrationTools, userPermission] = await Promise.all([ + generateWorkspaceContext(workspaceId, userId), + buildIntegrationToolSchemas(userId), + getUserEntityPermissions(userId, 'workspace', workspaceId).catch(() => null), + ]) + + const requestPayload: Record<string, unknown> = { + messages, + responseFormat, + userId, + chatId: effectiveChatId, + mode: 'agent', + messageId: crypto.randomUUID(), + isHosted: true, + workspaceContext, + ...(integrationTools.length > 0 ? { integrationTools } : {}), + ...(userPermission ? { userPermission } : {}), + } + + const result = await orchestrateCopilotStream(requestPayload, { + userId, + workspaceId, + chatId: effectiveChatId, + goRoute: '/api/mothership/execute', + autoExecuteTools: true, + interactive: false, + }) + + if (!result.success) { + logger.error('Mothership execute failed', { + error: result.error, + errors: result.errors, + }) + return NextResponse.json( + { + error: result.error || 'Mothership execution failed', + content: result.content || '', + }, + { status: 500 } + ) + } + + const clientToolNames = new Set(integrationTools.map((t) => t.name)) + const clientToolCalls = (result.toolCalls || []).filter( + (tc: { name: string }) => clientToolNames.has(tc.name) || tc.name.startsWith('mcp-') + ) + + return NextResponse.json({ + content: result.content, + model: 'mothership', + tokens: result.usage + ?
{ + prompt: result.usage.prompt, + completion: result.usage.completion, + total: (result.usage.prompt || 0) + (result.usage.completion || 0), + } + : {}, + cost: result.cost || undefined, + toolCalls: clientToolCalls, + }) + } catch (error) { + if (error instanceof z.ZodError) { + return NextResponse.json( + { error: 'Invalid request data', details: error.errors }, + { status: 400 } + ) + } + + logger.error('Mothership execute error', { + error: error instanceof Error ? error.message : 'Unknown error', + }) + + return NextResponse.json( + { error: error instanceof Error ? error.message : 'Internal server error' }, + { status: 500 } + ) + } +} diff --git a/apps/sim/app/api/organizations/[id]/invitations/[invitationId]/route.ts b/apps/sim/app/api/organizations/[id]/invitations/[invitationId]/route.ts index 3532981267..d7b1df2a77 100644 --- a/apps/sim/app/api/organizations/[id]/invitations/[invitationId]/route.ts +++ b/apps/sim/app/api/organizations/[id]/invitations/[invitationId]/route.ts @@ -23,6 +23,7 @@ import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log' import { getSession } from '@/lib/auth' import { hasAccessControlAccess } from '@/lib/billing' import { syncUsageLimitsFromSubscription } from '@/lib/billing/core/usage' +import { isOrgPlan } from '@/lib/billing/plan-helpers' import { requireStripeClient } from '@/lib/billing/stripe-client' import { getBaseUrl } from '@/lib/core/utils/urls' import { syncWorkspaceEnvCredentials } from '@/lib/credentials/environment' @@ -325,7 +326,7 @@ export async function PUT( .limit(1) const orgSub = orgSubs[0] - const orgIsPaid = orgSub && (orgSub.plan === 'team' || orgSub.plan === 'enterprise') + const orgIsPaid = orgSub && isOrgPlan(orgSub.plan) if (orgIsPaid) { const userId = session.user.id diff --git a/apps/sim/app/api/organizations/[id]/seats/route.ts b/apps/sim/app/api/organizations/[id]/seats/route.ts index eaadf5717a..f8ccc35221 100644 --- a/apps/sim/app/api/organizations/[id]/seats/route.ts 
+++ b/apps/sim/app/api/organizations/[id]/seats/route.ts @@ -6,6 +6,7 @@ import { type NextRequest, NextResponse } from 'next/server' import { z } from 'zod' import { getSession } from '@/lib/auth' import { getPlanPricing } from '@/lib/billing/core/billing' +import { isTeam } from '@/lib/billing/plan-helpers' import { requireStripeClient } from '@/lib/billing/stripe-client' import { isBillingEnabled } from '@/lib/core/config/feature-flags' @@ -75,7 +76,7 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{ const orgSubscription = subscriptionRecord[0] // Only team plans support seat changes (not enterprise - those are handled manually) - if (orgSubscription.plan !== 'team') { + if (!isTeam(orgSubscription.plan)) { return NextResponse.json( { error: 'Seat changes are only available for Team plans' }, { status: 400 } @@ -174,7 +175,7 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{ .where(eq(subscription.id, orgSubscription.id)) // Update orgUsageLimit to reflect new seat count (seats × basePrice as minimum) - const { basePrice } = getPlanPricing('team') + const { basePrice } = getPlanPricing(orgSubscription.plan) const newMinimumLimit = newSeatCount * basePrice const orgData = await db diff --git a/apps/sim/app/api/referral-code/redeem/route.ts b/apps/sim/app/api/referral-code/redeem/route.ts index be3cbac902..89b109dc6e 100644 --- a/apps/sim/app/api/referral-code/redeem/route.ts +++ b/apps/sim/app/api/referral-code/redeem/route.ts @@ -24,6 +24,7 @@ import { z } from 'zod' import { getSession } from '@/lib/auth' import { getHighestPrioritySubscription } from '@/lib/billing/core/subscription' import { applyBonusCredits } from '@/lib/billing/credits/bonus' +import { isEnterprise, isTeam } from '@/lib/billing/plan-helpers' const logger = createLogger('ReferralCodeRedemption') @@ -43,15 +44,15 @@ export async function POST(request: Request) { const subscription = await 
getHighestPrioritySubscription(session.user.id) - if (subscription?.plan === 'enterprise') { + if (isEnterprise(subscription?.plan)) { return NextResponse.json({ redeemed: false, error: 'Enterprise accounts cannot redeem referral codes', }) } - const isTeam = subscription?.plan === 'team' - const orgId = isTeam ? subscription.referenceId : null + const isTeamSub = isTeam(subscription?.plan) + const orgId = isTeamSub ? subscription!.referenceId : null const normalizedCode = code.trim().toUpperCase() diff --git a/apps/sim/app/api/schedules/[id]/route.test.ts b/apps/sim/app/api/schedules/[id]/route.test.ts index ca0e723be5..c970e41714 100644 --- a/apps/sim/app/api/schedules/[id]/route.test.ts +++ b/apps/sim/app/api/schedules/[id]/route.test.ts @@ -24,11 +24,22 @@ vi.mock('@sim/db', () => databaseMock) vi.mock('@sim/db/schema', () => ({ workflow: { id: 'id', userId: 'userId', workspaceId: 'workspaceId' }, - workflowSchedule: { id: 'id', workflowId: 'workflowId', status: 'status' }, + workflowSchedule: { + id: 'id', + workflowId: 'workflowId', + status: 'status', + cronExpression: 'cronExpression', + timezone: 'timezone', + sourceType: 'sourceType', + sourceWorkspaceId: 'sourceWorkspaceId', + archivedAt: 'archivedAt', + }, })) vi.mock('drizzle-orm', () => ({ + and: vi.fn(), eq: vi.fn(), + isNull: vi.fn(), })) vi.mock('@/lib/core/utils/request', () => requestUtilsMock) @@ -100,13 +111,13 @@ describe('Schedule PUT API (Reactivate)', () => { }) describe('Request Validation', () => { - it('returns 400 when action is not reactivate', async () => { + it('returns 400 when action is not a valid enum value', async () => { mockDbChain([ [{ id: 'sched-1', workflowId: 'wf-1', status: 'disabled' }], [{ userId: 'user-1', workspaceId: null }], ]) - const res = await PUT(createRequest({ action: 'disable' }), createParams('sched-1')) + const res = await PUT(createRequest({ action: 'invalid-action' }), createParams('sched-1')) expect(res.status).toBe(400) const data = await res.json() 
diff --git a/apps/sim/app/api/schedules/[id]/route.ts b/apps/sim/app/api/schedules/[id]/route.ts index eb65f07b53..901e91392e 100644 --- a/apps/sim/app/api/schedules/[id]/route.ts +++ b/apps/sim/app/api/schedules/[id]/route.ts @@ -1,7 +1,7 @@ import { db } from '@sim/db' import { workflowSchedule } from '@sim/db/schema' import { createLogger } from '@sim/logger' -import { eq } from 'drizzle-orm' +import { and, eq, isNull } from 'drizzle-orm' import { type NextRequest, NextResponse } from 'next/server' import { z } from 'zod' import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log' @@ -9,18 +9,100 @@ import { getSession } from '@/lib/auth' import { generateRequestId } from '@/lib/core/utils/request' import { validateCronExpression } from '@/lib/workflows/schedules/utils' import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils' +import { verifyWorkspaceMembership } from '@/app/api/workflows/utils' const logger = createLogger('ScheduleAPI') export const dynamic = 'force-dynamic' -const scheduleUpdateSchema = z.object({ - action: z.literal('reactivate'), -}) +const scheduleUpdateSchema = z.discriminatedUnion('action', [ + z.object({ action: z.literal('reactivate') }), + z.object({ action: z.literal('disable') }), + z.object({ + action: z.literal('update'), + title: z.string().min(1).optional(), + prompt: z.string().min(1).optional(), + cronExpression: z.string().optional(), + timezone: z.string().optional(), + lifecycle: z.enum(['persistent', 'until_complete']).optional(), + maxRuns: z.number().nullable().optional(), + }), +]) + +type ScheduleRow = { + id: string + workflowId: string | null + status: string + cronExpression: string | null + timezone: string | null + sourceType: string | null + sourceWorkspaceId: string | null +} + +async function fetchAndAuthorize( + requestId: string, + scheduleId: string, + userId: string, + action: 'read' | 'write' +): Promise<{ schedule: ScheduleRow; workspaceId: string | null } | 
NextResponse> { + const [schedule] = await db + .select({ + id: workflowSchedule.id, + workflowId: workflowSchedule.workflowId, + status: workflowSchedule.status, + cronExpression: workflowSchedule.cronExpression, + timezone: workflowSchedule.timezone, + sourceType: workflowSchedule.sourceType, + sourceWorkspaceId: workflowSchedule.sourceWorkspaceId, + }) + .from(workflowSchedule) + .where(and(eq(workflowSchedule.id, scheduleId), isNull(workflowSchedule.archivedAt))) + .limit(1) + + if (!schedule) { + logger.warn(`[${requestId}] Schedule not found: ${scheduleId}`) + return NextResponse.json({ error: 'Schedule not found' }, { status: 404 }) + } + + if (schedule.sourceType === 'job') { + if (!schedule.sourceWorkspaceId) { + return NextResponse.json({ error: 'Job has no workspace' }, { status: 400 }) + } + const permission = await verifyWorkspaceMembership(userId, schedule.sourceWorkspaceId) + const canWrite = permission === 'admin' || permission === 'write' + if (!permission || (action === 'write' && !canWrite)) { + return NextResponse.json({ error: 'Not authorized' }, { status: 403 }) + } + return { schedule, workspaceId: schedule.sourceWorkspaceId } + } + + if (!schedule.workflowId) { + logger.warn(`[${requestId}] Schedule has no workflow: ${scheduleId}`) + return NextResponse.json({ error: 'Schedule has no associated workflow' }, { status: 400 }) + } + + const authorization = await authorizeWorkflowByWorkspacePermission({ + workflowId: schedule.workflowId, + userId, + action, + }) + + if (!authorization.workflow) { + logger.warn(`[${requestId}] Workflow not found for schedule: ${scheduleId}`) + return NextResponse.json({ error: 'Workflow not found' }, { status: 404 }) + } + + if (!authorization.allowed) { + logger.warn(`[${requestId}] User not authorized to modify schedule: ${scheduleId}`) + return NextResponse.json( + { error: authorization.message || 'Not authorized to modify this schedule' }, + { status: authorization.status } + ) + } + + return { schedule, 
workspaceId: authorization.workflow.workspaceId ?? null } +} -/** - * Reactivate a disabled schedule - */ export async function PUT(request: NextRequest, { params }: { params: Promise<{ id: string }> }) { const requestId = generateRequestId() @@ -40,44 +122,103 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{ return NextResponse.json({ error: 'Invalid request body' }, { status: 400 }) } - const [schedule] = await db - .select({ - id: workflowSchedule.id, - workflowId: workflowSchedule.workflowId, - status: workflowSchedule.status, - cronExpression: workflowSchedule.cronExpression, - timezone: workflowSchedule.timezone, + const result = await fetchAndAuthorize(requestId, scheduleId, session.user.id, 'write') + if (result instanceof NextResponse) return result + const { schedule, workspaceId } = result + + const { action } = validation.data + + if (action === 'disable') { + if (schedule.status === 'disabled') { + return NextResponse.json({ message: 'Schedule is already disabled' }) + } + + await db + .update(workflowSchedule) + .set({ status: 'disabled', nextRunAt: null, updatedAt: new Date() }) + .where(and(eq(workflowSchedule.id, scheduleId), isNull(workflowSchedule.archivedAt))) + + logger.info(`[${requestId}] Disabled schedule: ${scheduleId}`) + + recordAudit({ + workspaceId, + actorId: session.user.id, + action: AuditAction.SCHEDULE_UPDATED, + resourceType: AuditResourceType.SCHEDULE, + resourceId: scheduleId, + actorName: session.user.name ?? undefined, + actorEmail: session.user.email ?? 
undefined, + description: `Disabled schedule ${scheduleId}`, + metadata: {}, + request, }) - .from(workflowSchedule) - .where(eq(workflowSchedule.id, scheduleId)) - .limit(1) - if (!schedule) { - logger.warn(`[${requestId}] Schedule not found: ${scheduleId}`) - return NextResponse.json({ error: 'Schedule not found' }, { status: 404 }) + return NextResponse.json({ message: 'Schedule disabled successfully' }) } - const authorization = await authorizeWorkflowByWorkspacePermission({ - workflowId: schedule.workflowId, - userId: session.user.id, - action: 'write', - }) + if (action === 'update') { + if (schedule.sourceType !== 'job') { + return NextResponse.json( + { error: 'Only standalone job schedules can be edited' }, + { status: 400 } + ) + } - if (!authorization.workflow) { - logger.warn(`[${requestId}] Workflow not found for schedule: ${scheduleId}`) - return NextResponse.json({ error: 'Workflow not found' }, { status: 404 }) - } + const updates = validation.data + const setFields: Record<string, unknown> = { updatedAt: new Date() } + + if (updates.title !== undefined) setFields.jobTitle = updates.title.trim() + if (updates.prompt !== undefined) setFields.prompt = updates.prompt.trim() + if (updates.timezone !== undefined) setFields.timezone = updates.timezone + if (updates.lifecycle !== undefined) { + setFields.lifecycle = updates.lifecycle + if (updates.lifecycle === 'persistent') { + setFields.maxRuns = null + } + } + if (updates.maxRuns !== undefined) setFields.maxRuns = updates.maxRuns + + if (updates.cronExpression !== undefined) { + const tz = updates.timezone ?? schedule.timezone ??
'UTC' + const cronResult = validateCronExpression(updates.cronExpression, tz) + if (!cronResult.isValid) { + return NextResponse.json( + { error: cronResult.error || 'Invalid cron expression' }, + { status: 400 } + ) + } + setFields.cronExpression = updates.cronExpression + if (schedule.status === 'active' && cronResult.nextRun) { + setFields.nextRunAt = cronResult.nextRun + } + } - if (!authorization.allowed) { - logger.warn(`[${requestId}] User not authorized to modify this schedule: ${scheduleId}`) - return NextResponse.json( - { error: authorization.message || 'Not authorized to modify this schedule' }, - { status: authorization.status } - ) + await db + .update(workflowSchedule) + .set(setFields) + .where(and(eq(workflowSchedule.id, scheduleId), isNull(workflowSchedule.archivedAt))) + + logger.info(`[${requestId}] Updated job schedule: ${scheduleId}`) + + recordAudit({ + workspaceId, + actorId: session.user.id, + action: AuditAction.SCHEDULE_UPDATED, + resourceType: AuditResourceType.SCHEDULE, + resourceId: scheduleId, + actorName: session.user.name ?? undefined, + actorEmail: session.user.email ?? 
undefined, + description: `Updated job schedule ${scheduleId}`, + metadata: {}, + request, + }) + + return NextResponse.json({ message: 'Schedule updated successfully' }) } + // reactivate if (schedule.status === 'active') { - return NextResponse.json({ message: 'Schedule is already active' }, { status: 200 }) + return NextResponse.json({ message: 'Schedule is already active' }) } if (!schedule.cronExpression) { @@ -96,35 +237,70 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{ await db .update(workflowSchedule) - .set({ - status: 'active', - failedCount: 0, - updatedAt: now, - nextRunAt, - }) - .where(eq(workflowSchedule.id, scheduleId)) + .set({ status: 'active', failedCount: 0, updatedAt: now, nextRunAt }) + .where(and(eq(workflowSchedule.id, scheduleId), isNull(workflowSchedule.archivedAt))) logger.info(`[${requestId}] Reactivated schedule: ${scheduleId}`) recordAudit({ - workspaceId: authorization.workflow.workspaceId ?? null, + workspaceId, actorId: session.user.id, action: AuditAction.SCHEDULE_UPDATED, resourceType: AuditResourceType.SCHEDULE, resourceId: scheduleId, actorName: session.user.name ?? undefined, actorEmail: session.user.email ?? 
undefined, - description: `Reactivated schedule for workflow ${schedule.workflowId}`, + description: `Reactivated schedule ${scheduleId}`, metadata: { cronExpression: schedule.cronExpression, timezone: schedule.timezone }, request, }) - return NextResponse.json({ - message: 'Schedule activated successfully', - nextRunAt, - }) + return NextResponse.json({ message: 'Schedule activated successfully', nextRunAt }) } catch (error) { logger.error(`[${requestId}] Error updating schedule`, error) return NextResponse.json({ error: 'Failed to update schedule' }, { status: 500 }) } } + +export async function DELETE( + request: NextRequest, + { params }: { params: Promise<{ id: string }> } +) { + const requestId = generateRequestId() + + try { + const { id: scheduleId } = await params + + const session = await getSession() + if (!session?.user?.id) { + logger.warn(`[${requestId}] Unauthorized schedule delete attempt`) + return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) + } + + const result = await fetchAndAuthorize(requestId, scheduleId, session.user.id, 'write') + if (result instanceof NextResponse) return result + const { schedule, workspaceId } = result + + await db.delete(workflowSchedule).where(eq(workflowSchedule.id, scheduleId)) + + logger.info(`[${requestId}] Deleted schedule: ${scheduleId}`) + + recordAudit({ + workspaceId, + actorId: session.user.id, + action: AuditAction.SCHEDULE_UPDATED, + resourceType: AuditResourceType.SCHEDULE, + resourceId: scheduleId, + actorName: session.user.name ?? undefined, + actorEmail: session.user.email ?? undefined, + description: `Deleted ${schedule.sourceType === 'job' ? 
'job' : 'schedule'} ${scheduleId}`, + metadata: {}, + request, + }) + + return NextResponse.json({ message: 'Schedule deleted successfully' }) + } catch (error) { + logger.error(`[${requestId}] Error deleting schedule`, error) + return NextResponse.json({ error: 'Failed to delete schedule' }, { status: 500 }) + } +} diff --git a/apps/sim/app/api/schedules/execute/route.test.ts b/apps/sim/app/api/schedules/execute/route.test.ts index c4bb541533..cfdf6c3877 100644 --- a/apps/sim/app/api/schedules/execute/route.test.ts +++ b/apps/sim/app/api/schedules/execute/route.test.ts @@ -71,6 +71,7 @@ vi.mock('@/lib/core/async-jobs', () => ({ vi.mock('drizzle-orm', () => ({ and: vi.fn((...conditions: unknown[]) => ({ type: 'and', conditions })), eq: vi.fn((field: unknown, value: unknown) => ({ field, value, type: 'eq' })), + ne: vi.fn((field: unknown, value: unknown) => ({ field, value, type: 'ne' })), lte: vi.fn((field: unknown, value: unknown) => ({ field, value, type: 'lte' })), lt: vi.fn((field: unknown, value: unknown) => ({ field, value, type: 'lt' })), not: vi.fn((condition: unknown) => ({ type: 'not', condition })), @@ -94,6 +95,7 @@ vi.mock('@sim/db', () => ({ nextRunAt: 'nextRunAt', lastQueuedAt: 'lastQueuedAt', deploymentVersionId: 'deploymentVersionId', + sourceType: 'sourceType', }, workflowDeploymentVersion: { id: 'id', @@ -165,7 +167,7 @@ describe('Scheduled Workflow Execution API Route', () => { }) it('should execute scheduled workflows with Trigger.dev disabled', async () => { - mockDbReturning.mockReturnValue(SINGLE_SCHEDULE) + mockDbReturning.mockReturnValueOnce(SINGLE_SCHEDULE).mockReturnValueOnce([]) const response = await GET(createMockRequest()) @@ -178,7 +180,7 @@ describe('Scheduled Workflow Execution API Route', () => { it('should queue schedules to Trigger.dev when enabled', async () => { mockFeatureFlags.isTriggerDevEnabled = true - mockDbReturning.mockReturnValue(SINGLE_SCHEDULE) + 
mockDbReturning.mockReturnValueOnce(SINGLE_SCHEDULE).mockReturnValueOnce([]) const response = await GET(createMockRequest()) @@ -189,7 +191,7 @@ describe('Scheduled Workflow Execution API Route', () => { }) it('should handle case with no due schedules', async () => { - mockDbReturning.mockReturnValue([]) + mockDbReturning.mockReturnValueOnce([]).mockReturnValueOnce([]) const response = await GET(createMockRequest()) @@ -200,7 +202,7 @@ describe('Scheduled Workflow Execution API Route', () => { }) it('should execute multiple schedules in parallel', async () => { - mockDbReturning.mockReturnValue(MULTIPLE_SCHEDULES) + mockDbReturning.mockReturnValueOnce(MULTIPLE_SCHEDULES).mockReturnValueOnce([]) const response = await GET(createMockRequest()) diff --git a/apps/sim/app/api/schedules/execute/route.ts b/apps/sim/app/api/schedules/execute/route.ts index fc87b07833..cef36bfb25 100644 --- a/apps/sim/app/api/schedules/execute/route.ts +++ b/apps/sim/app/api/schedules/execute/route.ts @@ -1,17 +1,33 @@ import { db, workflowDeploymentVersion, workflowSchedule } from '@sim/db' import { createLogger } from '@sim/logger' -import { and, eq, isNull, lt, lte, not, or, sql } from 'drizzle-orm' +import { and, eq, isNull, lt, lte, ne, not, or, sql } from 'drizzle-orm' import { type NextRequest, NextResponse } from 'next/server' import { v4 as uuidv4 } from 'uuid' import { verifyCronAuth } from '@/lib/auth/internal' import { getJobQueue, shouldExecuteInline } from '@/lib/core/async-jobs' import { generateRequestId } from '@/lib/core/utils/request' -import { executeScheduleJob } from '@/background/schedule-execution' +import { + executeJobInline, + executeScheduleJob, + releaseScheduleLock, +} from '@/background/schedule-execution' export const dynamic = 'force-dynamic' const logger = createLogger('ScheduledExecuteAPI') +const dueFilter = (queuedAt: Date) => + and( + isNull(workflowSchedule.archivedAt), + lte(workflowSchedule.nextRunAt, queuedAt), + not(eq(workflowSchedule.status, 
'disabled')), + ne(workflowSchedule.status, 'completed'), + or( + isNull(workflowSchedule.lastQueuedAt), + lt(workflowSchedule.lastQueuedAt, workflowSchedule.nextRunAt) + ) + ) + export async function GET(request: NextRequest) { const requestId = generateRequestId() logger.info(`[${requestId}] Scheduled execution triggered at ${new Date().toISOString()}`) @@ -24,20 +40,14 @@ export async function GET(request: NextRequest) { const queuedAt = new Date() try { + // Workflow schedules (require active deployment) const dueSchedules = await db .update(workflowSchedule) - .set({ - lastQueuedAt: queuedAt, - updatedAt: queuedAt, - }) + .set({ lastQueuedAt: queuedAt, updatedAt: queuedAt }) .where( and( - lte(workflowSchedule.nextRunAt, queuedAt), - not(eq(workflowSchedule.status, 'disabled')), - or( - isNull(workflowSchedule.lastQueuedAt), - lt(workflowSchedule.lastQueuedAt, workflowSchedule.nextRunAt) - ), + dueFilter(queuedAt), + or(eq(workflowSchedule.sourceType, 'workflow'), isNull(workflowSchedule.sourceType)), sql`${workflowSchedule.deploymentVersionId} = (select ${workflowDeploymentVersion.id} from ${workflowDeploymentVersion} where ${workflowDeploymentVersion.workflowId} = ${workflowSchedule.workflowId} and ${workflowDeploymentVersion.isActive} = true)` ) ) @@ -50,20 +60,37 @@ export async function GET(request: NextRequest) { failedCount: workflowSchedule.failedCount, nextRunAt: workflowSchedule.nextRunAt, lastQueuedAt: workflowSchedule.lastQueuedAt, + sourceType: workflowSchedule.sourceType, + }) + + // Jobs (no deployment, dispatch inline) + const dueJobs = await db + .update(workflowSchedule) + .set({ lastQueuedAt: queuedAt, updatedAt: queuedAt }) + .where(and(dueFilter(queuedAt), eq(workflowSchedule.sourceType, 'job'))) + .returning({ + id: workflowSchedule.id, + cronExpression: workflowSchedule.cronExpression, + failedCount: workflowSchedule.failedCount, + lastQueuedAt: workflowSchedule.lastQueuedAt, + sourceType: workflowSchedule.sourceType, }) - 
logger.info(`[${requestId}] Processing ${dueSchedules.length} due scheduled workflows`) + const totalCount = dueSchedules.length + dueJobs.length + logger.info( + `[${requestId}] Processing ${totalCount} due items (${dueSchedules.length} schedules, ${dueJobs.length} jobs)` + ) const jobQueue = await getJobQueue() - const queuePromises = dueSchedules.map(async (schedule) => { + const schedulePromises = dueSchedules.map(async (schedule) => { const queueTime = schedule.lastQueuedAt ?? queuedAt const executionId = uuidv4() const correlation = { executionId, requestId, source: 'schedule' as const, - workflowId: schedule.workflowId, + workflowId: schedule.workflowId!, scheduleId: schedule.id, triggerType: 'schedule', scheduledFor: schedule.nextRunAt?.toISOString(), @@ -71,7 +98,7 @@ export async function GET(request: NextRequest) { const payload = { scheduleId: schedule.id, - workflowId: schedule.workflowId, + workflowId: schedule.workflowId!, executionId, requestId, correlation, @@ -85,53 +112,91 @@ export async function GET(request: NextRequest) { try { const jobId = await jobQueue.enqueue('schedule-execution', payload, { - metadata: { workflowId: schedule.workflowId, correlation }, + metadata: { workflowId: schedule.workflowId ?? undefined, correlation }, }) logger.info( `[${requestId}] Queued schedule execution task ${jobId} for workflow ${schedule.workflowId}` ) if (shouldExecuteInline()) { - void (async () => { - try { - await jobQueue.startJob(jobId) - const output = await executeScheduleJob(payload) - await jobQueue.completeJob(jobId, output) - } catch (error) { - const errorMessage = error instanceof Error ? 
error.message : String(error) - logger.error( - `[${requestId}] Schedule execution failed for workflow ${schedule.workflowId}`, - { jobId, error: errorMessage } - ) - try { - await jobQueue.markJobFailed(jobId, errorMessage) - } catch (markFailedError) { - logger.error(`[${requestId}] Failed to mark job as failed`, { - jobId, - error: - markFailedError instanceof Error - ? markFailedError.message - : String(markFailedError), - }) + try { + await jobQueue.startJob(jobId) + const output = await executeScheduleJob(payload) + await jobQueue.completeJob(jobId, output) + } catch (error) { + const errorMessage = error instanceof Error ? error.message : String(error) + logger.error( + `[${requestId}] Schedule execution failed for workflow ${schedule.workflowId}`, + { + jobId, + error: errorMessage, } + ) + try { + await jobQueue.markJobFailed(jobId, errorMessage) + } catch (markFailedError) { + logger.error(`[${requestId}] Failed to mark job as failed`, { + jobId, + error: + markFailedError instanceof Error + ? markFailedError.message + : String(markFailedError), + }) } - })() + await releaseScheduleLock( + schedule.id, + requestId, + queuedAt, + `Failed to release lock for schedule ${schedule.id} after inline execution failure` + ) + } } } catch (error) { logger.error( `[${requestId}] Failed to queue schedule execution for workflow ${schedule.workflowId}`, error ) + await releaseScheduleLock( + schedule.id, + requestId, + queuedAt, + `Failed to release lock for schedule ${schedule.id} after queue failure` + ) + } + }) + + // Jobs always execute inline (no TriggerDev) + const jobPromises = dueJobs.map(async (job) => { + const queueTime = job.lastQueuedAt ?? 
queuedAt + const payload = { + scheduleId: job.id, + cronExpression: job.cronExpression || undefined, + failedCount: job.failedCount || 0, + now: queueTime.toISOString(), + } + + try { + await executeJobInline(payload) + } catch (error) { + logger.error(`[${requestId}] Job execution failed for ${job.id}`, { + error: error instanceof Error ? error.message : String(error), + }) + await releaseScheduleLock( + job.id, + requestId, + queuedAt, + `Failed to release lock for job ${job.id}` + ) } }) - await Promise.allSettled(queuePromises) + await Promise.allSettled([...schedulePromises, ...jobPromises]) - logger.info(`[${requestId}] Queued ${dueSchedules.length} schedule executions`) + logger.info(`[${requestId}] Processed ${totalCount} items`) return NextResponse.json({ message: 'Scheduled workflow executions processed', - executedCount: dueSchedules.length, + executedCount: totalCount, }) } catch (error: any) { logger.error(`[${requestId}] Error in scheduled execution handler`, error) diff --git a/apps/sim/app/api/schedules/route.ts b/apps/sim/app/api/schedules/route.ts index 575acd45f9..9c91530b98 100644 --- a/apps/sim/app/api/schedules/route.ts +++ b/apps/sim/app/api/schedules/route.ts @@ -1,21 +1,28 @@ import { db } from '@sim/db' -import { workflowDeploymentVersion, workflowSchedule } from '@sim/db/schema' +import { workflow, workflowDeploymentVersion, workflowSchedule } from '@sim/db/schema' import { createLogger } from '@sim/logger' import { and, eq, isNull, or } from 'drizzle-orm' import { type NextRequest, NextResponse } from 'next/server' import { getSession } from '@/lib/auth' import { generateRequestId } from '@/lib/core/utils/request' +import { validateCronExpression } from '@/lib/workflows/schedules/utils' import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils' +import { verifyWorkspaceMembership } from '@/app/api/workflows/utils' const logger = createLogger('ScheduledAPI') /** - * Get schedule information for a workflow + * Get 
schedule information for a workflow, or all schedules for a workspace. + * + * Query params (choose one): + * - workflowId + optional blockId → single schedule for one workflow + * - workspaceId → all schedules across the workspace */ export async function GET(req: NextRequest) { const requestId = generateRequestId() const url = new URL(req.url) const workflowId = url.searchParams.get('workflowId') + const workspaceId = url.searchParams.get('workspaceId') const blockId = url.searchParams.get('blockId') try { @@ -25,8 +32,15 @@ export async function GET(req: NextRequest) { return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) } + if (workspaceId) { + return handleWorkspaceSchedules(requestId, session.user.id, workspaceId) + } + if (!workflowId) { - return NextResponse.json({ error: 'Missing workflowId parameter' }, { status: 400 }) + return NextResponse.json( + { error: 'Missing workflowId or workspaceId parameter' }, + { status: 400 } + ) } const authorization = await authorizeWorkflowByWorkspacePermission({ @@ -66,6 +80,7 @@ export async function GET(req: NextRequest) { .where( and( ...conditions, + isNull(workflowSchedule.archivedAt), or( eq(workflowSchedule.deploymentVersionId, workflowDeploymentVersion.id), and(isNull(workflowDeploymentVersion.id), isNull(workflowSchedule.deploymentVersionId)) @@ -99,3 +114,175 @@ export async function GET(req: NextRequest) { return NextResponse.json({ error: 'Failed to retrieve workflow schedule' }, { status: 500 }) } } + +async function handleWorkspaceSchedules(requestId: string, userId: string, workspaceId: string) { + const hasPermission = await verifyWorkspaceMembership(userId, workspaceId) + if (!hasPermission) { + return NextResponse.json({ error: 'Not authorized' }, { status: 403 }) + } + + logger.info(`[${requestId}] Getting all schedules for workspace ${workspaceId}`) + + const [workflowRows, jobRows] = await Promise.all([ + db + .select({ + schedule: workflowSchedule, + workflowName: workflow.name, + 
workflowColor: workflow.color, + }) + .from(workflowSchedule) + .innerJoin(workflow, eq(workflow.id, workflowSchedule.workflowId)) + .leftJoin( + workflowDeploymentVersion, + and( + eq(workflowDeploymentVersion.workflowId, workflowSchedule.workflowId), + eq(workflowDeploymentVersion.isActive, true) + ) + ) + .where( + and( + eq(workflow.workspaceId, workspaceId), + isNull(workflow.archivedAt), + eq(workflowSchedule.triggerType, 'schedule'), + isNull(workflowSchedule.archivedAt), + or(eq(workflowSchedule.sourceType, 'workflow'), isNull(workflowSchedule.sourceType)), + or( + eq(workflowSchedule.deploymentVersionId, workflowDeploymentVersion.id), + and(isNull(workflowDeploymentVersion.id), isNull(workflowSchedule.deploymentVersionId)) + ) + ) + ), + db + .select({ schedule: workflowSchedule }) + .from(workflowSchedule) + .where( + and( + eq(workflowSchedule.sourceWorkspaceId, workspaceId), + eq(workflowSchedule.sourceType, 'job'), + isNull(workflowSchedule.archivedAt) + ) + ), + ]) + + const headers = new Headers() + headers.set('Cache-Control', 'no-store, max-age=0') + + const schedules = [ + ...workflowRows.map((r) => ({ + ...r.schedule, + workflowName: r.workflowName, + workflowColor: r.workflowColor, + })), + ...jobRows.map((r) => ({ + ...r.schedule, + workflowName: null, + workflowColor: null, + })), + ] + + return NextResponse.json({ schedules }, { headers }) +} + +/** + * Create a standalone scheduled job. + * + * Body: { workspaceId, title, prompt, cronExpression, timezone, lifecycle?, maxRuns?, startDate? 
} + */ +export async function POST(req: NextRequest) { + const requestId = generateRequestId() + + try { + const session = await getSession() + if (!session?.user?.id) { + logger.warn(`[${requestId}] Unauthorized schedule creation attempt`) + return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) + } + + const body = await req.json() + const { + workspaceId, + title, + prompt, + cronExpression, + timezone = 'UTC', + lifecycle = 'persistent', + maxRuns, + startDate, + } = body as { + workspaceId: string + title: string + prompt: string + cronExpression: string + timezone?: string + lifecycle?: 'persistent' | 'until_complete' + maxRuns?: number + startDate?: string + } + + if (!workspaceId || !title?.trim() || !prompt?.trim() || !cronExpression?.trim()) { + return NextResponse.json( + { error: 'Missing required fields: workspaceId, title, prompt, cronExpression' }, + { status: 400 } + ) + } + + const hasPermission = await verifyWorkspaceMembership(session.user.id, workspaceId) + if (!hasPermission) { + return NextResponse.json({ error: 'Not authorized' }, { status: 403 }) + } + + const validation = validateCronExpression(cronExpression, timezone) + if (!validation.isValid) { + return NextResponse.json( + { error: validation.error || 'Invalid cron expression' }, + { status: 400 } + ) + } + + let nextRunAt = validation.nextRun! + if (startDate) { + const start = new Date(startDate) + if (start > new Date()) { + nextRunAt = start + } + } + + const now = new Date() + const id = crypto.randomUUID() + + await db.insert(workflowSchedule).values({ + id, + cronExpression, + triggerType: 'schedule', + sourceType: 'job', + status: 'active', + timezone, + nextRunAt, + createdAt: now, + updatedAt: now, + failedCount: 0, + jobTitle: title.trim(), + prompt: prompt.trim(), + lifecycle, + maxRuns: maxRuns ?? 
null, + runCount: 0, + sourceWorkspaceId: workspaceId, + sourceUserId: session.user.id, + }) + + logger.info(`[${requestId}] Created job schedule ${id}`, { + title, + cronExpression, + timezone, + lifecycle, + }) + + return NextResponse.json( + { schedule: { id, status: 'active', cronExpression, nextRunAt } }, + { status: 201 } + ) + } catch (error) { + logger.error(`[${requestId}] Error creating schedule`, error) + return NextResponse.json({ error: 'Failed to create schedule' }, { status: 500 }) + } +} diff --git a/apps/sim/app/api/skills/route.ts b/apps/sim/app/api/skills/route.ts index cf0b76c84d..224edf44de 100644 --- a/apps/sim/app/api/skills/route.ts +++ b/apps/sim/app/api/skills/route.ts @@ -1,12 +1,9 @@ -import { db } from '@sim/db' -import { skill } from '@sim/db/schema' import { createLogger } from '@sim/logger' -import { and, desc, eq } from 'drizzle-orm' import { type NextRequest, NextResponse } from 'next/server' import { z } from 'zod' import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid' import { generateRequestId } from '@/lib/core/utils/request' -import { upsertSkills } from '@/lib/workflows/skills/operations' +import { deleteSkill, listSkills, upsertSkills } from '@/lib/workflows/skills/operations' import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils' const logger = createLogger('SkillsAPI') @@ -53,11 +50,7 @@ export async function GET(request: NextRequest) { return NextResponse.json({ error: 'Access denied' }, { status: 403 }) } - const result = await db - .select() - .from(skill) - .where(eq(skill.workspaceId, workspaceId)) - .orderBy(desc(skill.createdAt)) + const result = await listSkills({ workspaceId }) return NextResponse.json({ data: result }, { status: 200 }) } catch (error) { @@ -159,20 +152,12 @@ export async function DELETE(request: NextRequest) { return NextResponse.json({ error: 'Write permission required' }, { status: 403 }) } - const existingSkill = await db.select().from(skill).where(eq(skill.id, 
skillId)).limit(1) - - if (existingSkill.length === 0) { + const deleted = await deleteSkill({ skillId, workspaceId }) + if (!deleted) { logger.warn(`[${requestId}] Skill not found: ${skillId}`) return NextResponse.json({ error: 'Skill not found' }, { status: 404 }) } - if (existingSkill[0].workspaceId !== workspaceId) { - logger.warn(`[${requestId}] Skill ${skillId} does not belong to workspace ${workspaceId}`) - return NextResponse.json({ error: 'Skill not found' }, { status: 404 }) - } - - await db.delete(skill).where(and(eq(skill.id, skillId), eq(skill.workspaceId, workspaceId))) - logger.info(`[${requestId}] Deleted skill: ${skillId}`) return NextResponse.json({ success: true }) } catch (error) { diff --git a/apps/sim/app/api/superuser/import-workflow/route.ts b/apps/sim/app/api/superuser/import-workflow/route.ts index 3998792993..e9e4f5bbe5 100644 --- a/apps/sim/app/api/superuser/import-workflow/route.ts +++ b/apps/sim/app/api/superuser/import-workflow/route.ts @@ -1,7 +1,7 @@ import { db } from '@sim/db' import { copilotChats, workflow, workspace } from '@sim/db/schema' import { createLogger } from '@sim/logger' -import { eq } from 'drizzle-orm' +import { and, eq, isNull } from 'drizzle-orm' import { type NextRequest, NextResponse } from 'next/server' import { getSession } from '@/lib/auth' import { verifyEffectiveSuperUser } from '@/lib/templates/permissions' @@ -11,6 +11,7 @@ import { saveWorkflowToNormalizedTables, } from '@/lib/workflows/persistence/utils' import { sanitizeForExport } from '@/lib/workflows/sanitization/json-sanitizer' +import { deduplicateWorkflowName } from '@/lib/workflows/utils' const logger = createLogger('SuperUserImportWorkflow') @@ -63,7 +64,7 @@ export async function POST(request: NextRequest) { const [targetWorkspace] = await db .select({ id: workspace.id, ownerId: workspace.ownerId }) .from(workspace) - .where(eq(workspace.id, targetWorkspaceId)) + .where(and(eq(workspace.id, targetWorkspaceId), isNull(workspace.archivedAt))) 
.limit(1) if (!targetWorkspace) { @@ -119,13 +120,18 @@ export async function POST(request: NextRequest) { // Create new workflow record const newWorkflowId = crypto.randomUUID() const now = new Date() + const dedupedName = await deduplicateWorkflowName( + `[Debug Import] ${sourceWorkflow.name}`, + targetWorkspaceId, + null + ) await db.insert(workflow).values({ id: newWorkflowId, userId: session.user.id, workspaceId: targetWorkspaceId, - folderId: null, // Don't copy folder association - name: `[Debug Import] ${sourceWorkflow.name}`, + folderId: null, + name: dedupedName, description: sourceWorkflow.description, color: sourceWorkflow.color, lastSynced: now, diff --git a/apps/sim/app/api/table/[tableId]/columns/route.ts b/apps/sim/app/api/table/[tableId]/columns/route.ts new file mode 100644 index 0000000000..de69649bf0 --- /dev/null +++ b/apps/sim/app/api/table/[tableId]/columns/route.ts @@ -0,0 +1,231 @@ +import { createLogger } from '@sim/logger' +import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' +import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid' +import { generateRequestId } from '@/lib/core/utils/request' +import { + addTableColumn, + deleteColumn, + renameColumn, + updateColumnConstraints, + updateColumnType, +} from '@/lib/table' +import { + accessError, + CreateColumnSchema, + checkAccess, + DeleteColumnSchema, + normalizeColumn, + UpdateColumnSchema, +} from '@/app/api/table/utils' + +const logger = createLogger('TableColumnsAPI') + +interface ColumnsRouteParams { + params: Promise<{ tableId: string }> +} + +/** POST /api/table/[tableId]/columns - Adds a column to the table schema. 
*/ +export async function POST(request: NextRequest, { params }: ColumnsRouteParams) { + const requestId = generateRequestId() + const { tableId } = await params + + try { + const authResult = await checkSessionOrInternalAuth(request, { requireWorkflowId: false }) + if (!authResult.success || !authResult.userId) { + logger.warn(`[${requestId}] Unauthorized column creation attempt`) + return NextResponse.json({ error: 'Authentication required' }, { status: 401 }) + } + + const body = await request.json() + const validated = CreateColumnSchema.parse(body) + + const result = await checkAccess(tableId, authResult.userId, 'write') + if (!result.ok) return accessError(result, requestId, tableId) + + const { table } = result + + if (table.workspaceId !== validated.workspaceId) { + return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 }) + } + + const updatedTable = await addTableColumn(tableId, validated.column, requestId) + + return NextResponse.json({ + success: true, + data: { + columns: updatedTable.schema.columns.map(normalizeColumn), + }, + }) + } catch (error) { + if (error instanceof z.ZodError) { + return NextResponse.json( + { error: 'Invalid request data', details: error.errors }, + { status: 400 } + ) + } + + if (error instanceof Error) { + if (error.message.includes('already exists') || error.message.includes('maximum column')) { + return NextResponse.json({ error: error.message }, { status: 400 }) + } + if (error.message === 'Table not found') { + return NextResponse.json({ error: error.message }, { status: 404 }) + } + } + + logger.error(`[${requestId}] Error adding column to table ${tableId}:`, error) + return NextResponse.json({ error: 'Failed to add column' }, { status: 500 }) + } +} + +/** PATCH /api/table/[tableId]/columns - Updates a column (rename, type change, constraints). 
*/ +export async function PATCH(request: NextRequest, { params }: ColumnsRouteParams) { + const requestId = generateRequestId() + const { tableId } = await params + + try { + const authResult = await checkSessionOrInternalAuth(request, { requireWorkflowId: false }) + if (!authResult.success || !authResult.userId) { + logger.warn(`[${requestId}] Unauthorized column update attempt`) + return NextResponse.json({ error: 'Authentication required' }, { status: 401 }) + } + + const body = await request.json() + const validated = UpdateColumnSchema.parse(body) + + const result = await checkAccess(tableId, authResult.userId, 'write') + if (!result.ok) return accessError(result, requestId, tableId) + + const { table } = result + + if (table.workspaceId !== validated.workspaceId) { + return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 }) + } + + const { updates } = validated + let updatedTable = null + + if (updates.name) { + updatedTable = await renameColumn( + { tableId, oldName: validated.columnName, newName: updates.name }, + requestId + ) + } + + if (updates.type) { + updatedTable = await updateColumnType( + { tableId, columnName: updates.name ?? validated.columnName, newType: updates.type }, + requestId + ) + } + + if (updates.required !== undefined || updates.unique !== undefined) { + updatedTable = await updateColumnConstraints( + { + tableId, + columnName: updates.name ?? validated.columnName, + ...(updates.required !== undefined ? { required: updates.required } : {}), + ...(updates.unique !== undefined ? 
{ unique: updates.unique } : {}), + }, + requestId + ) + } + + if (!updatedTable) { + return NextResponse.json({ error: 'No updates specified' }, { status: 400 }) + } + + return NextResponse.json({ + success: true, + data: { + columns: updatedTable.schema.columns.map(normalizeColumn), + }, + }) + } catch (error) { + if (error instanceof z.ZodError) { + return NextResponse.json( + { error: 'Invalid request data', details: error.errors }, + { status: 400 } + ) + } + + if (error instanceof Error) { + const msg = error.message + if (msg.includes('not found') || msg.includes('Table not found')) { + return NextResponse.json({ error: msg }, { status: 404 }) + } + if ( + msg.includes('already exists') || + msg.includes('Cannot delete the last column') || + msg.includes('Cannot set column') || + msg.includes('Invalid column') || + msg.includes('exceeds maximum') || + msg.includes('incompatible') || + msg.includes('duplicate') + ) { + return NextResponse.json({ error: msg }, { status: 400 }) + } + } + + logger.error(`[${requestId}] Error updating column in table ${tableId}:`, error) + return NextResponse.json({ error: 'Failed to update column' }, { status: 500 }) + } +} + +/** DELETE /api/table/[tableId]/columns - Deletes a column from the table schema. 
*/ +export async function DELETE(request: NextRequest, { params }: ColumnsRouteParams) { + const requestId = generateRequestId() + const { tableId } = await params + + try { + const authResult = await checkSessionOrInternalAuth(request, { requireWorkflowId: false }) + if (!authResult.success || !authResult.userId) { + logger.warn(`[${requestId}] Unauthorized column deletion attempt`) + return NextResponse.json({ error: 'Authentication required' }, { status: 401 }) + } + + const body = await request.json() + const validated = DeleteColumnSchema.parse(body) + + const result = await checkAccess(tableId, authResult.userId, 'write') + if (!result.ok) return accessError(result, requestId, tableId) + + const { table } = result + + if (table.workspaceId !== validated.workspaceId) { + return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 }) + } + + const updatedTable = await deleteColumn( + { tableId, columnName: validated.columnName }, + requestId + ) + + return NextResponse.json({ + success: true, + data: { + columns: updatedTable.schema.columns.map(normalizeColumn), + }, + }) + } catch (error) { + if (error instanceof z.ZodError) { + return NextResponse.json( + { error: 'Invalid request data', details: error.errors }, + { status: 400 } + ) + } + + if (error instanceof Error) { + if (error.message.includes('not found') || error.message === 'Table not found') { + return NextResponse.json({ error: error.message }, { status: 404 }) + } + if (error.message.includes('Cannot delete') || error.message.includes('last column')) { + return NextResponse.json({ error: error.message }, { status: 400 }) + } + } + + logger.error(`[${requestId}] Error deleting column from table ${tableId}:`, error) + return NextResponse.json({ error: 'Failed to delete column' }, { status: 500 }) + } +} diff --git a/apps/sim/app/api/table/[tableId]/metadata/route.ts b/apps/sim/app/api/table/[tableId]/metadata/route.ts new file mode 100644 index 0000000000..5ae158e334 --- /dev/null +++ 
b/apps/sim/app/api/table/[tableId]/metadata/route.ts @@ -0,0 +1,65 @@ +import { createLogger } from '@sim/logger' +import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' +import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid' +import { generateRequestId } from '@/lib/core/utils/request' +import type { TableMetadata } from '@/lib/table' +import { updateTableMetadata } from '@/lib/table' +import { accessError, checkAccess } from '@/app/api/table/utils' + +const logger = createLogger('TableMetadataAPI') + +const MetadataSchema = z.object({ + workspaceId: z.string().min(1, 'Workspace ID is required'), + metadata: z.object({ + columnWidths: z.record(z.number().positive()).optional(), + }), +}) + +interface TableRouteParams { + params: Promise<{ tableId: string }> +} + +/** PUT /api/table/[tableId]/metadata - Update table UI metadata (column widths, etc.) */ +export async function PUT(request: NextRequest, { params }: TableRouteParams) { + const requestId = generateRequestId() + const { tableId } = await params + + try { + const authResult = await checkSessionOrInternalAuth(request, { requireWorkflowId: false }) + if (!authResult.success || !authResult.userId) { + logger.warn(`[${requestId}] Unauthorized metadata update attempt`) + return NextResponse.json({ error: 'Authentication required' }, { status: 401 }) + } + + const body = await request.json() + const validated = MetadataSchema.parse(body) + + const result = await checkAccess(tableId, authResult.userId, 'write') + if (!result.ok) return accessError(result, requestId, tableId) + + const { table } = result + + if (table.workspaceId !== validated.workspaceId) { + return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 }) + } + + const updated = await updateTableMetadata( + tableId, + validated.metadata, + table.metadata as TableMetadata | null + ) + + return NextResponse.json({ success: true, data: { metadata: updated } }) + } catch (error) { + if (error 
instanceof z.ZodError) { + return NextResponse.json( + { error: 'Validation error', details: error.errors }, + { status: 400 } + ) + } + + logger.error(`[${requestId}] Error updating table metadata:`, error) + return NextResponse.json({ error: 'Failed to update metadata' }, { status: 500 }) + } +} diff --git a/apps/sim/app/api/table/[tableId]/restore/route.ts b/apps/sim/app/api/table/[tableId]/restore/route.ts new file mode 100644 index 0000000000..8622f849f1 --- /dev/null +++ b/apps/sim/app/api/table/[tableId]/restore/route.ts @@ -0,0 +1,45 @@ +import { createLogger } from '@sim/logger' +import { type NextRequest, NextResponse } from 'next/server' +import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid' +import { generateRequestId } from '@/lib/core/utils/request' +import { getTableById, restoreTable } from '@/lib/table' +import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils' + +const logger = createLogger('RestoreTableAPI') + +export async function POST( + request: NextRequest, + { params }: { params: Promise<{ tableId: string }> } +) { + const requestId = generateRequestId() + const { tableId } = await params + + try { + const auth = await checkSessionOrInternalAuth(request, { requireWorkflowId: false }) + if (!auth.success || !auth.userId) { + return NextResponse.json({ error: 'Authentication required' }, { status: 401 }) + } + + const table = await getTableById(tableId, { includeArchived: true }) + if (!table) { + return NextResponse.json({ error: 'Table not found' }, { status: 404 }) + } + + const permission = await getUserEntityPermissions(auth.userId, 'workspace', table.workspaceId) + if (permission !== 'admin' && permission !== 'write') { + return NextResponse.json({ error: 'Insufficient permissions' }, { status: 403 }) + } + + await restoreTable(tableId, requestId) + + logger.info(`[${requestId}] Restored table ${tableId}`) + + return NextResponse.json({ success: true }) + } catch (error) { + logger.error(`[${requestId}] 
Error restoring table ${tableId}`, error) + return NextResponse.json( + { error: error instanceof Error ? error.message : 'Internal server error' }, + { status: 500 } + ) + } +} diff --git a/apps/sim/app/api/table/[tableId]/route.ts b/apps/sim/app/api/table/[tableId]/route.ts index 8f8e7f0df9..2341c9f8ad 100644 --- a/apps/sim/app/api/table/[tableId]/route.ts +++ b/apps/sim/app/api/table/[tableId]/route.ts @@ -3,8 +3,8 @@ import { type NextRequest, NextResponse } from 'next/server' import { z } from 'zod' import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid' import { generateRequestId } from '@/lib/core/utils/request' -import { deleteTable, type TableSchema } from '@/lib/table' -import { accessError, checkAccess, normalizeColumn, verifyTableWorkspace } from '../utils' +import { deleteTable, NAME_PATTERN, renameTable, TABLE_LIMITS, type TableSchema } from '@/lib/table' +import { accessError, checkAccess, normalizeColumn } from '@/app/api/table/utils' const logger = createLogger('TableDetailAPI') @@ -38,11 +38,7 @@ export async function GET(request: NextRequest, { params }: TableRouteParams) { const { table } = result - const isValidWorkspace = await verifyTableWorkspace(tableId, validated.workspaceId) - if (!isValidWorkspace) { - logger.warn( - `[${requestId}] Workspace ID mismatch for table ${tableId}. Provided: ${validated.workspaceId}, Actual: ${table.workspaceId}` - ) + if (table.workspaceId !== validated.workspaceId) { return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 }) } @@ -60,6 +56,7 @@ export async function GET(request: NextRequest, { params }: TableRouteParams) { schema: { columns: schemaData.columns.map(normalizeColumn), }, + metadata: table.metadata ?? null, rowCount: table.rowCount, maxRows: table.maxRows, createdAt: @@ -86,7 +83,68 @@ export async function GET(request: NextRequest, { params }: TableRouteParams) { } } -/** DELETE /api/table/[tableId] - Deletes a table and all its rows. 
*/ +const PatchTableSchema = z.object({ + workspaceId: z.string().min(1, 'Workspace ID is required'), + name: z + .string() + .min(1, 'Name is required') + .max( + TABLE_LIMITS.MAX_TABLE_NAME_LENGTH, + `Name must be at most ${TABLE_LIMITS.MAX_TABLE_NAME_LENGTH} characters` + ) + .regex( + NAME_PATTERN, + 'Name must start with letter or underscore, followed by alphanumeric or underscore' + ), +}) + +/** PATCH /api/table/[tableId] - Renames a table. */ +export async function PATCH(request: NextRequest, { params }: TableRouteParams) { + const requestId = generateRequestId() + const { tableId } = await params + + try { + const authResult = await checkSessionOrInternalAuth(request, { requireWorkflowId: false }) + if (!authResult.success || !authResult.userId) { + logger.warn(`[${requestId}] Unauthorized table rename attempt`) + return NextResponse.json({ error: 'Authentication required' }, { status: 401 }) + } + + const body = await request.json() + const validated = PatchTableSchema.parse(body) + + const result = await checkAccess(tableId, authResult.userId, 'write') + if (!result.ok) return accessError(result, requestId, tableId) + + const { table } = result + + if (table.workspaceId !== validated.workspaceId) { + return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 }) + } + + const updated = await renameTable(tableId, validated.name, requestId) + + return NextResponse.json({ + success: true, + data: { table: updated }, + }) + } catch (error) { + if (error instanceof z.ZodError) { + return NextResponse.json( + { error: 'Validation error', details: error.errors }, + { status: 400 } + ) + } + + logger.error(`[${requestId}] Error renaming table:`, error) + return NextResponse.json( + { error: error instanceof Error ? error.message : 'Failed to rename table' }, + { status: 500 } + ) + } +} + +/** DELETE /api/table/[tableId] - Archives a table. 
*/ export async function DELETE(request: NextRequest, { params }: TableRouteParams) { const requestId = generateRequestId() const { tableId } = await params @@ -108,11 +166,7 @@ export async function DELETE(request: NextRequest, { params }: TableRouteParams) const { table } = result - const isValidWorkspace = await verifyTableWorkspace(tableId, validated.workspaceId) - if (!isValidWorkspace) { - logger.warn( - `[${requestId}] Workspace ID mismatch for table ${tableId}. Provided: ${validated.workspaceId}, Actual: ${table.workspaceId}` - ) + if (table.workspaceId !== validated.workspaceId) { return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 }) } @@ -121,7 +175,7 @@ export async function DELETE(request: NextRequest, { params }: TableRouteParams) return NextResponse.json({ success: true, data: { - message: 'Table deleted successfully', + message: 'Table archived successfully', }, }) } catch (error) { diff --git a/apps/sim/app/api/table/[tableId]/rows/[rowId]/route.ts b/apps/sim/app/api/table/[tableId]/rows/[rowId]/route.ts index 15a4473283..12326141c7 100644 --- a/apps/sim/app/api/table/[tableId]/rows/[rowId]/route.ts +++ b/apps/sim/app/api/table/[tableId]/rows/[rowId]/route.ts @@ -6,9 +6,9 @@ import { type NextRequest, NextResponse } from 'next/server' import { z } from 'zod' import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid' import { generateRequestId } from '@/lib/core/utils/request' -import type { RowData, TableSchema } from '@/lib/table' -import { validateRowData } from '@/lib/table' -import { accessError, checkAccess, verifyTableWorkspace } from '../../../utils' +import type { RowData } from '@/lib/table' +import { deleteRow, updateRow } from '@/lib/table' +import { accessError, checkAccess } from '@/app/api/table/utils' const logger = createLogger('TableRowAPI') @@ -50,11 +50,7 @@ export async function GET(request: NextRequest, { params }: RowRouteParams) { const { table } = result - const isValidWorkspace = await 
verifyTableWorkspace(tableId, validated.workspaceId) - if (!isValidWorkspace) { - logger.warn( - `[${requestId}] Workspace ID mismatch for table ${tableId}. Provided: ${validated.workspaceId}, Actual: ${table.workspaceId}` - ) + if (table.workspaceId !== validated.workspaceId) { return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 }) } @@ -62,6 +58,7 @@ export async function GET(request: NextRequest, { params }: RowRouteParams) { .select({ id: userTableRows.id, data: userTableRows.data, + position: userTableRows.position, createdAt: userTableRows.createdAt, updatedAt: userTableRows.updatedAt, }) @@ -87,8 +84,11 @@ export async function GET(request: NextRequest, { params }: RowRouteParams) { row: { id: row.id, data: row.data, - createdAt: row.createdAt.toISOString(), - updatedAt: row.updatedAt.toISOString(), + position: row.position, + createdAt: + row.createdAt instanceof Date ? row.createdAt.toISOString() : String(row.createdAt), + updatedAt: + row.updatedAt instanceof Date ? row.updatedAt.toISOString() : String(row.updatedAt), }, }, }) @@ -116,7 +116,13 @@ export async function PATCH(request: NextRequest, { params }: RowRouteParams) { return NextResponse.json({ error: 'Authentication required' }, { status: 401 }) } - const body: unknown = await request.json() + let body: unknown + try { + body = await request.json() + } catch { + return NextResponse.json({ error: 'Request body must be valid JSON' }, { status: 400 }) + } + const validated = UpdateRowSchema.parse(body) const result = await checkAccess(tableId, authResult.userId, 'write') @@ -124,15 +130,10 @@ export async function PATCH(request: NextRequest, { params }: RowRouteParams) { const { table } = result - const isValidWorkspace = await verifyTableWorkspace(tableId, validated.workspaceId) - if (!isValidWorkspace) { - logger.warn( - `[${requestId}] Workspace ID mismatch for table ${tableId}. 
Provided: ${validated.workspaceId}, Actual: ${table.workspaceId}` - ) + if (table.workspaceId !== validated.workspaceId) { return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 }) } - // Fetch existing row to support partial updates const [existingRow] = await db .select({ data: userTableRows.data }) .from(userTableRows) @@ -149,42 +150,21 @@ export async function PATCH(request: NextRequest, { params }: RowRouteParams) { return NextResponse.json({ error: 'Row not found' }, { status: 404 }) } - // Merge existing data with incoming partial data (incoming takes precedence) const mergedData = { ...(existingRow.data as RowData), ...(validated.data as RowData), } - const validation = await validateRowData({ - rowData: mergedData, - schema: table.schema as TableSchema, - tableId, - excludeRowId: rowId, - }) - if (!validation.valid) return validation.response - - const now = new Date() - - const [updatedRow] = await db - .update(userTableRows) - .set({ + const updatedRow = await updateRow( + { + tableId, + rowId, data: mergedData, - updatedAt: now, - }) - .where( - and( - eq(userTableRows.id, rowId), - eq(userTableRows.tableId, tableId), - eq(userTableRows.workspaceId, validated.workspaceId) - ) - ) - .returning() - - if (!updatedRow) { - return NextResponse.json({ error: 'Row not found' }, { status: 404 }) - } - - logger.info(`[${requestId}] Updated row ${rowId} in table ${tableId}`) + workspaceId: validated.workspaceId, + }, + table, + requestId + ) return NextResponse.json({ success: true, @@ -192,8 +172,15 @@ export async function PATCH(request: NextRequest, { params }: RowRouteParams) { row: { id: updatedRow.id, data: updatedRow.data, - createdAt: updatedRow.createdAt.toISOString(), - updatedAt: updatedRow.updatedAt.toISOString(), + position: updatedRow.position, + createdAt: + updatedRow.createdAt instanceof Date + ? updatedRow.createdAt.toISOString() + : updatedRow.createdAt, + updatedAt: + updatedRow.updatedAt instanceof Date + ? 
updatedRow.updatedAt.toISOString() + : updatedRow.updatedAt, }, message: 'Row updated successfully', }, @@ -206,6 +193,22 @@ export async function PATCH(request: NextRequest, { params }: RowRouteParams) { ) } + const errorMessage = error instanceof Error ? error.message : String(error) + + if (errorMessage === 'Row not found') { + return NextResponse.json({ error: errorMessage }, { status: 404 }) + } + + if ( + errorMessage.includes('Row size exceeds') || + errorMessage.includes('Schema validation') || + errorMessage.includes('must be unique') || + errorMessage.includes('Unique constraint violation') || + errorMessage.includes('Cannot set unique column') + ) { + return NextResponse.json({ error: errorMessage }, { status: 400 }) + } + logger.error(`[${requestId}] Error updating row:`, error) return NextResponse.json({ error: 'Failed to update row' }, { status: 500 }) } @@ -222,7 +225,13 @@ export async function DELETE(request: NextRequest, { params }: RowRouteParams) { return NextResponse.json({ error: 'Authentication required' }, { status: 401 }) } - const body: unknown = await request.json() + let body: unknown + try { + body = await request.json() + } catch { + return NextResponse.json({ error: 'Request body must be valid JSON' }, { status: 400 }) + } + const validated = DeleteRowSchema.parse(body) const result = await checkAccess(tableId, authResult.userId, 'write') @@ -230,30 +239,11 @@ export async function DELETE(request: NextRequest, { params }: RowRouteParams) { const { table } = result - const isValidWorkspace = await verifyTableWorkspace(tableId, validated.workspaceId) - if (!isValidWorkspace) { - logger.warn( - `[${requestId}] Workspace ID mismatch for table ${tableId}. 
Provided: ${validated.workspaceId}, Actual: ${table.workspaceId}` - ) + if (table.workspaceId !== validated.workspaceId) { return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 }) } - const [deletedRow] = await db - .delete(userTableRows) - .where( - and( - eq(userTableRows.id, rowId), - eq(userTableRows.tableId, tableId), - eq(userTableRows.workspaceId, validated.workspaceId) - ) - ) - .returning() - - if (!deletedRow) { - return NextResponse.json({ error: 'Row not found' }, { status: 404 }) - } - - logger.info(`[${requestId}] Deleted row ${rowId} from table ${tableId}`) + await deleteRow(tableId, rowId, validated.workspaceId, requestId) return NextResponse.json({ success: true, @@ -270,6 +260,12 @@ export async function DELETE(request: NextRequest, { params }: RowRouteParams) { ) } + const errorMessage = error instanceof Error ? error.message : String(error) + + if (errorMessage === 'Row not found') { + return NextResponse.json({ error: errorMessage }, { status: 404 }) + } + logger.error(`[${requestId}] Error deleting row:`, error) return NextResponse.json({ error: 'Failed to delete row' }, { status: 500 }) } diff --git a/apps/sim/app/api/table/[tableId]/rows/route.ts b/apps/sim/app/api/table/[tableId]/rows/route.ts index 47bd0fe1a5..305a55a885 100644 --- a/apps/sim/app/api/table/[tableId]/rows/route.ts +++ b/apps/sim/app/api/table/[tableId]/rows/route.ts @@ -8,32 +8,44 @@ import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid' import { generateRequestId } from '@/lib/core/utils/request' import type { Filter, RowData, Sort, TableSchema } from '@/lib/table' import { - checkUniqueConstraintsDb, - getUniqueColumns, + batchInsertRows, + batchUpdateRows, + deleteRowsByFilter, + deleteRowsByIds, + insertRow, TABLE_LIMITS, USER_TABLE_ROWS_SQL_NAME, + updateRowsByFilter, validateBatchRows, - validateRowAgainstSchema, validateRowData, validateRowSize, } from '@/lib/table' import { buildFilterClause, buildSortClause } from '@/lib/table/sql' 
-import { accessError, checkAccess } from '../../utils' +import { accessError, checkAccess } from '@/app/api/table/utils' const logger = createLogger('TableRowsAPI') const InsertRowSchema = z.object({ workspaceId: z.string().min(1, 'Workspace ID is required'), data: z.record(z.unknown(), { required_error: 'Row data is required' }), + position: z.number().int().min(0).optional(), }) -const BatchInsertRowsSchema = z.object({ - workspaceId: z.string().min(1, 'Workspace ID is required'), - rows: z - .array(z.record(z.unknown()), { required_error: 'Rows array is required' }) - .min(1, 'At least one row is required') - .max(1000, 'Cannot insert more than 1000 rows per batch'), -}) +const BatchInsertRowsSchema = z + .object({ + workspaceId: z.string().min(1, 'Workspace ID is required'), + rows: z + .array(z.record(z.unknown()), { required_error: 'Rows array is required' }) + .min(1, 'At least one row is required') + .max(1000, 'Cannot insert more than 1000 rows per batch'), + positions: z.array(z.number().int().min(0)).max(1000).optional(), + }) + .refine((d) => !d.positions || d.positions.length === d.rows.length, { + message: 'positions array length must match rows array length', + }) + .refine((d) => !d.positions || new Set(d.positions).size === d.positions.length, { + message: 'positions must not contain duplicates', + }) const QueryRowsSchema = z.object({ workspaceId: z.string().min(1, 'Workspace ID is required'), @@ -54,27 +66,32 @@ const QueryRowsSchema = z.object({ .default(0), }) +const nonEmptyFilter = z + .record(z.unknown(), { required_error: 'Filter criteria is required' }) + .refine((f) => Object.keys(f).length > 0, { message: 'Filter must not be empty' }) + +const optionalPositiveLimit = (max: number, label: string) => + z.preprocess( + (val) => (val === null || val === undefined || val === '' ? 
undefined : Number(val)), + z + .number() + .int(`${label} must be an integer`) + .min(1, `${label} must be at least 1`) + .max(max, `Cannot ${label.toLowerCase()} more than ${max} rows per operation`) + .optional() + ) + const UpdateRowsByFilterSchema = z.object({ workspaceId: z.string().min(1, 'Workspace ID is required'), - filter: z.record(z.unknown(), { required_error: 'Filter criteria is required' }), + filter: nonEmptyFilter, data: z.record(z.unknown(), { required_error: 'Update data is required' }), - limit: z.coerce - .number({ required_error: 'Limit must be a number' }) - .int('Limit must be an integer') - .min(1, 'Limit must be at least 1') - .max(1000, 'Cannot update more than 1000 rows per operation') - .optional(), + limit: optionalPositiveLimit(1000, 'Limit'), }) const DeleteRowsByFilterSchema = z.object({ workspaceId: z.string().min(1, 'Workspace ID is required'), - filter: z.record(z.unknown(), { required_error: 'Filter criteria is required' }), - limit: z.coerce - .number({ required_error: 'Limit must be a number' }) - .int('Limit must be an integer') - .min(1, 'Limit must be at least 1') - .max(1000, 'Cannot delete more than 1000 rows per operation') - .optional(), + filter: nonEmptyFilter, + limit: optionalPositiveLimit(1000, 'Limit'), }) const DeleteRowsByIdsSchema = z.object({ @@ -87,6 +104,22 @@ const DeleteRowsByIdsSchema = z.object({ const DeleteRowsRequestSchema = z.union([DeleteRowsByFilterSchema, DeleteRowsByIdsSchema]) +const BatchUpdateByIdsSchema = z.object({ + workspaceId: z.string().min(1, 'Workspace ID is required'), + updates: z + .array( + z.object({ + rowId: z.string().min(1), + data: z.record(z.unknown()), + }) + ) + .min(1, 'At least one update is required') + .max(1000, 'Cannot update more than 1000 rows per batch') + .refine((d) => new Set(d.map((u) => u.rowId)).size === d.length, { + message: 'updates must not contain duplicate rowId values', + }), +}) + interface TableRowsRouteParams { params: Promise<{ tableId: string }> } 
@@ -111,18 +144,8 @@ async function handleBatchInsert( return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 }) } - const workspaceId = validated.workspaceId - - const remainingCapacity = table.maxRows - table.rowCount - if (remainingCapacity < validated.rows.length) { - return NextResponse.json( - { - error: `Insufficient capacity. Can only insert ${remainingCapacity} more rows (table has ${table.rowCount}/${table.maxRows} rows)`, - }, - { status: 400 } - ) - } - + // Validate rows before calling service (service also validates, but route-level + // validation returns structured HTTP responses) const validation = await validateBatchRows({ rows: validated.rows as RowData[], schema: table.schema as TableSchema, @@ -130,34 +153,50 @@ async function handleBatchInsert( }) if (!validation.valid) return validation.response - const now = new Date() - const rowsToInsert = validated.rows.map((data) => ({ - id: `row_${crypto.randomUUID().replace(/-/g, '')}`, - tableId, - workspaceId, - data, - createdAt: now, - updatedAt: now, - createdBy: userId, - })) - - const insertedRows = await db.insert(userTableRows).values(rowsToInsert).returning() - - logger.info(`[${requestId}] Batch inserted ${insertedRows.length} rows into table ${tableId}`) - - return NextResponse.json({ - success: true, - data: { - rows: insertedRows.map((r) => ({ - id: r.id, - data: r.data, - createdAt: r.createdAt.toISOString(), - updatedAt: r.updatedAt.toISOString(), - })), - insertedCount: insertedRows.length, - message: `Successfully inserted ${insertedRows.length} rows`, - }, - }) + try { + const insertedRows = await batchInsertRows( + { + tableId, + rows: validated.rows as RowData[], + workspaceId: validated.workspaceId, + userId, + positions: validated.positions, + }, + table, + requestId + ) + + return NextResponse.json({ + success: true, + data: { + rows: insertedRows.map((r) => ({ + id: r.id, + data: r.data, + position: r.position, + createdAt: r.createdAt instanceof Date ? 
r.createdAt.toISOString() : r.createdAt, + updatedAt: r.updatedAt instanceof Date ? r.updatedAt.toISOString() : r.updatedAt, + })), + insertedCount: insertedRows.length, + message: `Successfully inserted ${insertedRows.length} rows`, + }, + }) + } catch (error) { + const errorMessage = error instanceof Error ? error.message : String(error) + + if ( + errorMessage.includes('row limit') || + errorMessage.includes('Insufficient capacity') || + errorMessage.includes('Schema validation') || + errorMessage.includes('must be unique') || + errorMessage.includes('Row size exceeds') || + errorMessage.match(/^Row \d+:/) + ) { + return NextResponse.json({ error: errorMessage }, { status: 400 }) + } + + logger.error(`[${requestId}] Error batch inserting rows:`, error) + return NextResponse.json({ error: 'Failed to insert rows' }, { status: 500 }) + } } /** POST /api/table/[tableId]/rows - Inserts row(s). Supports single or batch insert. */ @@ -171,7 +210,12 @@ export async function POST(request: NextRequest, { params }: TableRowsRouteParam return NextResponse.json({ error: 'Authentication required' }, { status: 401 }) } - const body: unknown = await request.json() + let body: unknown + try { + body = await request.json() + } catch { + return NextResponse.json({ error: 'Request body must be valid JSON' }, { status: 400 }) + } if ( typeof body === 'object' && @@ -201,9 +245,9 @@ export async function POST(request: NextRequest, { params }: TableRowsRouteParam return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 }) } - const workspaceId = validated.workspaceId const rowData = validated.data as RowData + // Validate at route level for structured HTTP error responses const validation = await validateRowData({ rowData, schema: table.schema as TableSchema, @@ -211,30 +255,18 @@ export async function POST(request: NextRequest, { params }: TableRowsRouteParam }) if (!validation.valid) return validation.response - if (table.rowCount >= table.maxRows) { - return 
NextResponse.json( - { error: `Table row limit reached (${table.maxRows} rows max)` }, - { status: 400 } - ) - } - - const rowId = `row_${crypto.randomUUID().replace(/-/g, '')}` - const now = new Date() - - const [row] = await db - .insert(userTableRows) - .values({ - id: rowId, + // Service handles atomic capacity check + insert in a transaction + const row = await insertRow( + { tableId, - workspaceId, - data: validated.data, - createdAt: now, - updatedAt: now, - createdBy: authResult.userId, - }) - .returning() - - logger.info(`[${requestId}] Inserted row ${rowId} into table ${tableId}`) + data: rowData, + workspaceId: validated.workspaceId, + userId: authResult.userId, + position: validated.position, + }, + table, + requestId + ) return NextResponse.json({ success: true, @@ -242,8 +274,9 @@ export async function POST(request: NextRequest, { params }: TableRowsRouteParam row: { id: row.id, data: row.data, - createdAt: row.createdAt.toISOString(), - updatedAt: row.updatedAt.toISOString(), + position: row.position, + createdAt: row.createdAt instanceof Date ? row.createdAt.toISOString() : row.createdAt, + updatedAt: row.updatedAt instanceof Date ? row.updatedAt.toISOString() : row.updatedAt, }, message: 'Row inserted successfully', }, @@ -256,6 +289,18 @@ export async function POST(request: NextRequest, { params }: TableRowsRouteParam ) } + const errorMessage = error instanceof Error ? 
error.message : String(error) + + if ( + errorMessage.includes('row limit') || + errorMessage.includes('Insufficient capacity') || + errorMessage.includes('Schema validation') || + errorMessage.includes('must be unique') || + errorMessage.includes('Row size exceeds') + ) { + return NextResponse.json({ error: errorMessage }, { status: 400 }) + } + logger.error(`[${requestId}] Error inserting row:`, error) return NextResponse.json({ error: 'Failed to insert row' }, { status: 500 }) } @@ -329,6 +374,7 @@ export async function GET(request: NextRequest, { params }: TableRowsRouteParams .select({ id: userTableRows.id, data: userTableRows.data, + position: userTableRows.position, createdAt: userTableRows.createdAt, updatedAt: userTableRows.updatedAt, }) @@ -340,9 +386,11 @@ export async function GET(request: NextRequest, { params }: TableRowsRouteParams const sortClause = buildSortClause(validated.sort, USER_TABLE_ROWS_SQL_NAME, schema.columns) if (sortClause) { query = query.orderBy(sortClause) as typeof query + } else { + query = query.orderBy(userTableRows.position) as typeof query } } else { - query = query.orderBy(userTableRows.createdAt) as typeof query + query = query.orderBy(userTableRows.position) as typeof query } const countQuery = db @@ -364,8 +412,9 @@ export async function GET(request: NextRequest, { params }: TableRowsRouteParams rows: rows.map((r) => ({ id: r.id, data: r.data, - createdAt: r.createdAt.toISOString(), - updatedAt: r.updatedAt.toISOString(), + position: r.position, + createdAt: r.createdAt instanceof Date ? r.createdAt.toISOString() : String(r.createdAt), + updatedAt: r.updatedAt instanceof Date ? 
r.updatedAt.toISOString() : String(r.updatedAt), })), rowCount: rows.length, totalCount: Number(totalCount), @@ -397,7 +446,13 @@ export async function PUT(request: NextRequest, { params }: TableRowsRouteParams return NextResponse.json({ error: 'Authentication required' }, { status: 401 }) } - const body: unknown = await request.json() + let body: unknown + try { + body = await request.json() + } catch { + return NextResponse.json({ error: 'Request body must be valid JSON' }, { status: 400 }) + } + const validated = UpdateRowsByFilterSchema.parse(body) const accessResult = await checkAccess(tableId, authResult.userId, 'write') @@ -412,9 +467,7 @@ export async function PUT(request: NextRequest, { params }: TableRowsRouteParams return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 }) } - const updateData = validated.data as RowData - - const sizeValidation = validateRowSize(updateData) + const sizeValidation = validateRowSize(validated.data as RowData) if (!sizeValidation.valid) { return NextResponse.json( { error: 'Invalid row data', details: sizeValidation.errors }, @@ -422,31 +475,19 @@ export async function PUT(request: NextRequest, { params }: TableRowsRouteParams ) } - const baseConditions = [ - eq(userTableRows.tableId, tableId), - eq(userTableRows.workspaceId, validated.workspaceId), - ] - - const filterClause = buildFilterClause(validated.filter as Filter, USER_TABLE_ROWS_SQL_NAME) - if (filterClause) { - baseConditions.push(filterClause) - } - - let matchingRowsQuery = db - .select({ - id: userTableRows.id, - data: userTableRows.data, - }) - .from(userTableRows) - .where(and(...baseConditions)) - - if (validated.limit) { - matchingRowsQuery = matchingRowsQuery.limit(validated.limit) as typeof matchingRowsQuery - } - - const matchingRows = await matchingRowsQuery + const result = await updateRowsByFilter( + { + tableId, + filter: validated.filter as Filter, + data: validated.data as RowData, + limit: validated.limit, + workspaceId: 
validated.workspaceId, + }, + table, + requestId + ) - if (matchingRows.length === 0) { + if (result.affectedCount === 0) { return NextResponse.json( { success: true, @@ -459,103 +500,12 @@ export async function PUT(request: NextRequest, { params }: TableRowsRouteParams ) } - if (matchingRows.length > TABLE_LIMITS.MAX_BULK_OPERATION_SIZE) { - logger.warn(`[${requestId}] Updating ${matchingRows.length} rows. This may take some time.`) - } - - for (const row of matchingRows) { - const existingData = row.data as RowData - const mergedData = { ...existingData, ...updateData } - const rowValidation = validateRowAgainstSchema(mergedData, table.schema as TableSchema) - if (!rowValidation.valid) { - return NextResponse.json( - { - error: 'Updated data does not match schema', - details: rowValidation.errors, - affectedRowId: row.id, - }, - { status: 400 } - ) - } - } - - const uniqueColumns = getUniqueColumns(table.schema as TableSchema) - if (uniqueColumns.length > 0) { - // If updating multiple rows, check that updateData doesn't set any unique column - // (would cause all rows to have the same value, violating uniqueness) - if (matchingRows.length > 1) { - const uniqueColumnsInUpdate = uniqueColumns.filter((col) => col.name in updateData) - if (uniqueColumnsInUpdate.length > 0) { - return NextResponse.json( - { - error: 'Cannot set unique column values when updating multiple rows', - details: [ - `Columns with unique constraint: ${uniqueColumnsInUpdate.map((c) => c.name).join(', ')}. 
` + - `Updating ${matchingRows.length} rows with the same value would violate uniqueness.`, - ], - }, - { status: 400 } - ) - } - } - - // Check unique constraints against database for each row - for (const row of matchingRows) { - const existingData = row.data as RowData - const mergedData = { ...existingData, ...updateData } - const uniqueValidation = await checkUniqueConstraintsDb( - tableId, - mergedData, - table.schema as TableSchema, - row.id - ) - - if (!uniqueValidation.valid) { - return NextResponse.json( - { - error: 'Unique constraint violation', - details: uniqueValidation.errors, - affectedRowId: row.id, - }, - { status: 400 } - ) - } - } - } - - const now = new Date() - - await db.transaction(async (trx) => { - let totalUpdated = 0 - - for (let i = 0; i < matchingRows.length; i += TABLE_LIMITS.UPDATE_BATCH_SIZE) { - const batch = matchingRows.slice(i, i + TABLE_LIMITS.UPDATE_BATCH_SIZE) - const updatePromises = batch.map((row) => { - const existingData = row.data as RowData - return trx - .update(userTableRows) - .set({ - data: { ...existingData, ...updateData }, - updatedAt: now, - }) - .where(eq(userTableRows.id, row.id)) - }) - await Promise.all(updatePromises) - totalUpdated += batch.length - logger.info( - `[${requestId}] Updated batch ${Math.floor(i / TABLE_LIMITS.UPDATE_BATCH_SIZE) + 1} (${totalUpdated}/${matchingRows.length} rows)` - ) - } - }) - - logger.info(`[${requestId}] Updated ${matchingRows.length} rows in table ${tableId}`) - return NextResponse.json({ success: true, data: { message: 'Rows updated successfully', - updatedCount: matchingRows.length, - updatedRowIds: matchingRows.map((r) => r.id), + updatedCount: result.affectedCount, + updatedRowIds: result.affectedRowIds, }, }) } catch (error) { @@ -566,16 +516,25 @@ export async function PUT(request: NextRequest, { params }: TableRowsRouteParams ) } - logger.error(`[${requestId}] Error updating rows by filter:`, error) - const errorMessage = error instanceof Error ? 
error.message : String(error) - const detailedError = `Failed to update rows: ${errorMessage}` - return NextResponse.json({ error: detailedError }, { status: 500 }) + if ( + errorMessage.includes('Row size exceeds') || + errorMessage.includes('Schema validation') || + errorMessage.includes('must be unique') || + errorMessage.includes('Unique constraint violation') || + errorMessage.includes('Cannot set unique column') || + errorMessage.includes('Filter is required') + ) { + return NextResponse.json({ error: errorMessage }, { status: 400 }) + } + + logger.error(`[${requestId}] Error updating rows by filter:`, error) + return NextResponse.json({ error: 'Failed to update rows' }, { status: 500 }) } } -/** DELETE /api/table/[tableId]/rows - Deletes rows matching filter criteria. */ +/** DELETE /api/table/[tableId]/rows - Deletes rows matching filter criteria or by IDs. */ export async function DELETE(request: NextRequest, { params }: TableRowsRouteParams) { const requestId = generateRequestId() const { tableId } = await params @@ -586,7 +545,13 @@ export async function DELETE(request: NextRequest, { params }: TableRowsRoutePar return NextResponse.json({ error: 'Authentication required' }, { status: 401 }) } - const body: unknown = await request.json() + let body: unknown + try { + body = await request.json() + } catch { + return NextResponse.json({ error: 'Request body must be valid JSON' }, { status: 400 }) + } + const validated = DeleteRowsRequestSchema.parse(body) const accessResult = await checkAccess(tableId, authResult.userId, 'write') @@ -601,110 +566,115 @@ export async function DELETE(request: NextRequest, { params }: TableRowsRoutePar return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 }) } - const baseConditions = [ - eq(userTableRows.tableId, tableId), - eq(userTableRows.workspaceId, validated.workspaceId), - ] + if ('rowIds' in validated) { + const result = await deleteRowsByIds( + { tableId, rowIds: validated.rowIds, workspaceId: 
validated.workspaceId }, + requestId + ) - let rowIds: string[] = [] - let missingRowIds: string[] | undefined - let requestedCount: number | undefined + return NextResponse.json({ + success: true, + data: { + message: + result.deletedCount === 0 + ? 'No matching rows found for the provided IDs' + : 'Rows deleted successfully', + deletedCount: result.deletedCount, + deletedRowIds: result.deletedRowIds, + requestedCount: result.requestedCount, + ...(result.missingRowIds.length > 0 ? { missingRowIds: result.missingRowIds } : {}), + }, + }) + } - if ('rowIds' in validated) { - const uniqueRequestedRowIds = Array.from(new Set(validated.rowIds)) - requestedCount = uniqueRequestedRowIds.length - - const matchingRows = await db - .select({ id: userTableRows.id }) - .from(userTableRows) - .where( - and( - ...baseConditions, - sql`${userTableRows.id} = ANY(ARRAY[${sql.join( - uniqueRequestedRowIds.map((id) => sql`${id}`), - sql`, ` - )}])` - ) - ) - - const matchedRowIds = matchingRows.map((r) => r.id) - const matchedIdSet = new Set(matchedRowIds) - missingRowIds = uniqueRequestedRowIds.filter((id) => !matchedIdSet.has(id)) - rowIds = matchedRowIds - } else { - const filterClause = buildFilterClause(validated.filter as Filter, USER_TABLE_ROWS_SQL_NAME) - if (filterClause) { - baseConditions.push(filterClause) - } + const result = await deleteRowsByFilter( + { + tableId, + filter: validated.filter as Filter, + limit: validated.limit, + workspaceId: validated.workspaceId, + }, + requestId + ) - let matchingRowsQuery = db - .select({ id: userTableRows.id }) - .from(userTableRows) - .where(and(...baseConditions)) + return NextResponse.json({ + success: true, + data: { + message: + result.affectedCount === 0 + ? 
'No rows matched the filter criteria' + : 'Rows deleted successfully', + deletedCount: result.affectedCount, + deletedRowIds: result.affectedRowIds, + }, + }) + } catch (error) { + if (error instanceof z.ZodError) { + return NextResponse.json( + { error: 'Validation error', details: error.errors }, + { status: 400 } + ) + } - if (validated.limit) { - matchingRowsQuery = matchingRowsQuery.limit(validated.limit) as typeof matchingRowsQuery - } + const errorMessage = error instanceof Error ? error.message : String(error) - const matchingRows = await matchingRowsQuery - rowIds = matchingRows.map((r) => r.id) + if (errorMessage.includes('Filter is required')) { + return NextResponse.json({ error: errorMessage }, { status: 400 }) } - if (rowIds.length === 0) { - return NextResponse.json( - { - success: true, - data: { - message: - 'rowIds' in validated - ? 'No matching rows found for the provided IDs' - : 'No rows matched the filter criteria', - deletedCount: 0, - deletedRowIds: [], - ...(requestedCount !== undefined ? { requestedCount } : {}), - ...(missingRowIds ? { missingRowIds } : {}), - }, - }, - { status: 200 } - ) + logger.error(`[${requestId}] Error deleting rows:`, error) + return NextResponse.json({ error: 'Failed to delete rows' }, { status: 500 }) + } +} + +/** PATCH /api/table/[tableId]/rows - Batch updates rows by ID. */ +export async function PATCH(request: NextRequest, { params }: TableRowsRouteParams) { + const requestId = generateRequestId() + const { tableId } = await params + + try { + const authResult = await checkSessionOrInternalAuth(request, { requireWorkflowId: false }) + if (!authResult.success || !authResult.userId) { + return NextResponse.json({ error: 'Authentication required' }, { status: 401 }) } - if (rowIds.length > TABLE_LIMITS.DELETE_BATCH_SIZE) { - logger.warn(`[${requestId}] Deleting ${rowIds.length} rows. 
This may take some time.`) + let body: unknown + try { + body = await request.json() + } catch { + return NextResponse.json({ error: 'Request body must be valid JSON' }, { status: 400 }) } - await db.transaction(async (trx) => { - let totalDeleted = 0 - - for (let i = 0; i < rowIds.length; i += TABLE_LIMITS.DELETE_BATCH_SIZE) { - const batch = rowIds.slice(i, i + TABLE_LIMITS.DELETE_BATCH_SIZE) - await trx.delete(userTableRows).where( - and( - eq(userTableRows.tableId, tableId), - eq(userTableRows.workspaceId, validated.workspaceId), - sql`${userTableRows.id} = ANY(ARRAY[${sql.join( - batch.map((id) => sql`${id}`), - sql`, ` - )}])` - ) - ) - totalDeleted += batch.length - logger.info( - `[${requestId}] Deleted batch ${Math.floor(i / TABLE_LIMITS.DELETE_BATCH_SIZE) + 1} (${totalDeleted}/${rowIds.length} rows)` - ) - } - }) + const validated = BatchUpdateByIdsSchema.parse(body) + + const accessResult = await checkAccess(tableId, authResult.userId, 'write') + if (!accessResult.ok) return accessError(accessResult, requestId, tableId) + + const { table } = accessResult + + if (validated.workspaceId !== table.workspaceId) { + logger.warn( + `[${requestId}] Workspace ID mismatch for table ${tableId}. Provided: ${validated.workspaceId}, Actual: ${table.workspaceId}` + ) + return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 }) + } - logger.info(`[${requestId}] Deleted ${rowIds.length} rows from table ${tableId}`) + const result = await batchUpdateRows( + { + tableId, + updates: validated.updates as Array<{ rowId: string; data: RowData }>, + workspaceId: validated.workspaceId, + }, + table, + requestId + ) return NextResponse.json({ success: true, data: { - message: 'Rows deleted successfully', - deletedCount: rowIds.length, - deletedRowIds: rowIds, - ...(requestedCount !== undefined ? { requestedCount } : {}), - ...(missingRowIds ? 
{ missingRowIds } : {}), + message: 'Rows updated successfully', + updatedCount: result.affectedCount, + updatedRowIds: result.affectedRowIds, }, }) } catch (error) { @@ -715,11 +685,24 @@ export async function DELETE(request: NextRequest, { params }: TableRowsRoutePar ) } - logger.error(`[${requestId}] Error deleting rows by filter:`, error) - const errorMessage = error instanceof Error ? error.message : String(error) - const detailedError = `Failed to delete rows: ${errorMessage}` - return NextResponse.json({ error: detailedError }, { status: 500 }) + if ( + errorMessage.includes('Row size exceeds') || + errorMessage.includes('Schema validation') || + errorMessage.includes('must be valid') || + errorMessage.includes('must be string') || + errorMessage.includes('must be number') || + errorMessage.includes('must be boolean') || + errorMessage.includes('must be unique') || + errorMessage.includes('Unique constraint violation') || + errorMessage.includes('Cannot set unique column') || + errorMessage.includes('Rows not found') + ) { + return NextResponse.json({ error: errorMessage }, { status: 400 }) + } + + logger.error(`[${requestId}] Error batch updating rows:`, error) + return NextResponse.json({ error: 'Failed to update rows' }, { status: 500 }) } } diff --git a/apps/sim/app/api/table/[tableId]/rows/upsert/route.ts b/apps/sim/app/api/table/[tableId]/rows/upsert/route.ts index a7b9e81468..f78c90b2e0 100644 --- a/apps/sim/app/api/table/[tableId]/rows/upsert/route.ts +++ b/apps/sim/app/api/table/[tableId]/rows/upsert/route.ts @@ -1,20 +1,18 @@ -import { db } from '@sim/db' -import { userTableRows } from '@sim/db/schema' import { createLogger } from '@sim/logger' -import { and, eq, or, sql } from 'drizzle-orm' import { type NextRequest, NextResponse } from 'next/server' import { z } from 'zod' import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid' import { generateRequestId } from '@/lib/core/utils/request' -import type { RowData, TableSchema } from 
'@/lib/table' -import { getUniqueColumns, validateRowData } from '@/lib/table' -import { accessError, checkAccess, verifyTableWorkspace } from '../../../utils' +import type { RowData } from '@/lib/table' +import { upsertRow } from '@/lib/table' +import { accessError, checkAccess } from '@/app/api/table/utils' const logger = createLogger('TableUpsertAPI') const UpsertRowSchema = z.object({ workspaceId: z.string().min(1, 'Workspace ID is required'), data: z.record(z.unknown(), { required_error: 'Row data is required' }), + conflictTarget: z.string().optional(), }) interface UpsertRouteParams { @@ -32,7 +30,13 @@ export async function POST(request: NextRequest, { params }: UpsertRouteParams) return NextResponse.json({ error: 'Authentication required' }, { status: 401 }) } - const body: unknown = await request.json() + let body: unknown + try { + body = await request.json() + } catch { + return NextResponse.json({ error: 'Request body must be valid JSON' }, { status: 400 }) + } + const validated = UpsertRowSchema.parse(body) const result = await checkAccess(tableId, authResult.userId, 'write') @@ -40,115 +44,20 @@ export async function POST(request: NextRequest, { params }: UpsertRouteParams) const { table } = result - const isValidWorkspace = await verifyTableWorkspace(tableId, validated.workspaceId) - if (!isValidWorkspace) { - logger.warn( - `[${requestId}] Workspace ID mismatch for table ${tableId}. 
Provided: ${validated.workspaceId}, Actual: ${table.workspaceId}` - ) + if (table.workspaceId !== validated.workspaceId) { return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 }) } - const schema = table.schema as TableSchema - const rowData = validated.data as RowData - - const validation = await validateRowData({ - rowData, - schema, - tableId, - checkUnique: false, - }) - if (!validation.valid) return validation.response - - const uniqueColumns = getUniqueColumns(schema) - - if (uniqueColumns.length === 0) { - return NextResponse.json( - { - error: - 'Upsert requires at least one unique column in the schema. Please add a unique constraint to a column or use insert instead.', - }, - { status: 400 } - ) - } - - const uniqueFilters = uniqueColumns.map((col) => { - const value = rowData[col.name] - if (value === undefined || value === null) { - return null - } - return sql`${userTableRows.data}->>${col.name} = ${String(value)}` - }) - - const validUniqueFilters = uniqueFilters.filter((f): f is Exclude => f !== null) - - if (validUniqueFilters.length === 0) { - return NextResponse.json( - { - error: `Upsert requires values for at least one unique field: ${uniqueColumns.map((c) => c.name).join(', ')}`, - }, - { status: 400 } - ) - } - - const [existingRow] = await db - .select() - .from(userTableRows) - .where( - and( - eq(userTableRows.tableId, tableId), - eq(userTableRows.workspaceId, validated.workspaceId), - or(...validUniqueFilters) - ) - ) - .limit(1) - - const now = new Date() - - if (!existingRow && table.rowCount >= table.maxRows) { - return NextResponse.json( - { error: `Table row limit reached (${table.maxRows} rows max)` }, - { status: 400 } - ) - } - - const upsertResult = await db.transaction(async (trx) => { - if (existingRow) { - const [updatedRow] = await trx - .update(userTableRows) - .set({ - data: validated.data, - updatedAt: now, - }) - .where(eq(userTableRows.id, existingRow.id)) - .returning() - - return { - row: updatedRow, 
- operation: 'update' as const, - } - } - - const [insertedRow] = await trx - .insert(userTableRows) - .values({ - id: `row_${crypto.randomUUID().replace(/-/g, '')}`, - tableId, - workspaceId: validated.workspaceId, - data: validated.data, - createdAt: now, - updatedAt: now, - createdBy: authResult.userId, - }) - .returning() - - return { - row: insertedRow, - operation: 'insert' as const, - } - }) - - logger.info( - `[${requestId}] Upserted (${upsertResult.operation}) row ${upsertResult.row.id} in table ${tableId}` + const upsertResult = await upsertRow( + { + tableId, + workspaceId: validated.workspaceId, + data: validated.data as RowData, + userId: authResult.userId, + conflictTarget: validated.conflictTarget, + }, + table, + requestId ) return NextResponse.json({ @@ -157,8 +66,14 @@ export async function POST(request: NextRequest, { params }: UpsertRouteParams) row: { id: upsertResult.row.id, data: upsertResult.row.data, - createdAt: upsertResult.row.createdAt.toISOString(), - updatedAt: upsertResult.row.updatedAt.toISOString(), + createdAt: + upsertResult.row.createdAt instanceof Date + ? upsertResult.row.createdAt.toISOString() + : upsertResult.row.createdAt, + updatedAt: + upsertResult.row.updatedAt instanceof Date + ? upsertResult.row.updatedAt.toISOString() + : upsertResult.row.updatedAt, }, operation: upsertResult.operation, message: `Row ${upsertResult.operation === 'update' ? 'updated' : 'inserted'} successfully`, @@ -172,11 +87,22 @@ export async function POST(request: NextRequest, { params }: UpsertRouteParams) ) } - logger.error(`[${requestId}] Error upserting row:`, error) - const errorMessage = error instanceof Error ? 
error.message : String(error) - const detailedError = `Failed to upsert row: ${errorMessage}` - return NextResponse.json({ error: detailedError }, { status: 500 }) + // Service layer throws descriptive errors for validation/capacity issues + if ( + errorMessage.includes('unique column') || + errorMessage.includes('Unique constraint violation') || + errorMessage.includes('conflictTarget') || + errorMessage.includes('row limit') || + errorMessage.includes('Schema validation') || + errorMessage.includes('Upsert requires') || + errorMessage.includes('Row size exceeds') + ) { + return NextResponse.json({ error: errorMessage }, { status: 400 }) + } + + logger.error(`[${requestId}] Error upserting row:`, error) + return NextResponse.json({ error: 'Failed to upsert row' }, { status: 500 }) } } diff --git a/apps/sim/app/api/table/route.ts b/apps/sim/app/api/table/route.ts index cd9aa7ff3e..18387ea80d 100644 --- a/apps/sim/app/api/table/route.ts +++ b/apps/sim/app/api/table/route.ts @@ -4,15 +4,15 @@ import { z } from 'zod' import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid' import { generateRequestId } from '@/lib/core/utils/request' import { - canCreateTable, createTable, getWorkspaceTableLimits, listTables, TABLE_LIMITS, type TableSchema, + type TableScope, } from '@/lib/table' import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils' -import { normalizeColumn } from './utils' +import { normalizeColumn } from '@/app/api/table/utils' const logger = createLogger('TableAPI') @@ -66,10 +66,12 @@ const CreateTableSchema = z.object({ ), }), workspaceId: z.string().min(1, 'Workspace ID is required'), + initialRowCount: z.number().int().min(0).max(100).optional(), }) const ListTablesSchema = z.object({ workspaceId: z.string().min(1, 'Workspace ID is required'), + scope: z.enum(['active', 'archived', 'all']).optional().default('active'), }) interface WorkspaceAccessResult { @@ -101,7 +103,13 @@ export async function POST(request: NextRequest) { 
return NextResponse.json({ error: 'Authentication required' }, { status: 401 }) } - const body: unknown = await request.json() + let body: unknown + try { + body = await request.json() + } catch { + return NextResponse.json({ error: 'Request body must be valid JSON' }, { status: 400 }) + } + const params = CreateTableSchema.parse(body) const { hasAccess, canWrite } = await checkWorkspaceAccess( @@ -113,22 +121,7 @@ export async function POST(request: NextRequest) { return NextResponse.json({ error: 'Access denied' }, { status: 403 }) } - // Check billing plan limits - const existingTables = await listTables(params.workspaceId) - const { canCreate, maxTables } = await canCreateTable(params.workspaceId, existingTables.length) - - if (!canCreate) { - return NextResponse.json( - { - error: `Workspace has reached the maximum table limit (${maxTables}) for your plan. Please upgrade to create more tables.`, - }, - { status: 403 } - ) - } - - // Get plan-based row limits const planLimits = await getWorkspaceTableLimits(params.workspaceId) - const maxRowsPerTable = planLimits.maxRowsPerTable const normalizedSchema: TableSchema = { columns: params.schema.columns.map(normalizeColumn), @@ -141,7 +134,9 @@ export async function POST(request: NextRequest) { schema: normalizedSchema, workspaceId: params.workspaceId, userId: authResult.userId, - maxRows: maxRowsPerTable, + maxRows: planLimits.maxRowsPerTable, + maxTables: planLimits.maxTables, + initialRowCount: params.initialRowCount, }, requestId ) @@ -153,7 +148,9 @@ export async function POST(request: NextRequest) { id: table.id, name: table.name, description: table.description, - schema: table.schema, + schema: { + columns: (table.schema as TableSchema).columns.map(normalizeColumn), + }, rowCount: table.rowCount, maxRows: table.maxRows, createdAt: @@ -177,11 +174,13 @@ export async function POST(request: NextRequest) { } if (error instanceof Error) { + if (error.message.includes('maximum table limit')) { + return 
NextResponse.json({ error: error.message }, { status: 403 }) + } if ( error.message.includes('Invalid table name') || error.message.includes('Invalid schema') || - error.message.includes('already exists') || - error.message.includes('maximum table limit') + error.message.includes('already exists') ) { return NextResponse.json({ error: error.message }, { status: 400 }) } @@ -204,8 +203,9 @@ export async function GET(request: NextRequest) { const { searchParams } = new URL(request.url) const workspaceId = searchParams.get('workspaceId') + const scope = searchParams.get('scope') - const validation = ListTablesSchema.safeParse({ workspaceId }) + const validation = ListTablesSchema.safeParse({ workspaceId, scope }) if (!validation.success) { return NextResponse.json( { error: 'Validation error', details: validation.error.errors }, @@ -221,7 +221,7 @@ export async function GET(request: NextRequest) { return NextResponse.json({ error: 'Access denied' }, { status: 403 }) } - const tables = await listTables(params.workspaceId) + const tables = await listTables(params.workspaceId, { scope: params.scope as TableScope }) logger.info(`[${requestId}] Listed ${tables.length} tables in workspace ${params.workspaceId}`) @@ -231,10 +231,15 @@ export async function GET(request: NextRequest) { tables: tables.map((t) => { const schemaData = t.schema as TableSchema return { - ...t, + id: t.id, + name: t.name, + description: t.description, schema: { columns: schemaData.columns.map(normalizeColumn), }, + rowCount: t.rowCount, + maxRows: t.maxRows, + createdBy: t.createdBy, createdAt: t.createdAt instanceof Date ? 
t.createdAt.toISOString() : String(t.createdAt), updatedAt: diff --git a/apps/sim/app/api/table/utils.ts b/apps/sim/app/api/table/utils.ts index 5aee66315a..091fc9f898 100644 --- a/apps/sim/app/api/table/utils.ts +++ b/apps/sim/app/api/table/utils.ts @@ -1,7 +1,8 @@ import { createLogger } from '@sim/logger' import { NextResponse } from 'next/server' +import { z } from 'zod' import type { ColumnDefinition, TableDefinition } from '@/lib/table' -import { getTableById } from '@/lib/table' +import { COLUMN_TYPES, getTableById } from '@/lib/table' import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils' const logger = createLogger('TableUtils') @@ -154,6 +155,37 @@ export function serverErrorResponse(message = 'Internal server error') { return errorResponse(message, 500) } +const columnTypeEnum = z.enum( + COLUMN_TYPES as unknown as [(typeof COLUMN_TYPES)[number], ...(typeof COLUMN_TYPES)[number][]] +) + +export const CreateColumnSchema = z.object({ + workspaceId: z.string().min(1, 'Workspace ID is required'), + column: z.object({ + name: z.string().min(1, 'Column name is required'), + type: columnTypeEnum, + required: z.boolean().optional(), + unique: z.boolean().optional(), + position: z.number().int().min(0).optional(), + }), +}) + +export const UpdateColumnSchema = z.object({ + workspaceId: z.string().min(1, 'Workspace ID is required'), + columnName: z.string().min(1, 'Column name is required'), + updates: z.object({ + name: z.string().min(1).optional(), + type: columnTypeEnum.optional(), + required: z.boolean().optional(), + unique: z.boolean().optional(), + }), +}) + +export const DeleteColumnSchema = z.object({ + workspaceId: z.string().min(1, 'Workspace ID is required'), + columnName: z.string().min(1, 'Column name is required'), +}) + export function normalizeColumn(col: ColumnDefinition): ColumnDefinition { return { name: col.name, diff --git a/apps/sim/app/api/templates/[id]/route.ts b/apps/sim/app/api/templates/[id]/route.ts index 
2ea215566e..260b64f582 100644 --- a/apps/sim/app/api/templates/[id]/route.ts +++ b/apps/sim/app/api/templates/[id]/route.ts @@ -7,6 +7,7 @@ import { z } from 'zod' import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log' import { getSession } from '@/lib/auth' import { generateRequestId } from '@/lib/core/utils/request' +import { canAccessTemplate } from '@/lib/templates/permissions' import { extractRequiredCredentials, sanitizeCredentials, @@ -24,6 +25,12 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{ try { const session = await getSession() + const access = await canAccessTemplate(id, session?.user?.id) + if (!access.allowed || !access.template) { + logger.warn(`[${requestId}] Template not found: ${id}`) + return NextResponse.json({ error: 'Template not found' }, { status: 404 }) + } + const result = await db .select({ template: templates, @@ -34,21 +41,12 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{ .where(eq(templates.id, id)) .limit(1) - if (result.length === 0) { - logger.warn(`[${requestId}] Template not found: ${id}`) - return NextResponse.json({ error: 'Template not found' }, { status: 404 }) - } - const { template, creator } = result[0] const templateWithCreator = { ...template, creator: creator || undefined, } - if (!session?.user?.id && template.status !== 'approved') { - return NextResponse.json({ error: 'Template not found' }, { status: 404 }) - } - let isStarred = false if (session?.user?.id) { const { templateStars } = await import('@sim/db/schema') diff --git a/apps/sim/app/api/templates/[id]/use/route.ts b/apps/sim/app/api/templates/[id]/use/route.ts index b08d6dfb8f..ecbbb850dd 100644 --- a/apps/sim/app/api/templates/[id]/use/route.ts +++ b/apps/sim/app/api/templates/[id]/use/route.ts @@ -7,10 +7,13 @@ import { v4 as uuidv4 } from 'uuid' import { getSession } from '@/lib/auth' import { generateRequestId } from '@/lib/core/utils/request' import { 
getInternalApiBaseUrl } from '@/lib/core/utils/urls' +import { canAccessTemplate, verifyTemplateOwnership } from '@/lib/templates/permissions' import { type RegenerateStateInput, regenerateWorkflowStateIds, } from '@/lib/workflows/persistence/utils' +import { deduplicateWorkflowName } from '@/lib/workflows/utils' +import { getUserEntityPermissions, getWorkspaceById } from '@/lib/workspaces/permissions/utils' const logger = createLogger('TemplateUseAPI') @@ -44,11 +47,37 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{ return NextResponse.json({ error: 'Workspace ID is required' }, { status: 400 }) } + const workspace = await getWorkspaceById(workspaceId) + if (!workspace) { + return NextResponse.json({ error: 'Workspace not found' }, { status: 404 }) + } + + const permission = await getUserEntityPermissions(session.user.id, 'workspace', workspaceId) + if (permission !== 'admin' && permission !== 'write') { + return NextResponse.json({ error: 'Forbidden' }, { status: 403 }) + } + logger.debug( `[${requestId}] Using template: ${id}, user: ${session.user.id}, workspace: ${workspaceId}, connect: ${connectToTemplate}` ) // Get the template + const templateAccess = await canAccessTemplate(id, session.user.id) + if (!templateAccess.allowed) { + logger.warn(`[${requestId}] Template not found: ${id}`) + return NextResponse.json({ error: 'Template not found' }, { status: 404 }) + } + + if (connectToTemplate) { + const ownership = await verifyTemplateOwnership(id, session.user.id, 'admin') + if (!ownership.authorized) { + return NextResponse.json( + { error: ownership.error || 'Access denied' }, + { status: ownership.status || 403 } + ) + } + } + const template = await db .select({ id: templates.id, @@ -61,11 +90,6 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{ .where(eq(templates.id, id)) .limit(1) - if (template.length === 0) { - logger.warn(`[${requestId}] Template not found: ${id}`) - return 
NextResponse.json({ error: 'Template not found' }, { status: 404 }) - } - const templateData = template[0] // Create a new workflow ID @@ -86,14 +110,16 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{ return mapped })() - // Step 1: Create the workflow record (like imports do) + const rawName = + connectToTemplate && !templateData.workflowId + ? templateData.name + : `${templateData.name} (copy)` + const dedupedName = await deduplicateWorkflowName(rawName, workspaceId, null) + await db.insert(workflow).values({ id: newWorkflowId, workspaceId: workspaceId, - name: - connectToTemplate && !templateData.workflowId - ? templateData.name - : `${templateData.name} (copy)`, + name: dedupedName, description: (templateData.details as TemplateDetails | null)?.tagline || null, userId: session.user.id, variables: remappedVariables, // Remap variable IDs and workflowId for the new workflow diff --git a/apps/sim/app/api/templates/route.ts b/apps/sim/app/api/templates/route.ts index 55628bfc7c..e12cc47ad9 100644 --- a/apps/sim/app/api/templates/route.ts +++ b/apps/sim/app/api/templates/route.ts @@ -14,11 +14,12 @@ import { z } from 'zod' import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log' import { getSession } from '@/lib/auth' import { generateRequestId } from '@/lib/core/utils/request' -import { verifyEffectiveSuperUser } from '@/lib/templates/permissions' +import { canAccessTemplate, verifyEffectiveSuperUser } from '@/lib/templates/permissions' import { extractRequiredCredentials, sanitizeCredentials, } from '@/lib/workflows/credentials/credential-extractor' +import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils' const logger = createLogger('TemplatesAPI') @@ -79,12 +80,45 @@ export async function GET(request: NextRequest) { // When fetching by workflowId, we want to get the template regardless of status // This is used by the deploy modal to check if a template exists if (params.workflowId) { 
+ const authorization = await authorizeWorkflowByWorkspacePermission({ + workflowId: params.workflowId, + userId: session.user.id, + action: 'write', + }) + if (!authorization.allowed) { + return NextResponse.json( + { + data: [], + pagination: { + total: 0, + limit: params.limit, + offset: params.offset, + page: 1, + totalPages: 0, + }, + }, + { status: 200 } + ) + } conditions.push(eq(templates.workflowId, params.workflowId)) - // Don't apply status filter when fetching by workflowId - we want to show - // the template to its owner even if it's pending } else { // Apply status filter - only approved templates for non-super users if (params.status) { + if (!isSuperUser && params.status !== 'approved') { + return NextResponse.json( + { + data: [], + pagination: { + total: 0, + limit: params.limit, + offset: params.offset, + page: 1, + totalPages: 0, + }, + }, + { status: 200 } + ) + } conditions.push(eq(templates.status, params.status)) } else if (!isSuperUser || !params.includeAllStatuses) { // Non-super users and super users without includeAllStatuses flag see only approved templates @@ -145,16 +179,33 @@ export async function GET(request: NextRequest) { const total = totalCount[0]?.count || 0 - logger.info(`[${requestId}] Successfully retrieved ${results.length} templates`) + const visibleResults = + params.workflowId && !isSuperUser + ? ( + await Promise.all( + results.map(async (template) => { + if (template.status === 'approved') { + return template + } + const access = await canAccessTemplate(template.id, session.user.id) + return access.allowed ? template : null + }) + ) + ).filter((template): template is (typeof results)[number] => template !== null) + : results + + logger.info(`[${requestId}] Successfully retrieved ${visibleResults.length} templates`) return NextResponse.json({ - data: results, + data: visibleResults, pagination: { - total, + total: params.workflowId && !isSuperUser ? 
visibleResults.length : total, limit: params.limit, offset: params.offset, page: Math.floor(params.offset / params.limit) + 1, - totalPages: Math.ceil(total / params.limit), + totalPages: Math.ceil( + (params.workflowId && !isSuperUser ? visibleResults.length : total) / params.limit + ), }, }) } catch (error: any) { @@ -185,18 +236,25 @@ export async function POST(request: NextRequest) { const body = await request.json() const data = CreateTemplateSchema.parse(body) - // Verify the workflow exists and belongs to the user - const workflowExists = await db - .select({ id: workflow.id }) - .from(workflow) - .where(eq(workflow.id, data.workflowId)) - .limit(1) + const workflowAuthorization = await authorizeWorkflowByWorkspacePermission({ + workflowId: data.workflowId, + userId: session.user.id, + action: 'write', + }) - if (workflowExists.length === 0) { + if (!workflowAuthorization.workflow) { logger.warn(`[${requestId}] Workflow not found: ${data.workflowId}`) return NextResponse.json({ error: 'Workflow not found' }, { status: 404 }) } + if (!workflowAuthorization.allowed) { + logger.warn(`[${requestId}] User denied permission to template workflow ${data.workflowId}`) + return NextResponse.json( + { error: workflowAuthorization.message || 'Access denied' }, + { status: workflowAuthorization.status || 403 } + ) + } + const { verifyCreatorPermission } = await import('@/lib/templates/permissions') const { hasPermission, error: permissionError } = await verifyCreatorPermission( session.user.id, diff --git a/apps/sim/app/api/users/me/api-keys/[id]/route.ts b/apps/sim/app/api/users/me/api-keys/[id]/route.ts index e9344b86dd..d5a8831544 100644 --- a/apps/sim/app/api/users/me/api-keys/[id]/route.ts +++ b/apps/sim/app/api/users/me/api-keys/[id]/route.ts @@ -33,7 +33,7 @@ export async function DELETE( // Delete the API key, ensuring it belongs to the current user const result = await db .delete(apiKey) - .where(and(eq(apiKey.id, keyId), eq(apiKey.userId, userId))) + 
.where(and(eq(apiKey.id, keyId), eq(apiKey.userId, userId), eq(apiKey.type, 'personal'))) .returning({ id: apiKey.id, name: apiKey.name }) if (!result.length) { diff --git a/apps/sim/app/api/users/me/settings/route.ts b/apps/sim/app/api/users/me/settings/route.ts index c8de2b0568..78db186d48 100644 --- a/apps/sim/app/api/users/me/settings/route.ts +++ b/apps/sim/app/api/users/me/settings/route.ts @@ -31,7 +31,7 @@ const SettingsSchema = z.object({ }) const defaultSettings = { - theme: 'dark', + theme: 'system', autoConnect: true, telemetryEnabled: true, emailPreferences: {}, diff --git a/apps/sim/app/api/users/me/usage-logs/route.ts b/apps/sim/app/api/users/me/usage-logs/route.ts index 038cf2ece3..e95b6fc03a 100644 --- a/apps/sim/app/api/users/me/usage-logs/route.ts +++ b/apps/sim/app/api/users/me/usage-logs/route.ts @@ -3,6 +3,7 @@ import { type NextRequest, NextResponse } from 'next/server' import { z } from 'zod' import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid' import { getUserUsageLogs, type UsageLogSource } from '@/lib/billing/core/usage-log' +import { dollarsToCredits } from '@/lib/billing/credits/conversion' const logger = createLogger('UsageLogsAPI') @@ -78,6 +79,16 @@ export async function GET(req: NextRequest) { cursor, }) + const logsWithCredits = result.logs.map((log) => ({ + ...log, + creditCost: dollarsToCredits(log.cost), + })) + + const bySourceCredits: Record = {} + for (const [src, cost] of Object.entries(result.summary.bySource)) { + bySourceCredits[src] = dollarsToCredits(cost) + } + logger.debug('Retrieved usage logs', { userId, source, @@ -88,7 +99,13 @@ export async function GET(req: NextRequest) { return NextResponse.json({ success: true, - ...result, + logs: logsWithCredits, + summary: { + ...result.summary, + totalCostCredits: dollarsToCredits(result.summary.totalCost), + bySourceCredits, + }, + pagination: result.pagination, }) } catch (error) { logger.error('Failed to get usage logs', { diff --git 
a/apps/sim/app/api/v1/admin/workflows/[id]/deploy/route.ts b/apps/sim/app/api/v1/admin/workflows/[id]/deploy/route.ts index 3eab0374db..9a6eeba491 100644 --- a/apps/sim/app/api/v1/admin/workflows/[id]/deploy/route.ts +++ b/apps/sim/app/api/v1/admin/workflows/[id]/deploy/route.ts @@ -1,4 +1,4 @@ -import { db, workflow, workflowDeploymentVersion } from '@sim/db' +import { db, workflowDeploymentVersion } from '@sim/db' import { createLogger } from '@sim/logger' import { and, eq } from 'drizzle-orm' import { generateRequestId } from '@/lib/core/utils/request' @@ -8,7 +8,9 @@ import { restorePreviousVersionWebhooks, saveTriggerWebhooksForDeploy, } from '@/lib/webhooks/deploy' +import { getActiveWorkflowRecord } from '@/lib/workflows/active-context' import { + activateWorkflowVersionById, deployWorkflow, loadWorkflowFromNormalizedTables, undeployWorkflow, @@ -40,11 +42,7 @@ export const POST = withAdminAuthParams(async (request, context) => const requestId = generateRequestId() try { - const [workflowRecord] = await db - .select() - .from(workflow) - .where(eq(workflow.id, workflowId)) - .limit(1) + const workflowRecord = await getActiveWorkflowRecord(workflowId) if (!workflowRecord) { return notFoundResponse('Workflow') @@ -72,6 +70,27 @@ export const POST = withAdminAuthParams(async (request, context) => .limit(1) const previousVersionId = currentActiveVersion?.id + const rollbackDeployment = async () => { + if (previousVersionId) { + await restorePreviousVersionWebhooks({ + request, + workflow: workflowData, + userId: workflowRecord.userId, + previousVersionId, + requestId, + }) + const reactivateResult = await activateWorkflowVersionById({ + workflowId, + deploymentVersionId: previousVersionId, + }) + if (reactivateResult.success) { + return + } + } + + await undeployWorkflow({ workflowId }) + } + const deployResult = await deployWorkflow({ workflowId, deployedBy: ADMIN_ACTOR_ID, @@ -107,7 +126,7 @@ export const POST = withAdminAuthParams(async (request, context) => 
requestId, deploymentVersionId: deployResult.deploymentVersionId, }) - await undeployWorkflow({ workflowId }) + await rollbackDeployment() return internalErrorResponse( triggerSaveResult.error?.message || 'Failed to sync trigger configuration' ) @@ -129,16 +148,7 @@ export const POST = withAdminAuthParams(async (request, context) => requestId, deploymentVersionId: deployResult.deploymentVersionId, }) - if (previousVersionId) { - await restorePreviousVersionWebhooks({ - request, - workflow: workflowData, - userId: workflowRecord.userId, - previousVersionId, - requestId, - }) - } - await undeployWorkflow({ workflowId }) + await rollbackDeployment() return internalErrorResponse(scheduleResult.error || 'Failed to create schedule') } @@ -186,27 +196,23 @@ export const DELETE = withAdminAuthParams(async (request, context) const requestId = generateRequestId() try { - const [workflowRecord] = await db - .select() - .from(workflow) - .where(eq(workflow.id, workflowId)) - .limit(1) + const workflowRecord = await getActiveWorkflowRecord(workflowId) if (!workflowRecord) { return notFoundResponse('Workflow') } + const result = await undeployWorkflow({ workflowId }) + if (!result.success) { + return internalErrorResponse(result.error || 'Failed to undeploy workflow') + } + await cleanupWebhooksForWorkflow( workflowId, workflowRecord as Record, requestId ) - const result = await undeployWorkflow({ workflowId }) - if (!result.success) { - return internalErrorResponse(result.error || 'Failed to undeploy workflow') - } - await removeMcpToolsForWorkflow(workflowId, requestId) logger.info(`Admin API: Undeployed workflow ${workflowId}`) diff --git a/apps/sim/app/api/v1/admin/workflows/[id]/route.ts b/apps/sim/app/api/v1/admin/workflows/[id]/route.ts index ca596d6afd..ad8644aa49 100644 --- a/apps/sim/app/api/v1/admin/workflows/[id]/route.ts +++ b/apps/sim/app/api/v1/admin/workflows/[id]/route.ts @@ -13,10 +13,12 @@ */ import { db } from '@sim/db' -import { workflow, workflowBlocks, 
workflowEdges, workflowSchedule } from '@sim/db/schema' +import { templates, workflowBlocks, workflowEdges } from '@sim/db/schema' import { createLogger } from '@sim/logger' import { count, eq } from 'drizzle-orm' import { NextResponse } from 'next/server' +import { getActiveWorkflowRecord } from '@/lib/workflows/active-context' +import { archiveWorkflow } from '@/lib/workflows/lifecycle' import { withAdminAuthParams } from '@/app/api/v1/admin/middleware' import { internalErrorResponse, @@ -35,11 +37,7 @@ export const GET = withAdminAuthParams(async (request, context) => const { id: workflowId } = await context.params try { - const [workflowData] = await db - .select() - .from(workflow) - .where(eq(workflow.id, workflowId)) - .limit(1) + const workflowData = await getActiveWorkflowRecord(workflowId) if (!workflowData) { return notFoundResponse('Workflow') @@ -75,24 +73,16 @@ export const DELETE = withAdminAuthParams(async (request, context) const { id: workflowId } = await context.params try { - const [workflowData] = await db - .select({ id: workflow.id, name: workflow.name }) - .from(workflow) - .where(eq(workflow.id, workflowId)) - .limit(1) + const workflowData = await getActiveWorkflowRecord(workflowId) if (!workflowData) { return notFoundResponse('Workflow') } - await db.transaction(async (tx) => { - await Promise.all([ - tx.delete(workflowBlocks).where(eq(workflowBlocks.workflowId, workflowId)), - tx.delete(workflowEdges).where(eq(workflowEdges.workflowId, workflowId)), - tx.delete(workflowSchedule).where(eq(workflowSchedule.workflowId, workflowId)), - ]) + await db.update(templates).set({ workflowId: null }).where(eq(templates.workflowId, workflowId)) - await tx.delete(workflow).where(eq(workflow.id, workflowId)) + await archiveWorkflow(workflowId, { + requestId: `admin-workflow-${workflowId}`, }) logger.info(`Admin API: Deleted workflow ${workflowId} (${workflowData.name})`) diff --git 
a/apps/sim/app/api/v1/admin/workflows/[id]/versions/[versionId]/activate/route.ts b/apps/sim/app/api/v1/admin/workflows/[id]/versions/[versionId]/activate/route.ts index a1406ca830..1824c6508f 100644 --- a/apps/sim/app/api/v1/admin/workflows/[id]/versions/[versionId]/activate/route.ts +++ b/apps/sim/app/api/v1/admin/workflows/[id]/versions/[versionId]/activate/route.ts @@ -1,9 +1,10 @@ -import { db, workflow, workflowDeploymentVersion } from '@sim/db' +import { db, workflowDeploymentVersion } from '@sim/db' import { createLogger } from '@sim/logger' import { and, eq } from 'drizzle-orm' import { generateRequestId } from '@/lib/core/utils/request' import { syncMcpToolsForWorkflow } from '@/lib/mcp/workflow-mcp-sync' import { restorePreviousVersionWebhooks, saveTriggerWebhooksForDeploy } from '@/lib/webhooks/deploy' +import { getActiveWorkflowRecord } from '@/lib/workflows/active-context' import { activateWorkflowVersion } from '@/lib/workflows/persistence/utils' import { cleanupDeploymentVersion, @@ -31,11 +32,7 @@ export const POST = withAdminAuthParams(async (request, context) => const { id: workflowId, versionId } = await context.params try { - const [workflowRecord] = await db - .select() - .from(workflow) - .where(eq(workflow.id, workflowId)) - .limit(1) + const workflowRecord = await getActiveWorkflowRecord(workflowId) if (!workflowRecord) { return notFoundResponse('Workflow') diff --git a/apps/sim/app/api/v1/admin/workflows/[id]/versions/route.ts b/apps/sim/app/api/v1/admin/workflows/[id]/versions/route.ts index 004e4c15b0..846f4c7f48 100644 --- a/apps/sim/app/api/v1/admin/workflows/[id]/versions/route.ts +++ b/apps/sim/app/api/v1/admin/workflows/[id]/versions/route.ts @@ -1,6 +1,5 @@ -import { db, workflow } from '@sim/db' import { createLogger } from '@sim/logger' -import { eq } from 'drizzle-orm' +import { getActiveWorkflowRecord } from '@/lib/workflows/active-context' import { listWorkflowVersions } from '@/lib/workflows/persistence/utils' import { 
withAdminAuthParams } from '@/app/api/v1/admin/middleware' import { @@ -20,11 +19,7 @@ export const GET = withAdminAuthParams(async (request, context) => const { id: workflowId } = await context.params try { - const [workflowRecord] = await db - .select({ id: workflow.id }) - .from(workflow) - .where(eq(workflow.id, workflowId)) - .limit(1) + const workflowRecord = await getActiveWorkflowRecord(workflowId) if (!workflowRecord) { return notFoundResponse('Workflow') diff --git a/apps/sim/app/api/v1/admin/workflows/import/route.ts b/apps/sim/app/api/v1/admin/workflows/import/route.ts index 7c3dd58ad6..d5907ce399 100644 --- a/apps/sim/app/api/v1/admin/workflows/import/route.ts +++ b/apps/sim/app/api/v1/admin/workflows/import/route.ts @@ -17,10 +17,11 @@ import { db } from '@sim/db' import { workflow, workspace } from '@sim/db/schema' import { createLogger } from '@sim/logger' -import { eq } from 'drizzle-orm' +import { and, eq, isNull } from 'drizzle-orm' import { NextResponse } from 'next/server' import { parseWorkflowJson } from '@/lib/workflows/operations/import-export' import { saveWorkflowToNormalizedTables } from '@/lib/workflows/persistence/utils' +import { deduplicateWorkflowName } from '@/lib/workflows/utils' import { withAdminAuth } from '@/app/api/v1/admin/middleware' import { badRequestResponse, @@ -58,7 +59,7 @@ export const POST = withAdminAuth(async (request) => { const [workspaceData] = await db .select({ id: workspace.id, ownerId: workspace.ownerId }) .from(workspace) - .where(eq(workspace.id, workspaceId)) + .where(and(eq(workspace.id, workspaceId), isNull(workspace.archivedAt))) .limit(1) if (!workspaceData) { @@ -93,13 +94,14 @@ export const POST = withAdminAuth(async (request) => { const workflowId = crypto.randomUUID() const now = new Date() + const dedupedName = await deduplicateWorkflowName(workflowName, workspaceId, folderId || null) await db.insert(workflow).values({ id: workflowId, userId: workspaceData.ownerId, workspaceId, folderId: 
folderId || null, - name: workflowName, + name: dedupedName, description: workflowDescription, color: workflowColor, lastSynced: now, @@ -136,12 +138,12 @@ export const POST = withAdminAuth(async (request) => { } logger.info( - `Admin API: Imported workflow ${workflowId} (${workflowName}) into workspace ${workspaceId}` + `Admin API: Imported workflow ${workflowId} (${dedupedName}) into workspace ${workspaceId}` ) const response: ImportSuccessResponse = { workflowId, - name: workflowName, + name: dedupedName, success: true, } diff --git a/apps/sim/app/api/v1/admin/workspaces/[id]/import/route.ts b/apps/sim/app/api/v1/admin/workspaces/[id]/import/route.ts index 6bb6a4db66..15830d2e6d 100644 --- a/apps/sim/app/api/v1/admin/workspaces/[id]/import/route.ts +++ b/apps/sim/app/api/v1/admin/workspaces/[id]/import/route.ts @@ -24,7 +24,7 @@ */ import { db } from '@sim/db' -import { workflow, workflowFolder, workspace } from '@sim/db/schema' +import { workflow, workflowFolder } from '@sim/db/schema' import { createLogger } from '@sim/logger' import { eq } from 'drizzle-orm' import { NextResponse } from 'next/server' @@ -34,6 +34,8 @@ import { parseWorkflowJson, } from '@/lib/workflows/operations/import-export' import { saveWorkflowToNormalizedTables } from '@/lib/workflows/persistence/utils' +import { deduplicateWorkflowName } from '@/lib/workflows/utils' +import { getWorkspaceWithOwner } from '@/lib/workspaces/permissions/utils' import { withAdminAuthParams } from '@/app/api/v1/admin/middleware' import { badRequestResponse, @@ -67,11 +69,7 @@ export const POST = withAdminAuthParams(async (request, context) => const rootFolderName = url.searchParams.get('rootFolderName') try { - const [workspaceData] = await db - .select({ id: workspace.id, ownerId: workspace.ownerId }) - .from(workspace) - .where(eq(workspace.id, workspaceId)) - .limit(1) + const workspaceData = await getWorkspaceWithOwner(workspaceId) if (!workspaceData) { return notFoundResponse('Workspace') @@ -238,13 
+236,14 @@ async function importSingleWorkflow( const { color: workflowColor } = extractWorkflowMetadata(parsedContent) const workflowId = crypto.randomUUID() const now = new Date() + const dedupedName = await deduplicateWorkflowName(workflowName, workspaceId, targetFolderId) await db.insert(workflow).values({ id: workflowId, userId: ownerId, workspaceId, folderId: targetFolderId, - name: workflowName, + name: dedupedName, description: workflowData.metadata?.description || 'Imported via Admin API', color: workflowColor, lastSynced: now, @@ -261,7 +260,7 @@ async function importSingleWorkflow( await db.delete(workflow).where(eq(workflow.id, workflowId)) return { workflowId: '', - name: workflowName, + name: dedupedName, success: false, error: `Failed to save state: ${saveResult.error}`, } @@ -287,7 +286,7 @@ async function importSingleWorkflow( return { workflowId, - name: workflowName, + name: dedupedName, success: true, } } catch (error) { diff --git a/apps/sim/app/api/v1/admin/workspaces/[id]/members/[memberId]/route.ts b/apps/sim/app/api/v1/admin/workspaces/[id]/members/[memberId]/route.ts index 30afdda571..07da573424 100644 --- a/apps/sim/app/api/v1/admin/workspaces/[id]/members/[memberId]/route.ts +++ b/apps/sim/app/api/v1/admin/workspaces/[id]/members/[memberId]/route.ts @@ -22,10 +22,11 @@ */ import { db } from '@sim/db' -import { permissions, user, workspace } from '@sim/db/schema' +import { permissions, user } from '@sim/db/schema' import { createLogger } from '@sim/logger' import { and, eq } from 'drizzle-orm' import { revokeWorkspaceCredentialMemberships } from '@/lib/credentials/access' +import { getWorkspaceById } from '@/lib/workspaces/permissions/utils' import { withAdminAuthParams } from '@/app/api/v1/admin/middleware' import { badRequestResponse, @@ -46,11 +47,7 @@ export const GET = withAdminAuthParams(async (_, context) => { const { id: workspaceId, memberId } = await context.params try { - const [workspaceData] = await db - .select({ id: 
workspace.id }) - .from(workspace) - .where(eq(workspace.id, workspaceId)) - .limit(1) + const workspaceData = await getWorkspaceById(workspaceId) if (!workspaceData) { return notFoundResponse('Workspace') @@ -113,11 +110,7 @@ export const PATCH = withAdminAuthParams(async (request, context) = return badRequestResponse('permissions must be "admin", "write", or "read"') } - const [workspaceData] = await db - .select({ id: workspace.id }) - .from(workspace) - .where(eq(workspace.id, workspaceId)) - .limit(1) + const workspaceData = await getWorkspaceById(workspaceId) if (!workspaceData) { return notFoundResponse('Workspace') @@ -185,11 +178,7 @@ export const DELETE = withAdminAuthParams(async (_, context) => { const { id: workspaceId, memberId } = await context.params try { - const [workspaceData] = await db - .select({ id: workspace.id }) - .from(workspace) - .where(eq(workspace.id, workspaceId)) - .limit(1) + const workspaceData = await getWorkspaceById(workspaceId) if (!workspaceData) { return notFoundResponse('Workspace') diff --git a/apps/sim/app/api/v1/admin/workspaces/[id]/members/route.ts b/apps/sim/app/api/v1/admin/workspaces/[id]/members/route.ts index 78298feb49..78b70b7d52 100644 --- a/apps/sim/app/api/v1/admin/workspaces/[id]/members/route.ts +++ b/apps/sim/app/api/v1/admin/workspaces/[id]/members/route.ts @@ -32,10 +32,11 @@ import crypto from 'crypto' import { db } from '@sim/db' -import { permissions, user, workspace, workspaceEnvironment } from '@sim/db/schema' +import { permissions, user, workspaceEnvironment } from '@sim/db/schema' import { createLogger } from '@sim/logger' import { and, count, eq } from 'drizzle-orm' import { syncWorkspaceEnvCredentials } from '@/lib/credentials/environment' +import { getWorkspaceById } from '@/lib/workspaces/permissions/utils' import { withAdminAuthParams } from '@/app/api/v1/admin/middleware' import { badRequestResponse, @@ -62,11 +63,7 @@ export const GET = withAdminAuthParams(async (request, context) => const 
{ limit, offset } = parsePaginationParams(url) try { - const [workspaceData] = await db - .select({ id: workspace.id }) - .from(workspace) - .where(eq(workspace.id, workspaceId)) - .limit(1) + const workspaceData = await getWorkspaceById(workspaceId) if (!workspaceData) { return notFoundResponse('Workspace') @@ -134,11 +131,7 @@ export const POST = withAdminAuthParams(async (request, context) => return badRequestResponse('permissions must be "admin", "write", or "read"') } - const [workspaceData] = await db - .select({ id: workspace.id, name: workspace.name }) - .from(workspace) - .where(eq(workspace.id, workspaceId)) - .limit(1) + const workspaceData = await getWorkspaceById(workspaceId) if (!workspaceData) { return notFoundResponse('Workspace') @@ -275,11 +268,7 @@ export const DELETE = withAdminAuthParams(async (request, context) return badRequestResponse('userId query parameter is required') } - const [workspaceData] = await db - .select({ id: workspace.id }) - .from(workspace) - .where(eq(workspace.id, workspaceId)) - .limit(1) + const workspaceData = await getWorkspaceById(workspaceId) if (!workspaceData) { return notFoundResponse('Workspace') diff --git a/apps/sim/app/api/v1/admin/workspaces/[id]/workflows/route.ts b/apps/sim/app/api/v1/admin/workspaces/[id]/workflows/route.ts index ea1ab87fc5..896af40d6a 100644 --- a/apps/sim/app/api/v1/admin/workspaces/[id]/workflows/route.ts +++ b/apps/sim/app/api/v1/admin/workspaces/[id]/workflows/route.ts @@ -17,16 +17,11 @@ */ import { db } from '@sim/db' -import { - workflow, - workflowBlocks, - workflowEdges, - workflowSchedule, - workspace, -} from '@sim/db/schema' +import { workflow, workspace } from '@sim/db/schema' import { createLogger } from '@sim/logger' -import { count, eq, inArray } from 'drizzle-orm' +import { and, count, eq, isNull } from 'drizzle-orm' import { NextResponse } from 'next/server' +import { archiveWorkflowsForWorkspace } from '@/lib/workflows/lifecycle' import { withAdminAuthParams } from 
'@/app/api/v1/admin/middleware' import { internalErrorResponse, listResponse, notFoundResponse } from '@/app/api/v1/admin/responses' import { @@ -51,7 +46,7 @@ export const GET = withAdminAuthParams(async (request, context) => const [workspaceData] = await db .select({ id: workspace.id }) .from(workspace) - .where(eq(workspace.id, workspaceId)) + .where(and(eq(workspace.id, workspaceId), isNull(workspace.archivedAt))) .limit(1) if (!workspaceData) { @@ -59,11 +54,14 @@ export const GET = withAdminAuthParams(async (request, context) => } const [countResult, workflows] = await Promise.all([ - db.select({ total: count() }).from(workflow).where(eq(workflow.workspaceId, workspaceId)), + db + .select({ total: count() }) + .from(workflow) + .where(and(eq(workflow.workspaceId, workspaceId), isNull(workflow.archivedAt))), db .select() .from(workflow) - .where(eq(workflow.workspaceId, workspaceId)) + .where(and(eq(workflow.workspaceId, workspaceId), isNull(workflow.archivedAt))) .orderBy(workflow.name) .limit(limit) .offset(offset), @@ -91,7 +89,7 @@ export const DELETE = withAdminAuthParams(async (request, context) const [workspaceData] = await db .select({ id: workspace.id }) .from(workspace) - .where(eq(workspace.id, workspaceId)) + .where(and(eq(workspace.id, workspaceId), isNull(workspace.archivedAt))) .limit(1) if (!workspaceData) { @@ -101,27 +99,19 @@ export const DELETE = withAdminAuthParams(async (request, context) const workflowsToDelete = await db .select({ id: workflow.id }) .from(workflow) - .where(eq(workflow.workspaceId, workspaceId)) + .where(and(eq(workflow.workspaceId, workspaceId), isNull(workflow.archivedAt))) if (workflowsToDelete.length === 0) { return NextResponse.json({ success: true, deleted: 0 }) } - const workflowIds = workflowsToDelete.map((w) => w.id) - - await db.transaction(async (tx) => { - await Promise.all([ - tx.delete(workflowBlocks).where(inArray(workflowBlocks.workflowId, workflowIds)), - 
tx.delete(workflowEdges).where(inArray(workflowEdges.workflowId, workflowIds)), - tx.delete(workflowSchedule).where(inArray(workflowSchedule.workflowId, workflowIds)), - ]) - - await tx.delete(workflow).where(eq(workflow.workspaceId, workspaceId)) + const deletedCount = await archiveWorkflowsForWorkspace(workspaceId, { + requestId: `admin-workspace-${workspaceId}`, }) - logger.info(`Admin API: Deleted ${workflowIds.length} workflows from workspace ${workspaceId}`) + logger.info(`Admin API: Deleted ${deletedCount} workflows from workspace ${workspaceId}`) - return NextResponse.json({ success: true, deleted: workflowIds.length }) + return NextResponse.json({ success: true, deleted: deletedCount }) } catch (error) { logger.error('Admin API: Failed to delete workspace workflows', { error, workspaceId }) return internalErrorResponse('Failed to delete workflows') diff --git a/apps/sim/app/api/v1/copilot/chat/route.ts b/apps/sim/app/api/v1/copilot/chat/route.ts index 6a3817385b..e3acb50a10 100644 --- a/apps/sim/app/api/v1/copilot/chat/route.ts +++ b/apps/sim/app/api/v1/copilot/chat/route.ts @@ -1,10 +1,9 @@ import { createLogger } from '@sim/logger' import { type NextRequest, NextResponse } from 'next/server' import { z } from 'zod' -import { SIM_AGENT_VERSION } from '@/lib/copilot/constants' import { COPILOT_REQUEST_MODES } from '@/lib/copilot/models' import { orchestrateCopilotStream } from '@/lib/copilot/orchestrator' -import { resolveWorkflowIdForUser } from '@/lib/workflows/utils' +import { getWorkflowById, resolveWorkflowIdForUser } from '@/lib/workflows/utils' import { authenticateV1Request } from '@/app/api/v1/auth' const logger = createLogger('CopilotHeadlessAPI') @@ -48,7 +47,8 @@ export async function POST(req: NextRequest) { const resolved = await resolveWorkflowIdForUser( auth.userId, parsed.workflowId, - parsed.workflowName + parsed.workflowName, + auth.keyType === 'workspace' ? 
auth.workspaceId : undefined ) if (!resolved) { return NextResponse.json( @@ -60,6 +60,16 @@ export async function POST(req: NextRequest) { ) } + if (auth.keyType === 'workspace' && auth.workspaceId) { + const workflow = await getWorkflowById(resolved.workflowId) + if (!workflow?.workspaceId || workflow.workspaceId !== auth.workspaceId) { + return NextResponse.json( + { success: false, error: 'API key is not authorized for this workspace' }, + { status: 403 } + ) + } + } + // Transform mode to transport mode (same as client API) // build and agent both map to 'agent' on the backend const effectiveMode = parsed.mode === 'agent' ? 'build' : parsed.mode @@ -75,8 +85,6 @@ export async function POST(req: NextRequest) { model: selectedModel, mode: transportMode, messageId: crypto.randomUUID(), - version: SIM_AGENT_VERSION, - headless: true, chatId, } @@ -84,6 +92,7 @@ export async function POST(req: NextRequest) { userId: auth.userId, workflowId: resolved.workflowId, chatId, + goRoute: '/api/mcp', autoExecuteTools: parsed.autoExecuteTools, timeout: parsed.timeout, interactive: false, @@ -93,8 +102,7 @@ export async function POST(req: NextRequest) { success: result.success, content: result.content, toolCalls: result.toolCalls, - chatId: result.chatId || chatId, // Return the chatId for conversation continuity - conversationId: result.conversationId, + chatId: result.chatId || chatId, error: result.error, }) } catch (error) { diff --git a/apps/sim/app/api/v1/files/[fileId]/route.ts b/apps/sim/app/api/v1/files/[fileId]/route.ts new file mode 100644 index 0000000000..7007053681 --- /dev/null +++ b/apps/sim/app/api/v1/files/[fileId]/route.ts @@ -0,0 +1,158 @@ +import { createLogger } from '@sim/logger' +import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' +import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log' +import { generateRequestId } from '@/lib/core/utils/request' +import { + deleteWorkspaceFile, + 
downloadWorkspaceFile, + getWorkspaceFile, +} from '@/lib/uploads/contexts/workspace' +import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils' +import { + checkRateLimit, + checkWorkspaceScope, + createRateLimitResponse, +} from '@/app/api/v1/middleware' + +const logger = createLogger('V1FileDetailAPI') + +export const dynamic = 'force-dynamic' +export const revalidate = 0 + +const WorkspaceIdSchema = z.object({ + workspaceId: z.string().min(1, 'workspaceId query parameter is required'), +}) + +interface FileRouteParams { + params: Promise<{ fileId: string }> +} + +/** GET /api/v1/files/[fileId] — Download file content. */ +export async function GET(request: NextRequest, { params }: FileRouteParams) { + const requestId = generateRequestId() + + try { + const rateLimit = await checkRateLimit(request, 'file-detail') + if (!rateLimit.allowed) { + return createRateLimitResponse(rateLimit) + } + + const userId = rateLimit.userId! + const { fileId } = await params + const { searchParams } = new URL(request.url) + + const validation = WorkspaceIdSchema.safeParse({ + workspaceId: searchParams.get('workspaceId'), + }) + if (!validation.success) { + return NextResponse.json( + { error: 'Validation error', details: validation.error.errors }, + { status: 400 } + ) + } + + const { workspaceId } = validation.data + + const scopeError = checkWorkspaceScope(rateLimit, workspaceId) + if (scopeError) return scopeError + + const permission = await getUserEntityPermissions(userId, 'workspace', workspaceId) + if (permission === null) { + return NextResponse.json({ error: 'Access denied' }, { status: 403 }) + } + + const fileRecord = await getWorkspaceFile(workspaceId, fileId) + if (!fileRecord) { + return NextResponse.json({ error: 'File not found' }, { status: 404 }) + } + + const buffer = await downloadWorkspaceFile(fileRecord) + + return new Response(new Uint8Array(buffer), { + status: 200, + headers: { + 'Content-Type': fileRecord.type || 
'application/octet-stream', + 'Content-Disposition': `attachment; filename="${fileRecord.name.replace(/[^\w.-]/g, '_')}"; filename*=UTF-8''${encodeURIComponent(fileRecord.name)}`, + 'Content-Length': String(buffer.length), + 'X-File-Id': fileRecord.id, + 'X-File-Name': encodeURIComponent(fileRecord.name), + 'X-Uploaded-At': + fileRecord.uploadedAt instanceof Date + ? fileRecord.uploadedAt.toISOString() + : String(fileRecord.uploadedAt), + }, + }) + } catch (error) { + logger.error(`[${requestId}] Error downloading file:`, error) + return NextResponse.json({ error: 'Failed to download file' }, { status: 500 }) + } +} + +/** DELETE /api/v1/files/[fileId] — Archive a file. */ +export async function DELETE(request: NextRequest, { params }: FileRouteParams) { + const requestId = generateRequestId() + + try { + const rateLimit = await checkRateLimit(request, 'file-detail') + if (!rateLimit.allowed) { + return createRateLimitResponse(rateLimit) + } + + const userId = rateLimit.userId! + const { fileId } = await params + const { searchParams } = new URL(request.url) + + const validation = WorkspaceIdSchema.safeParse({ + workspaceId: searchParams.get('workspaceId'), + }) + if (!validation.success) { + return NextResponse.json( + { error: 'Validation error', details: validation.error.errors }, + { status: 400 } + ) + } + + const { workspaceId } = validation.data + + const scopeError = checkWorkspaceScope(rateLimit, workspaceId) + if (scopeError) return scopeError + + const permission = await getUserEntityPermissions(userId, 'workspace', workspaceId) + if (permission === null || permission === 'read') { + return NextResponse.json({ error: 'Access denied' }, { status: 403 }) + } + + const fileRecord = await getWorkspaceFile(workspaceId, fileId) + if (!fileRecord) { + return NextResponse.json({ error: 'File not found' }, { status: 404 }) + } + + await deleteWorkspaceFile(workspaceId, fileId) + + logger.info( + `[${requestId}] Archived file: ${fileRecord.name} (${fileId}) from 
workspace ${workspaceId}` + ) + + recordAudit({ + workspaceId, + actorId: userId, + action: AuditAction.FILE_DELETED, + resourceType: AuditResourceType.FILE, + resourceId: fileId, + resourceName: fileRecord.name, + description: `Archived file "${fileRecord.name}" via API`, + request, + }) + + return NextResponse.json({ + success: true, + data: { + message: 'File archived successfully', + }, + }) + } catch (error) { + logger.error(`[${requestId}] Error deleting file:`, error) + return NextResponse.json({ error: 'Failed to delete file' }, { status: 500 }) + } +} diff --git a/apps/sim/app/api/v1/files/route.ts b/apps/sim/app/api/v1/files/route.ts new file mode 100644 index 0000000000..f9d8228e4f --- /dev/null +++ b/apps/sim/app/api/v1/files/route.ts @@ -0,0 +1,194 @@ +import { createLogger } from '@sim/logger' +import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' +import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log' +import { generateRequestId } from '@/lib/core/utils/request' +import { + getWorkspaceFile, + listWorkspaceFiles, + uploadWorkspaceFile, +} from '@/lib/uploads/contexts/workspace' +import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils' +import { + checkRateLimit, + checkWorkspaceScope, + createRateLimitResponse, +} from '@/app/api/v1/middleware' + +const logger = createLogger('V1FilesAPI') + +export const dynamic = 'force-dynamic' +export const revalidate = 0 + +const MAX_FILE_SIZE = 100 * 1024 * 1024 // 100MB + +const ListFilesSchema = z.object({ + workspaceId: z.string().min(1, 'workspaceId query parameter is required'), +}) + +/** GET /api/v1/files — List all files in a workspace. */ +export async function GET(request: NextRequest) { + const requestId = generateRequestId() + + try { + const rateLimit = await checkRateLimit(request, 'files') + if (!rateLimit.allowed) { + return createRateLimitResponse(rateLimit) + } + + const userId = rateLimit.userId! 
+ const { searchParams } = new URL(request.url) + + const validation = ListFilesSchema.safeParse({ + workspaceId: searchParams.get('workspaceId'), + }) + if (!validation.success) { + return NextResponse.json( + { error: 'Validation error', details: validation.error.errors }, + { status: 400 } + ) + } + + const { workspaceId } = validation.data + + const scopeError = checkWorkspaceScope(rateLimit, workspaceId) + if (scopeError) return scopeError + + const permission = await getUserEntityPermissions(userId, 'workspace', workspaceId) + if (permission === null) { + return NextResponse.json({ error: 'Access denied' }, { status: 403 }) + } + + const files = await listWorkspaceFiles(workspaceId) + + return NextResponse.json({ + success: true, + data: { + files: files.map((f) => ({ + id: f.id, + name: f.name, + size: f.size, + type: f.type, + key: f.key, + uploadedBy: f.uploadedBy, + uploadedAt: + f.uploadedAt instanceof Date ? f.uploadedAt.toISOString() : String(f.uploadedAt), + })), + totalCount: files.length, + }, + }) + } catch (error) { + logger.error(`[${requestId}] Error listing files:`, error) + return NextResponse.json({ error: 'Failed to list files' }, { status: 500 }) + } +} + +/** POST /api/v1/files — Upload a file to a workspace. */ +export async function POST(request: NextRequest) { + const requestId = generateRequestId() + + try { + const rateLimit = await checkRateLimit(request, 'files') + if (!rateLimit.allowed) { + return createRateLimitResponse(rateLimit) + } + + const userId = rateLimit.userId! + + let formData: FormData + try { + formData = await request.formData() + } catch { + return NextResponse.json( + { error: 'Request body must be valid multipart form data' }, + { status: 400 } + ) + } + const rawFile = formData.get('file') + const file = rawFile instanceof File ? rawFile : null + const rawWorkspaceId = formData.get('workspaceId') + const workspaceId = typeof rawWorkspaceId === 'string' ? 
rawWorkspaceId : null + + if (!workspaceId) { + return NextResponse.json({ error: 'workspaceId form field is required' }, { status: 400 }) + } + + const scopeError = checkWorkspaceScope(rateLimit, workspaceId) + if (scopeError) return scopeError + + if (!file) { + return NextResponse.json({ error: 'file form field is required' }, { status: 400 }) + } + + if (file.size > MAX_FILE_SIZE) { + return NextResponse.json( + { + error: `File size exceeds 100MB limit (${(file.size / (1024 * 1024)).toFixed(2)}MB)`, + }, + { status: 400 } + ) + } + + const permission = await getUserEntityPermissions(userId, 'workspace', workspaceId) + if (permission === null || permission === 'read') { + return NextResponse.json({ error: 'Access denied' }, { status: 403 }) + } + + const buffer = Buffer.from(await file.arrayBuffer()) + + const userFile = await uploadWorkspaceFile( + workspaceId, + userId, + buffer, + file.name, + file.type || 'application/octet-stream' + ) + + logger.info(`[${requestId}] Uploaded file: ${file.name} to workspace ${workspaceId}`) + + recordAudit({ + workspaceId, + actorId: userId, + action: AuditAction.FILE_UPLOADED, + resourceType: AuditResourceType.FILE, + resourceId: userFile.id, + resourceName: file.name, + description: `Uploaded file "${file.name}" via API`, + request, + }) + + const fileRecord = await getWorkspaceFile(workspaceId, userFile.id) + const uploadedAt = + fileRecord?.uploadedAt instanceof Date + ? fileRecord.uploadedAt.toISOString() + : fileRecord?.uploadedAt + ? String(fileRecord.uploadedAt) + : new Date().toISOString() + + return NextResponse.json({ + success: true, + data: { + file: { + id: userFile.id, + name: userFile.name, + size: userFile.size, + type: userFile.type, + key: userFile.key, + uploadedBy: userId, + uploadedAt, + }, + message: 'File uploaded successfully', + }, + }) + } catch (error) { + const errorMessage = error instanceof Error ? 
error.message : 'Failed to upload file' + const isDuplicate = errorMessage.includes('already exists') + + if (isDuplicate) { + return NextResponse.json({ error: errorMessage }, { status: 409 }) + } + + logger.error(`[${requestId}] Error uploading file:`, error) + return NextResponse.json({ error: 'Failed to upload file' }, { status: 500 }) + } +} diff --git a/apps/sim/app/api/v1/knowledge/[id]/documents/[documentId]/route.ts b/apps/sim/app/api/v1/knowledge/[id]/documents/[documentId]/route.ts new file mode 100644 index 0000000000..b69721329a --- /dev/null +++ b/apps/sim/app/api/v1/knowledge/[id]/documents/[documentId]/route.ts @@ -0,0 +1,182 @@ +import { db } from '@sim/db' +import { document, knowledgeConnector } from '@sim/db/schema' +import { and, eq, isNull } from 'drizzle-orm' +import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' +import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log' +import { deleteDocument } from '@/lib/knowledge/documents/service' +import { + authenticateRequest, + handleError, + resolveKnowledgeBase, + serializeDate, + validateSchema, +} from '@/app/api/v1/knowledge/utils' + +export const dynamic = 'force-dynamic' +export const revalidate = 0 + +interface DocumentDetailRouteParams { + params: Promise<{ id: string; documentId: string }> +} + +const WorkspaceIdSchema = z.object({ + workspaceId: z.string().min(1, 'workspaceId query parameter is required'), +}) + +/** GET /api/v1/knowledge/[id]/documents/[documentId] — Get document details. 
*/ +export async function GET(request: NextRequest, { params }: DocumentDetailRouteParams) { + const auth = await authenticateRequest(request, 'knowledge-detail') + if (auth instanceof NextResponse) return auth + const { requestId, userId, rateLimit } = auth + + try { + const { id: knowledgeBaseId, documentId } = await params + const { searchParams } = new URL(request.url) + + const validation = validateSchema(WorkspaceIdSchema, { + workspaceId: searchParams.get('workspaceId'), + }) + if (!validation.success) return validation.response + + const result = await resolveKnowledgeBase( + knowledgeBaseId, + validation.data.workspaceId, + userId, + rateLimit + ) + if (result instanceof NextResponse) return result + + const docs = await db + .select({ + id: document.id, + knowledgeBaseId: document.knowledgeBaseId, + filename: document.filename, + fileSize: document.fileSize, + mimeType: document.mimeType, + processingStatus: document.processingStatus, + processingError: document.processingError, + processingStartedAt: document.processingStartedAt, + processingCompletedAt: document.processingCompletedAt, + chunkCount: document.chunkCount, + tokenCount: document.tokenCount, + characterCount: document.characterCount, + enabled: document.enabled, + uploadedAt: document.uploadedAt, + connectorId: document.connectorId, + connectorType: knowledgeConnector.connectorType, + sourceUrl: document.sourceUrl, + }) + .from(document) + .leftJoin(knowledgeConnector, eq(document.connectorId, knowledgeConnector.id)) + .where( + and( + eq(document.id, documentId), + eq(document.knowledgeBaseId, knowledgeBaseId), + eq(document.userExcluded, false), + isNull(document.archivedAt), + isNull(document.deletedAt) + ) + ) + .limit(1) + + if (docs.length === 0) { + return NextResponse.json({ error: 'Document not found' }, { status: 404 }) + } + + const doc = docs[0] + + return NextResponse.json({ + success: true, + data: { + document: { + id: doc.id, + knowledgeBaseId: doc.knowledgeBaseId, + 
filename: doc.filename, + fileSize: doc.fileSize, + mimeType: doc.mimeType, + processingStatus: doc.processingStatus, + processingError: doc.processingError, + processingStartedAt: serializeDate(doc.processingStartedAt), + processingCompletedAt: serializeDate(doc.processingCompletedAt), + chunkCount: doc.chunkCount, + tokenCount: doc.tokenCount, + characterCount: doc.characterCount, + enabled: doc.enabled, + connectorId: doc.connectorId, + connectorType: doc.connectorType, + sourceUrl: doc.sourceUrl, + createdAt: serializeDate(doc.uploadedAt), + }, + }, + }) + } catch (error) { + return handleError(requestId, error, 'Failed to get document') + } +} + +/** DELETE /api/v1/knowledge/[id]/documents/[documentId] — Delete a document. */ +export async function DELETE(request: NextRequest, { params }: DocumentDetailRouteParams) { + const auth = await authenticateRequest(request, 'knowledge-detail') + if (auth instanceof NextResponse) return auth + const { requestId, userId, rateLimit } = auth + + try { + const { id: knowledgeBaseId, documentId } = await params + const { searchParams } = new URL(request.url) + + const validation = validateSchema(WorkspaceIdSchema, { + workspaceId: searchParams.get('workspaceId'), + }) + if (!validation.success) return validation.response + + const result = await resolveKnowledgeBase( + knowledgeBaseId, + validation.data.workspaceId, + userId, + rateLimit, + 'write' + ) + if (result instanceof NextResponse) return result + + const docs = await db + .select({ id: document.id, filename: document.filename }) + .from(document) + .where( + and( + eq(document.id, documentId), + eq(document.knowledgeBaseId, knowledgeBaseId), + eq(document.userExcluded, false), + isNull(document.archivedAt), + isNull(document.deletedAt) + ) + ) + .limit(1) + + if (docs.length === 0) { + return NextResponse.json({ error: 'Document not found' }, { status: 404 }) + } + + await deleteDocument(documentId, requestId) + + recordAudit({ + workspaceId: 
validation.data.workspaceId, + actorId: userId, + action: AuditAction.DOCUMENT_DELETED, + resourceType: AuditResourceType.DOCUMENT, + resourceId: documentId, + resourceName: docs[0].filename, + description: `Deleted document "${docs[0].filename}" from knowledge base via API`, + request, + }) + + return NextResponse.json({ + success: true, + data: { + message: 'Document deleted successfully', + }, + }) + } catch (error) { + return handleError(requestId, error, 'Failed to delete document') + } +} diff --git a/apps/sim/app/api/v1/knowledge/[id]/documents/route.ts b/apps/sim/app/api/v1/knowledge/[id]/documents/route.ts new file mode 100644 index 0000000000..193111ee20 --- /dev/null +++ b/apps/sim/app/api/v1/knowledge/[id]/documents/route.ts @@ -0,0 +1,248 @@ +import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' +import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log' +import { + createSingleDocument, + type DocumentData, + getDocuments, + processDocumentsWithQueue, +} from '@/lib/knowledge/documents/service' +import type { DocumentSortField, SortOrder } from '@/lib/knowledge/documents/types' +import { uploadWorkspaceFile } from '@/lib/uploads/contexts/workspace' +import { validateFileType } from '@/lib/uploads/utils/validation' +import { + authenticateRequest, + handleError, + resolveKnowledgeBase, + serializeDate, + validateSchema, +} from '@/app/api/v1/knowledge/utils' + +export const dynamic = 'force-dynamic' +export const revalidate = 0 + +const MAX_FILE_SIZE = 100 * 1024 * 1024 // 100MB + +interface DocumentsRouteParams { + params: Promise<{ id: string }> +} + +const ListDocumentsSchema = z.object({ + workspaceId: z.string().min(1, 'workspaceId query parameter is required'), + limit: z.coerce.number().int().min(1).max(100).default(50), + offset: z.coerce.number().int().min(0).default(0), + search: z.string().optional(), + enabledFilter: z.enum(['all', 'enabled', 'disabled']).default('all'), + sortBy: z + 
.enum([ + 'filename', + 'fileSize', + 'tokenCount', + 'chunkCount', + 'uploadedAt', + 'processingStatus', + 'enabled', + ]) + .default('uploadedAt'), + sortOrder: z.enum(['asc', 'desc']).default('desc'), +}) + +/** GET /api/v1/knowledge/[id]/documents — List documents in a knowledge base. */ +export async function GET(request: NextRequest, { params }: DocumentsRouteParams) { + const auth = await authenticateRequest(request, 'knowledge-detail') + if (auth instanceof NextResponse) return auth + const { requestId, userId, rateLimit } = auth + + try { + const { id: knowledgeBaseId } = await params + const { searchParams } = new URL(request.url) + + const validation = validateSchema(ListDocumentsSchema, { + workspaceId: searchParams.get('workspaceId'), + limit: searchParams.get('limit') ?? undefined, + offset: searchParams.get('offset') ?? undefined, + search: searchParams.get('search') ?? undefined, + enabledFilter: searchParams.get('enabledFilter') ?? undefined, + sortBy: searchParams.get('sortBy') ?? undefined, + sortOrder: searchParams.get('sortOrder') ?? undefined, + }) + if (!validation.success) return validation.response + + const { workspaceId, limit, offset, search, enabledFilter, sortBy, sortOrder } = validation.data + + const result = await resolveKnowledgeBase(knowledgeBaseId, workspaceId, userId, rateLimit) + if (result instanceof NextResponse) return result + + const documentsResult = await getDocuments( + knowledgeBaseId, + { + enabledFilter: enabledFilter === 'all' ? 
undefined : enabledFilter, + search, + limit, + offset, + sortBy: sortBy as DocumentSortField, + sortOrder: sortOrder as SortOrder, + }, + requestId + ) + + return NextResponse.json({ + success: true, + data: { + documents: documentsResult.documents.map((doc) => ({ + id: doc.id, + knowledgeBaseId, + filename: doc.filename, + fileSize: doc.fileSize, + mimeType: doc.mimeType, + processingStatus: doc.processingStatus, + chunkCount: doc.chunkCount, + tokenCount: doc.tokenCount, + characterCount: doc.characterCount, + enabled: doc.enabled, + createdAt: serializeDate(doc.uploadedAt), + })), + pagination: documentsResult.pagination, + }, + }) + } catch (error) { + return handleError(requestId, error, 'Failed to list documents') + } +} + +/** POST /api/v1/knowledge/[id]/documents — Upload a document to a knowledge base. */ +export async function POST(request: NextRequest, { params }: DocumentsRouteParams) { + const auth = await authenticateRequest(request, 'knowledge-detail') + if (auth instanceof NextResponse) return auth + const { requestId, userId, rateLimit } = auth + + try { + const { id: knowledgeBaseId } = await params + + let formData: FormData + try { + formData = await request.formData() + } catch { + return NextResponse.json( + { error: 'Request body must be valid multipart form data' }, + { status: 400 } + ) + } + + const rawFile = formData.get('file') + const file = rawFile instanceof File ? rawFile : null + const rawWorkspaceId = formData.get('workspaceId') + const workspaceId = typeof rawWorkspaceId === 'string' ? 
rawWorkspaceId : null + + if (!workspaceId) { + return NextResponse.json({ error: 'workspaceId form field is required' }, { status: 400 }) + } + + if (!file) { + return NextResponse.json({ error: 'file form field is required' }, { status: 400 }) + } + + if (file.size > MAX_FILE_SIZE) { + return NextResponse.json( + { + error: `File size exceeds 100MB limit (${(file.size / (1024 * 1024)).toFixed(2)}MB)`, + }, + { status: 413 } + ) + } + + const fileTypeError = validateFileType(file.name, file.type || '') + if (fileTypeError) { + return NextResponse.json({ error: fileTypeError.message }, { status: 415 }) + } + + const result = await resolveKnowledgeBase( + knowledgeBaseId, + workspaceId, + userId, + rateLimit, + 'write' + ) + if (result instanceof NextResponse) return result + + const buffer = Buffer.from(await file.arrayBuffer()) + const contentType = file.type || 'application/octet-stream' + + const uploadedFile = await uploadWorkspaceFile( + workspaceId, + userId, + buffer, + file.name, + contentType + ) + + const newDocument = await createSingleDocument( + { + filename: file.name, + fileUrl: uploadedFile.url, + fileSize: file.size, + mimeType: contentType, + }, + knowledgeBaseId, + requestId + ) + + const chunkingConfig = result.kb.chunkingConfig ?? 
{ maxSize: 1024, minSize: 100, overlap: 200 } + + const documentData: DocumentData = { + documentId: newDocument.id, + filename: file.name, + fileUrl: uploadedFile.url, + fileSize: file.size, + mimeType: contentType, + } + + processDocumentsWithQueue( + [documentData], + knowledgeBaseId, + { + chunkSize: chunkingConfig.maxSize, + minCharactersPerChunk: chunkingConfig.minSize, + chunkOverlap: chunkingConfig.overlap, + recipe: 'default', + lang: 'en', + }, + requestId + ).catch(() => { + // Processing errors are logged internally + }) + + recordAudit({ + workspaceId, + actorId: userId, + action: AuditAction.DOCUMENT_UPLOADED, + resourceType: AuditResourceType.DOCUMENT, + resourceId: newDocument.id, + resourceName: file.name, + description: `Uploaded document "${file.name}" to knowledge base via API`, + request, + }) + + return NextResponse.json({ + success: true, + data: { + document: { + id: newDocument.id, + knowledgeBaseId, + filename: newDocument.filename, + fileSize: newDocument.fileSize, + mimeType: newDocument.mimeType, + processingStatus: 'pending', + chunkCount: 0, + tokenCount: 0, + characterCount: 0, + enabled: newDocument.enabled, + createdAt: serializeDate(newDocument.uploadedAt), + }, + message: 'Document uploaded successfully. 
Processing will begin shortly.', + }, + }) + } catch (error) { + return handleError(requestId, error, 'Failed to upload document') + } +} diff --git a/apps/sim/app/api/v1/knowledge/[id]/route.ts b/apps/sim/app/api/v1/knowledge/[id]/route.ts new file mode 100644 index 0000000000..0b7012c877 --- /dev/null +++ b/apps/sim/app/api/v1/knowledge/[id]/route.ts @@ -0,0 +1,175 @@ +import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' +import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log' +import { deleteKnowledgeBase, updateKnowledgeBase } from '@/lib/knowledge/service' +import { + authenticateRequest, + formatKnowledgeBase, + handleError, + parseJsonBody, + resolveKnowledgeBase, + validateSchema, +} from '@/app/api/v1/knowledge/utils' + +export const dynamic = 'force-dynamic' +export const revalidate = 0 + +interface KnowledgeRouteParams { + params: Promise<{ id: string }> +} + +const WorkspaceIdSchema = z.object({ + workspaceId: z.string().min(1, 'workspaceId query parameter is required'), +}) + +const UpdateKBSchema = z + .object({ + workspaceId: z.string().min(1, 'Workspace ID is required'), + name: z.string().min(1).max(255, 'Name must be 255 characters or less').optional(), + description: z.string().max(1000, 'Description must be 1000 characters or less').optional(), + chunkingConfig: z + .object({ + maxSize: z.number().min(100).max(4000), + minSize: z.number().min(1).max(2000), + overlap: z.number().min(0).max(500), + }) + .optional(), + }) + .refine( + (data) => + data.name !== undefined || + data.description !== undefined || + data.chunkingConfig !== undefined, + { message: 'At least one of name, description, or chunkingConfig must be provided' } + ) + +/** GET /api/v1/knowledge/[id] — Get knowledge base details. 
*/ +export async function GET(request: NextRequest, { params }: KnowledgeRouteParams) { + const auth = await authenticateRequest(request, 'knowledge-detail') + if (auth instanceof NextResponse) return auth + const { requestId, userId, rateLimit } = auth + + try { + const { id } = await params + const { searchParams } = new URL(request.url) + + const validation = validateSchema(WorkspaceIdSchema, { + workspaceId: searchParams.get('workspaceId'), + }) + if (!validation.success) return validation.response + + const result = await resolveKnowledgeBase(id, validation.data.workspaceId, userId, rateLimit) + if (result instanceof NextResponse) return result + + return NextResponse.json({ + success: true, + data: { + knowledgeBase: formatKnowledgeBase(result.kb), + }, + }) + } catch (error) { + return handleError(requestId, error, 'Failed to get knowledge base') + } +} + +/** PUT /api/v1/knowledge/[id] — Update a knowledge base. */ +export async function PUT(request: NextRequest, { params }: KnowledgeRouteParams) { + const auth = await authenticateRequest(request, 'knowledge-detail') + if (auth instanceof NextResponse) return auth + const { requestId, userId, rateLimit } = auth + + try { + const { id } = await params + + const body = await parseJsonBody(request) + if (!body.success) return body.response + + const validation = validateSchema(UpdateKBSchema, body.data) + if (!validation.success) return validation.response + + const { workspaceId, name, description, chunkingConfig } = validation.data + + const result = await resolveKnowledgeBase(id, workspaceId, userId, rateLimit, 'write') + if (result instanceof NextResponse) return result + + const updates: { + name?: string + description?: string + chunkingConfig?: { maxSize: number; minSize: number; overlap: number } + } = {} + if (name !== undefined) updates.name = name + if (description !== undefined) updates.description = description + if (chunkingConfig !== undefined) updates.chunkingConfig = chunkingConfig + + const 
updatedKb = await updateKnowledgeBase(id, updates, requestId) + + recordAudit({ + workspaceId, + actorId: userId, + action: AuditAction.KNOWLEDGE_BASE_UPDATED, + resourceType: AuditResourceType.KNOWLEDGE_BASE, + resourceId: id, + resourceName: updatedKb.name, + description: `Updated knowledge base "${updatedKb.name}" via API`, + request, + }) + + return NextResponse.json({ + success: true, + data: { + knowledgeBase: formatKnowledgeBase(updatedKb), + message: 'Knowledge base updated successfully', + }, + }) + } catch (error) { + return handleError(requestId, error, 'Failed to update knowledge base') + } +} + +/** DELETE /api/v1/knowledge/[id] — Delete a knowledge base. */ +export async function DELETE(request: NextRequest, { params }: KnowledgeRouteParams) { + const auth = await authenticateRequest(request, 'knowledge-detail') + if (auth instanceof NextResponse) return auth + const { requestId, userId, rateLimit } = auth + + try { + const { id } = await params + const { searchParams } = new URL(request.url) + + const validation = validateSchema(WorkspaceIdSchema, { + workspaceId: searchParams.get('workspaceId'), + }) + if (!validation.success) return validation.response + + const result = await resolveKnowledgeBase( + id, + validation.data.workspaceId, + userId, + rateLimit, + 'write' + ) + if (result instanceof NextResponse) return result + + await deleteKnowledgeBase(id, requestId) + + recordAudit({ + workspaceId: validation.data.workspaceId, + actorId: userId, + action: AuditAction.KNOWLEDGE_BASE_DELETED, + resourceType: AuditResourceType.KNOWLEDGE_BASE, + resourceId: id, + resourceName: result.kb.name, + description: `Deleted knowledge base "${result.kb.name}" via API`, + request, + }) + + return NextResponse.json({ + success: true, + data: { + message: 'Knowledge base deleted successfully', + }, + }) + } catch (error) { + return handleError(requestId, error, 'Failed to delete knowledge base') + } +} diff --git a/apps/sim/app/api/v1/knowledge/route.ts 
b/apps/sim/app/api/v1/knowledge/route.ts new file mode 100644 index 0000000000..9d45e677bd --- /dev/null +++ b/apps/sim/app/api/v1/knowledge/route.ts @@ -0,0 +1,122 @@ +import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' +import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log' +import { createKnowledgeBase, getKnowledgeBases } from '@/lib/knowledge/service' +import { + authenticateRequest, + formatKnowledgeBase, + handleError, + parseJsonBody, + validateSchema, + validateWorkspaceAccess, +} from '@/app/api/v1/knowledge/utils' + +export const dynamic = 'force-dynamic' +export const revalidate = 0 + +const ListKBSchema = z.object({ + workspaceId: z.string().min(1, 'workspaceId query parameter is required'), +}) + +const ChunkingConfigSchema = z.object({ + maxSize: z.number().min(100).max(4000).default(1024), + minSize: z.number().min(1).max(2000).default(100), + overlap: z.number().min(0).max(500).default(200), +}) + +const CreateKBSchema = z.object({ + workspaceId: z.string().min(1, 'Workspace ID is required'), + name: z.string().min(1, 'Name is required').max(255, 'Name must be 255 characters or less'), + description: z.string().max(1000, 'Description must be 1000 characters or less').optional(), + chunkingConfig: ChunkingConfigSchema.optional().default({ + maxSize: 1024, + minSize: 100, + overlap: 200, + }), +}) + +/** GET /api/v1/knowledge — List knowledge bases in a workspace. 
*/ +export async function GET(request: NextRequest) { + const auth = await authenticateRequest(request, 'knowledge') + if (auth instanceof NextResponse) return auth + const { requestId, userId, rateLimit } = auth + + try { + const { searchParams } = new URL(request.url) + const validation = validateSchema(ListKBSchema, { + workspaceId: searchParams.get('workspaceId'), + }) + if (!validation.success) return validation.response + + const { workspaceId } = validation.data + + const accessError = await validateWorkspaceAccess(rateLimit, userId, workspaceId) + if (accessError) return accessError + + const knowledgeBases = await getKnowledgeBases(userId, workspaceId) + + return NextResponse.json({ + success: true, + data: { + knowledgeBases: knowledgeBases.map(formatKnowledgeBase), + totalCount: knowledgeBases.length, + }, + }) + } catch (error) { + return handleError(requestId, error, 'Failed to list knowledge bases') + } +} + +/** POST /api/v1/knowledge — Create a new knowledge base. */ +export async function POST(request: NextRequest) { + const auth = await authenticateRequest(request, 'knowledge') + if (auth instanceof NextResponse) return auth + const { requestId, userId, rateLimit } = auth + + try { + const body = await parseJsonBody(request) + if (!body.success) return body.response + + const validation = validateSchema(CreateKBSchema, body.data) + if (!validation.success) return validation.response + + const { workspaceId, name, description, chunkingConfig } = validation.data + + const accessError = await validateWorkspaceAccess(rateLimit, userId, workspaceId, 'write') + if (accessError) return accessError + + const kb = await createKnowledgeBase( + { + name, + description, + workspaceId, + userId, + embeddingModel: 'text-embedding-3-small', + embeddingDimension: 1536, + chunkingConfig: chunkingConfig ?? 
{ maxSize: 1024, minSize: 100, overlap: 200 }, + }, + requestId + ) + + recordAudit({ + workspaceId, + actorId: userId, + action: AuditAction.KNOWLEDGE_BASE_CREATED, + resourceType: AuditResourceType.KNOWLEDGE_BASE, + resourceId: kb.id, + resourceName: kb.name, + description: `Created knowledge base "${kb.name}" via API`, + request, + }) + + return NextResponse.json({ + success: true, + data: { + knowledgeBase: formatKnowledgeBase(kb), + message: 'Knowledge base created successfully', + }, + }) + } catch (error) { + return handleError(requestId, error, 'Failed to create knowledge base') + } +} diff --git a/apps/sim/app/api/v1/knowledge/search/route.ts b/apps/sim/app/api/v1/knowledge/search/route.ts new file mode 100644 index 0000000000..1b50d5d8af --- /dev/null +++ b/apps/sim/app/api/v1/knowledge/search/route.ts @@ -0,0 +1,268 @@ +import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' +import { ALL_TAG_SLOTS } from '@/lib/knowledge/constants' +import { getDocumentTagDefinitions } from '@/lib/knowledge/tags/service' +import { buildUndefinedTagsError, validateTagValue } from '@/lib/knowledge/tags/utils' +import type { StructuredFilter } from '@/lib/knowledge/types' +import { + generateSearchEmbedding, + getDocumentNamesByIds, + getQueryStrategy, + handleTagAndVectorSearch, + handleTagOnlySearch, + handleVectorOnlySearch, + type SearchResult, +} from '@/app/api/knowledge/search/utils' +import { checkKnowledgeBaseAccess } from '@/app/api/knowledge/utils' +import { + authenticateRequest, + handleError, + parseJsonBody, + validateSchema, + validateWorkspaceAccess, +} from '@/app/api/v1/knowledge/utils' + +export const dynamic = 'force-dynamic' +export const revalidate = 0 + +const StructuredTagFilterSchema = z.object({ + tagName: z.string(), + fieldType: z.enum(['text', 'number', 'date', 'boolean']).optional(), + operator: z.string().default('eq'), + value: z.union([z.string(), z.number(), z.boolean()]), + valueTo: z.union([z.string(), 
z.number()]).optional(), +}) + +const SearchSchema = z + .object({ + workspaceId: z.string().min(1, 'Workspace ID is required'), + knowledgeBaseIds: z.union([ + z.string().min(1, 'Knowledge base ID is required'), + z + .array(z.string().min(1)) + .min(1, 'At least one knowledge base ID is required') + .max(20, 'Maximum 20 knowledge base IDs allowed'), + ]), + query: z.string().optional(), + topK: z.number().min(1).max(100).default(10), + tagFilters: z.array(StructuredTagFilterSchema).optional(), + }) + .refine( + (data) => { + const hasQuery = data.query && data.query.trim().length > 0 + const hasTagFilters = data.tagFilters && data.tagFilters.length > 0 + return hasQuery || hasTagFilters + }, + { + message: 'Either query or tagFilters must be provided', + } + ) + +/** POST /api/v1/knowledge/search — Vector search across knowledge bases. */ +export async function POST(request: NextRequest) { + const auth = await authenticateRequest(request, 'knowledge-search') + if (auth instanceof NextResponse) return auth + const { requestId, userId, rateLimit } = auth + + try { + const body = await parseJsonBody(request) + if (!body.success) return body.response + + const validation = validateSchema(SearchSchema, body.data) + if (!validation.success) return validation.response + + const { workspaceId, topK, query, tagFilters } = validation.data + + const accessError = await validateWorkspaceAccess(rateLimit, userId, workspaceId) + if (accessError) return accessError + + const knowledgeBaseIds = Array.isArray(validation.data.knowledgeBaseIds) + ? 
validation.data.knowledgeBaseIds + : [validation.data.knowledgeBaseIds] + + const accessChecks = await Promise.all( + knowledgeBaseIds.map((kbId) => checkKnowledgeBaseAccess(kbId, userId)) + ) + const accessibleKbIds = knowledgeBaseIds.filter( + (_, idx) => + accessChecks[idx]?.hasAccess && + accessChecks[idx]?.knowledgeBase?.workspaceId === workspaceId + ) + + if (accessibleKbIds.length === 0) { + return NextResponse.json( + { error: 'Knowledge base not found or access denied' }, + { status: 404 } + ) + } + + const inaccessibleKbIds = knowledgeBaseIds.filter((id) => !accessibleKbIds.includes(id)) + if (inaccessibleKbIds.length > 0) { + return NextResponse.json( + { error: `Knowledge bases not found or access denied: ${inaccessibleKbIds.join(', ')}` }, + { status: 404 } + ) + } + + let structuredFilters: StructuredFilter[] = [] + const tagDefsCache = new Map>>() + + if (tagFilters && tagFilters.length > 0 && accessibleKbIds.length > 1) { + return NextResponse.json( + { error: 'Tag filters are only supported when searching a single knowledge base' }, + { status: 400 } + ) + } + + if (tagFilters && tagFilters.length > 0 && accessibleKbIds.length > 0) { + const kbId = accessibleKbIds[0] + const tagDefs = await getDocumentTagDefinitions(kbId) + tagDefsCache.set(kbId, tagDefs) + + const displayNameToTagDef: Record = {} + tagDefs.forEach((def) => { + displayNameToTagDef[def.displayName] = { + tagSlot: def.tagSlot, + fieldType: def.fieldType, + } + }) + + const undefinedTags: string[] = [] + const typeErrors: string[] = [] + + for (const filter of tagFilters) { + const tagDef = displayNameToTagDef[filter.tagName] + if (!tagDef) { + undefinedTags.push(filter.tagName) + continue + } + const validationError = validateTagValue( + filter.tagName, + String(filter.value), + tagDef.fieldType + ) + if (validationError) { + typeErrors.push(validationError) + } + } + + if (undefinedTags.length > 0 || typeErrors.length > 0) { + const errorParts: string[] = [] + if 
(undefinedTags.length > 0) { + errorParts.push(buildUndefinedTagsError(undefinedTags)) + } + if (typeErrors.length > 0) { + errorParts.push(...typeErrors) + } + return NextResponse.json({ error: errorParts.join('\n') }, { status: 400 }) + } + + structuredFilters = tagFilters.map((filter) => { + const tagDef = displayNameToTagDef[filter.tagName]! + return { + tagSlot: tagDef.tagSlot, + fieldType: tagDef.fieldType, + operator: filter.operator, + value: filter.value, + valueTo: filter.valueTo, + } + }) + } + + const hasQuery = query && query.trim().length > 0 + const hasFilters = structuredFilters.length > 0 + + let results: SearchResult[] + + if (!hasQuery && hasFilters) { + results = await handleTagOnlySearch({ + knowledgeBaseIds: accessibleKbIds, + topK, + structuredFilters, + }) + } else if (hasQuery && hasFilters) { + const strategy = getQueryStrategy(accessibleKbIds.length, topK) + const queryVector = JSON.stringify( + await generateSearchEmbedding(query!, undefined, workspaceId) + ) + results = await handleTagAndVectorSearch({ + knowledgeBaseIds: accessibleKbIds, + topK, + structuredFilters, + queryVector, + distanceThreshold: strategy.distanceThreshold, + }) + } else if (hasQuery) { + const strategy = getQueryStrategy(accessibleKbIds.length, topK) + const queryVector = JSON.stringify( + await generateSearchEmbedding(query!, undefined, workspaceId) + ) + results = await handleVectorOnlySearch({ + knowledgeBaseIds: accessibleKbIds, + topK, + queryVector, + distanceThreshold: strategy.distanceThreshold, + }) + } else { + return NextResponse.json( + { error: 'Either query or tagFilters must be provided' }, + { status: 400 } + ) + } + + const tagDefsResults = await Promise.all( + accessibleKbIds.map(async (kbId) => { + try { + const tagDefs = tagDefsCache.get(kbId) ?? 
(await getDocumentTagDefinitions(kbId)) + const map: Record = {} + tagDefs.forEach((def) => { + map[def.tagSlot] = def.displayName + }) + return { kbId, map } + } catch { + return { kbId, map: {} as Record } + } + }) + ) + const tagDefinitionsMap: Record> = {} + tagDefsResults.forEach(({ kbId, map }) => { + tagDefinitionsMap[kbId] = map + }) + + const documentIds = results.map((r) => r.documentId) + const documentNameMap = await getDocumentNamesByIds(documentIds) + + return NextResponse.json({ + success: true, + data: { + results: results.map((result) => { + const kbTagMap = tagDefinitionsMap[result.knowledgeBaseId] || {} + const tags: Record = {} + + ALL_TAG_SLOTS.forEach((slot) => { + const tagValue = result[slot as keyof SearchResult] + if (tagValue !== null && tagValue !== undefined) { + const displayName = kbTagMap[slot] || slot + tags[displayName] = tagValue as string | number | boolean | Date | null + } + }) + + return { + documentId: result.documentId, + documentName: documentNameMap[result.documentId] || undefined, + content: result.content, + chunkIndex: result.chunkIndex, + metadata: tags, + similarity: hasQuery ? 
1 - result.distance : 1, + } + }), + query: query || '', + knowledgeBaseIds: accessibleKbIds, + topK, + totalResults: results.length, + }, + }) + } catch (error) { + return handleError(requestId, error, 'Failed to perform search') + } +} diff --git a/apps/sim/app/api/v1/knowledge/utils.ts b/apps/sim/app/api/v1/knowledge/utils.ts new file mode 100644 index 0000000000..9908457054 --- /dev/null +++ b/apps/sim/app/api/v1/knowledge/utils.ts @@ -0,0 +1,187 @@ +import { createLogger } from '@sim/logger' +import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' +import { generateRequestId } from '@/lib/core/utils/request' +import { getKnowledgeBaseById } from '@/lib/knowledge/service' +import type { KnowledgeBaseWithCounts } from '@/lib/knowledge/types' +import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils' +import { + checkRateLimit, + checkWorkspaceScope, + createRateLimitResponse, + type RateLimitResult, +} from '@/app/api/v1/middleware' + +const logger = createLogger('V1KnowledgeAPI') + +type EndpointKey = 'knowledge' | 'knowledge-detail' | 'knowledge-search' + +/** + * Successful authentication result with request context + */ +export interface AuthorizedRequest { + requestId: string + userId: string + rateLimit: RateLimitResult +} + +/** + * Authenticates and rate-limits a v1 knowledge API request. + * Returns NextResponse on failure, AuthorizedRequest on success. + */ +export async function authenticateRequest( + request: NextRequest, + endpoint: EndpointKey +): Promise { + const requestId = generateRequestId() + const rateLimit = await checkRateLimit(request, endpoint) + if (!rateLimit.allowed) { + return createRateLimitResponse(rateLimit) + } + return { requestId, userId: rateLimit.userId!, rateLimit } +} + +/** + * Validates workspace scope and user permission level. + * Returns null on success, NextResponse on failure. 
+ */ +export async function validateWorkspaceAccess( + rateLimit: RateLimitResult, + userId: string, + workspaceId: string, + level: 'read' | 'write' = 'read' +): Promise { + const scopeError = checkWorkspaceScope(rateLimit, workspaceId) + if (scopeError) return scopeError + + const permission = await getUserEntityPermissions(userId, 'workspace', workspaceId) + if (permission === null) { + return NextResponse.json({ error: 'Access denied' }, { status: 403 }) + } + if (level === 'write' && permission === 'read') { + return NextResponse.json({ error: 'Access denied' }, { status: 403 }) + } + return null +} + +/** + * Fetches a KB by ID, validates it exists, belongs to the workspace, + * and the user has permission. Returns the KB or a NextResponse error. + */ +export async function resolveKnowledgeBase( + id: string, + workspaceId: string, + userId: string, + rateLimit: RateLimitResult, + level: 'read' | 'write' = 'read' +): Promise<{ kb: KnowledgeBaseWithCounts } | NextResponse> { + const accessError = await validateWorkspaceAccess(rateLimit, userId, workspaceId, level) + if (accessError) return accessError + + const kb = await getKnowledgeBaseById(id) + if (!kb) { + return NextResponse.json({ error: 'Knowledge base not found' }, { status: 404 }) + } + if (kb.workspaceId !== workspaceId) { + return NextResponse.json({ error: 'Knowledge base not found' }, { status: 404 }) + } + return { kb } +} + +/** + * Validates data against a Zod schema with consistent error response. + */ +export function validateSchema( + schema: S, + data: unknown +): { success: true; data: z.output } | { success: false; response: NextResponse } { + const result = schema.safeParse(data) + if (!result.success) { + return { + success: false, + response: NextResponse.json( + { error: 'Validation error', details: result.error.errors }, + { status: 400 } + ), + } + } + return { success: true, data: result.data } +} + +/** + * Safely parses a JSON request body with consistent error response. 
+ */ +export async function parseJsonBody( + request: NextRequest +): Promise<{ success: true; data: unknown } | { success: false; response: NextResponse }> { + try { + const data = await request.json() + return { success: true, data } + } catch { + return { + success: false, + response: NextResponse.json({ error: 'Request body must be valid JSON' }, { status: 400 }), + } + } +} + +/** + * Serializes a date value for JSON responses. + */ +export function serializeDate(date: Date | string | null | undefined): string | null { + if (date === null || date === undefined) return null + if (date instanceof Date) return date.toISOString() + return String(date) +} + +/** + * Formats a KnowledgeBaseWithCounts into the API response shape. + */ +export function formatKnowledgeBase(kb: KnowledgeBaseWithCounts) { + return { + id: kb.id, + name: kb.name, + description: kb.description, + tokenCount: kb.tokenCount, + embeddingModel: kb.embeddingModel, + embeddingDimension: kb.embeddingDimension, + chunkingConfig: kb.chunkingConfig, + docCount: kb.docCount, + connectorTypes: kb.connectorTypes, + createdAt: serializeDate(kb.createdAt), + updatedAt: serializeDate(kb.updatedAt), + } +} + +/** + * Handles unexpected errors with consistent logging and response. 
+ */ +export function handleError( + requestId: string, + error: unknown, + defaultMessage: string +): NextResponse { + if (error instanceof z.ZodError) { + return NextResponse.json({ error: 'Validation error', details: error.errors }, { status: 400 }) + } + + if (error instanceof Error) { + if (error.message.includes('does not have permission')) { + return NextResponse.json({ error: 'Access denied' }, { status: 403 }) + } + + const isStorageLimitError = + error.message.includes('Storage limit exceeded') || error.message.includes('storage limit') + if (isStorageLimitError) { + return NextResponse.json({ error: 'Storage limit exceeded' }, { status: 413 }) + } + + const isDuplicate = error.message.includes('already exists') + if (isDuplicate) { + return NextResponse.json({ error: 'Resource already exists' }, { status: 409 }) + } + } + + logger.error(`[${requestId}] ${defaultMessage}:`, error) + return NextResponse.json({ error: defaultMessage }, { status: 500 }) +} diff --git a/apps/sim/app/api/v1/logs/[id]/route.ts b/apps/sim/app/api/v1/logs/[id]/route.ts index b1d8f89ff3..6e5176c576 100644 --- a/apps/sim/app/api/v1/logs/[id]/route.ts +++ b/apps/sim/app/api/v1/logs/[id]/route.ts @@ -47,12 +47,12 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{ workflowUpdatedAt: workflow.updatedAt, }) .from(workflowExecutionLogs) - .innerJoin(workflow, eq(workflowExecutionLogs.workflowId, workflow.id)) + .leftJoin(workflow, eq(workflowExecutionLogs.workflowId, workflow.id)) .innerJoin( permissions, and( eq(permissions.entityType, 'workspace'), - eq(permissions.entityId, workflow.workspaceId), + eq(permissions.entityId, workflowExecutionLogs.workspaceId), eq(permissions.userId, userId) ) ) @@ -66,7 +66,7 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{ const workflowSummary = { id: log.workflowId, - name: log.workflowName, + name: log.workflowName || 'Deleted Workflow', description: log.workflowDescription, color: 
log.workflowColor, folderId: log.workflowFolderId, @@ -74,6 +74,7 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{ workspaceId: log.workflowWorkspaceId, createdAt: log.workflowCreatedAt, updatedAt: log.workflowUpdatedAt, + deleted: !log.workflowName, } const response = { diff --git a/apps/sim/app/api/v1/logs/executions/[executionId]/route.ts b/apps/sim/app/api/v1/logs/executions/[executionId]/route.ts index 5c2967ef73..f791c13b25 100644 --- a/apps/sim/app/api/v1/logs/executions/[executionId]/route.ts +++ b/apps/sim/app/api/v1/logs/executions/[executionId]/route.ts @@ -1,10 +1,5 @@ import { db } from '@sim/db' -import { - permissions, - workflow, - workflowExecutionLogs, - workflowExecutionSnapshots, -} from '@sim/db/schema' +import { permissions, workflowExecutionLogs, workflowExecutionSnapshots } from '@sim/db/schema' import { createLogger } from '@sim/logger' import { and, eq } from 'drizzle-orm' import { type NextRequest, NextResponse } from 'next/server' @@ -31,15 +26,13 @@ export async function GET( const rows = await db .select({ log: workflowExecutionLogs, - workflow: workflow, }) .from(workflowExecutionLogs) - .innerJoin(workflow, eq(workflowExecutionLogs.workflowId, workflow.id)) .innerJoin( permissions, and( eq(permissions.entityType, 'workspace'), - eq(permissions.entityId, workflow.workspaceId), + eq(permissions.entityId, workflowExecutionLogs.workspaceId), eq(permissions.userId, userId) ) ) diff --git a/apps/sim/app/api/v1/logs/route.ts b/apps/sim/app/api/v1/logs/route.ts index 83a7b62192..bc9562fd27 100644 --- a/apps/sim/app/api/v1/logs/route.ts +++ b/apps/sim/app/api/v1/logs/route.ts @@ -123,12 +123,12 @@ export async function GET(request: NextRequest) { workflowDescription: workflow.description, }) .from(workflowExecutionLogs) - .innerJoin(workflow, eq(workflowExecutionLogs.workflowId, workflow.id)) + .leftJoin(workflow, eq(workflowExecutionLogs.workflowId, workflow.id)) .innerJoin( permissions, and( 
eq(permissions.entityType, 'workspace'), - eq(permissions.entityId, params.workspaceId), + eq(permissions.entityId, workflowExecutionLogs.workspaceId), eq(permissions.userId, userId) ) ) @@ -168,8 +168,9 @@ export async function GET(request: NextRequest) { if (params.details === 'full') { result.workflow = { id: log.workflowId, - name: log.workflowName, + name: log.workflowName || 'Deleted Workflow', description: log.workflowDescription, + deleted: !log.workflowName, } if (log.cost) { diff --git a/apps/sim/app/api/v1/middleware.ts b/apps/sim/app/api/v1/middleware.ts index 60a7b93474..ad42be802a 100644 --- a/apps/sim/app/api/v1/middleware.ts +++ b/apps/sim/app/api/v1/middleware.ts @@ -1,7 +1,8 @@ import { createLogger } from '@sim/logger' import { type NextRequest, NextResponse } from 'next/server' import { getHighestPrioritySubscription } from '@/lib/billing/core/subscription' -import { RateLimiter } from '@/lib/core/rate-limiter' +import type { SubscriptionPlan } from '@/lib/core/rate-limiter' +import { getRateLimit, RateLimiter } from '@/lib/core/rate-limiter' import { authenticateV1Request } from '@/app/api/v1/auth' const logger = createLogger('V1Middleware') @@ -14,12 +15,29 @@ export interface RateLimitResult { limit: number retryAfterMs?: number userId?: string + workspaceId?: string + keyType?: 'personal' | 'workspace' error?: string } export async function checkRateLimit( request: NextRequest, - endpoint: 'logs' | 'logs-detail' | 'workflows' | 'workflow-detail' | 'audit-logs' = 'logs' + endpoint: + | 'logs' + | 'logs-detail' + | 'workflows' + | 'workflow-detail' + | 'audit-logs' + | 'tables' + | 'table-detail' + | 'table-rows' + | 'table-row-detail' + | 'table-columns' + | 'files' + | 'file-detail' + | 'knowledge' + | 'knowledge-detail' + | 'knowledge-search' = 'logs' ): Promise { try { const auth = await authenticateV1Request(request) @@ -51,20 +69,18 @@ export async function checkRateLimit( }) } - const rateLimitStatus = await 
rateLimiter.getRateLimitStatusWithSubscription( - userId, - subscription, - 'api-endpoint', - false - ) + const plan = (subscription?.plan || 'free') as SubscriptionPlan + const config = getRateLimit(plan, 'api-endpoint') return { allowed: result.allowed, remaining: result.remaining, resetAt: result.resetAt, - limit: rateLimitStatus.requestsPerMinute, + limit: config.refillRate, retryAfterMs: result.retryAfterMs, userId, + workspaceId: auth.workspaceId, + keyType: auth.keyType, } } catch (error) { logger.error('Rate limit check error', { error }) @@ -89,26 +105,40 @@ export function createRateLimitResponse(result: RateLimitResult): NextResponse { return NextResponse.json({ error: result.error || 'Unauthorized' }, { status: 401, headers }) } - if (!result.allowed) { - const retryAfterSeconds = result.retryAfterMs - ? Math.ceil(result.retryAfterMs / 1000) - : Math.ceil((result.resetAt.getTime() - Date.now()) / 1000) + const retryAfterSeconds = result.retryAfterMs + ? Math.ceil(result.retryAfterMs / 1000) + : Math.ceil((result.resetAt.getTime() - Date.now()) / 1000) - return NextResponse.json( - { - error: 'Rate limit exceeded', - message: `API rate limit exceeded. Please retry after ${result.resetAt.toISOString()}`, - retryAfter: result.resetAt.getTime(), + return NextResponse.json( + { + error: 'Rate limit exceeded', + message: `API rate limit exceeded. Please retry after ${result.resetAt.toISOString()}`, + retryAfter: result.resetAt.getTime(), + }, + { + status: 429, + headers: { + ...headers, + 'Retry-After': retryAfterSeconds.toString(), }, - { - status: 429, - headers: { - ...headers, - 'Retry-After': retryAfterSeconds.toString(), - }, - } + } + ) +} + +/** Verify that a workspace-scoped API key is only used for its own workspace. 
*/ +export function checkWorkspaceScope( + rateLimit: RateLimitResult, + requestedWorkspaceId: string +): NextResponse | null { + if ( + rateLimit.keyType === 'workspace' && + rateLimit.workspaceId && + rateLimit.workspaceId !== requestedWorkspaceId + ) { + return NextResponse.json( + { error: 'API key is not authorized for this workspace' }, + { status: 403 } ) } - - return NextResponse.json({ error: 'Bad request' }, { status: 400, headers }) + return null } diff --git a/apps/sim/app/api/v1/tables/[tableId]/columns/route.ts b/apps/sim/app/api/v1/tables/[tableId]/columns/route.ts new file mode 100644 index 0000000000..fb707274bf --- /dev/null +++ b/apps/sim/app/api/v1/tables/[tableId]/columns/route.ts @@ -0,0 +1,305 @@ +import { createLogger } from '@sim/logger' +import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' +import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log' +import { generateRequestId } from '@/lib/core/utils/request' +import { + addTableColumn, + deleteColumn, + renameColumn, + updateColumnConstraints, + updateColumnType, +} from '@/lib/table' +import { + accessError, + CreateColumnSchema, + checkAccess, + DeleteColumnSchema, + normalizeColumn, + UpdateColumnSchema, +} from '@/app/api/table/utils' +import { + checkRateLimit, + checkWorkspaceScope, + createRateLimitResponse, +} from '@/app/api/v1/middleware' + +const logger = createLogger('V1TableColumnsAPI') + +export const dynamic = 'force-dynamic' +export const revalidate = 0 + +interface ColumnsRouteParams { + params: Promise<{ tableId: string }> +} + +/** POST /api/v1/tables/[tableId]/columns — Add a column to the table schema. 
*/ +export async function POST(request: NextRequest, { params }: ColumnsRouteParams) { + const requestId = generateRequestId() + const { tableId } = await params + + try { + const rateLimit = await checkRateLimit(request, 'table-columns') + if (!rateLimit.allowed) { + return createRateLimitResponse(rateLimit) + } + + const userId = rateLimit.userId! + + let body: unknown + try { + body = await request.json() + } catch { + return NextResponse.json({ error: 'Request body must be valid JSON' }, { status: 400 }) + } + + const validated = CreateColumnSchema.parse(body) + + const scopeError = checkWorkspaceScope(rateLimit, validated.workspaceId) + if (scopeError) return scopeError + + const result = await checkAccess(tableId, userId, 'write') + if (!result.ok) return accessError(result, requestId, tableId) + + const { table } = result + + if (table.workspaceId !== validated.workspaceId) { + return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 }) + } + + const updatedTable = await addTableColumn(tableId, validated.column, requestId) + + recordAudit({ + workspaceId: validated.workspaceId, + actorId: userId, + action: AuditAction.TABLE_UPDATED, + resourceType: AuditResourceType.TABLE, + resourceId: tableId, + resourceName: table.name, + description: `Added column "${validated.column.name}" to table "${table.name}"`, + metadata: { column: validated.column }, + request, + }) + + return NextResponse.json({ + success: true, + data: { + columns: updatedTable.schema.columns.map(normalizeColumn), + }, + }) + } catch (error) { + if (error instanceof z.ZodError) { + return NextResponse.json( + { error: 'Validation error', details: error.errors }, + { status: 400 } + ) + } + + if (error instanceof Error) { + if (error.message.includes('already exists') || error.message.includes('maximum column')) { + return NextResponse.json({ error: error.message }, { status: 400 }) + } + if (error.message === 'Table not found') { + return NextResponse.json({ error: 
error.message }, { status: 404 }) + } + } + + logger.error(`[${requestId}] Error adding column to table ${tableId}:`, error) + return NextResponse.json({ error: 'Failed to add column' }, { status: 500 }) + } +} + +/** PATCH /api/v1/tables/[tableId]/columns — Update a column (rename, type change, constraints). */ +export async function PATCH(request: NextRequest, { params }: ColumnsRouteParams) { + const requestId = generateRequestId() + const { tableId } = await params + + try { + const rateLimit = await checkRateLimit(request, 'table-columns') + if (!rateLimit.allowed) { + return createRateLimitResponse(rateLimit) + } + + const userId = rateLimit.userId! + + let body: unknown + try { + body = await request.json() + } catch { + return NextResponse.json({ error: 'Request body must be valid JSON' }, { status: 400 }) + } + + const validated = UpdateColumnSchema.parse(body) + + const scopeError = checkWorkspaceScope(rateLimit, validated.workspaceId) + if (scopeError) return scopeError + + const result = await checkAccess(tableId, userId, 'write') + if (!result.ok) return accessError(result, requestId, tableId) + + const { table } = result + + if (table.workspaceId !== validated.workspaceId) { + return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 }) + } + + const { updates } = validated + let updatedTable = null + + if (updates.name) { + updatedTable = await renameColumn( + { tableId, oldName: validated.columnName, newName: updates.name }, + requestId + ) + } + + if (updates.type) { + updatedTable = await updateColumnType( + { tableId, columnName: updates.name ?? validated.columnName, newType: updates.type }, + requestId + ) + } + + if (updates.required !== undefined || updates.unique !== undefined) { + updatedTable = await updateColumnConstraints( + { + tableId, + columnName: updates.name ?? validated.columnName, + ...(updates.required !== undefined ? { required: updates.required } : {}), + ...(updates.unique !== undefined ? 
{ unique: updates.unique } : {}), + }, + requestId + ) + } + + if (!updatedTable) { + return NextResponse.json({ error: 'No updates specified' }, { status: 400 }) + } + + recordAudit({ + workspaceId: validated.workspaceId, + actorId: userId, + action: AuditAction.TABLE_UPDATED, + resourceType: AuditResourceType.TABLE, + resourceId: tableId, + resourceName: table.name, + description: `Updated column "${validated.columnName}" in table "${table.name}"`, + metadata: { columnName: validated.columnName, updates }, + request, + }) + + return NextResponse.json({ + success: true, + data: { + columns: updatedTable.schema.columns.map(normalizeColumn), + }, + }) + } catch (error) { + if (error instanceof z.ZodError) { + return NextResponse.json( + { error: 'Validation error', details: error.errors }, + { status: 400 } + ) + } + + if (error instanceof Error) { + const msg = error.message + if (msg.includes('not found') || msg.includes('Table not found')) { + return NextResponse.json({ error: msg }, { status: 404 }) + } + if ( + msg.includes('already exists') || + msg.includes('Cannot delete the last column') || + msg.includes('Cannot set column') || + msg.includes('Invalid column') || + msg.includes('exceeds maximum') || + msg.includes('incompatible') || + msg.includes('duplicate') + ) { + return NextResponse.json({ error: msg }, { status: 400 }) + } + } + + logger.error(`[${requestId}] Error updating column in table ${tableId}:`, error) + return NextResponse.json({ error: 'Failed to update column' }, { status: 500 }) + } +} + +/** DELETE /api/v1/tables/[tableId]/columns — Delete a column from the table schema. */ +export async function DELETE(request: NextRequest, { params }: ColumnsRouteParams) { + const requestId = generateRequestId() + const { tableId } = await params + + try { + const rateLimit = await checkRateLimit(request, 'table-columns') + if (!rateLimit.allowed) { + return createRateLimitResponse(rateLimit) + } + + const userId = rateLimit.userId! 
+ + let body: unknown + try { + body = await request.json() + } catch { + return NextResponse.json({ error: 'Request body must be valid JSON' }, { status: 400 }) + } + + const validated = DeleteColumnSchema.parse(body) + + const scopeError = checkWorkspaceScope(rateLimit, validated.workspaceId) + if (scopeError) return scopeError + + const result = await checkAccess(tableId, userId, 'write') + if (!result.ok) return accessError(result, requestId, tableId) + + const { table } = result + + if (table.workspaceId !== validated.workspaceId) { + return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 }) + } + + const updatedTable = await deleteColumn( + { tableId, columnName: validated.columnName }, + requestId + ) + + recordAudit({ + workspaceId: validated.workspaceId, + actorId: userId, + action: AuditAction.TABLE_UPDATED, + resourceType: AuditResourceType.TABLE, + resourceId: tableId, + resourceName: table.name, + description: `Deleted column "${validated.columnName}" from table "${table.name}"`, + metadata: { columnName: validated.columnName }, + request, + }) + + return NextResponse.json({ + success: true, + data: { + columns: updatedTable.schema.columns.map(normalizeColumn), + }, + }) + } catch (error) { + if (error instanceof z.ZodError) { + return NextResponse.json( + { error: 'Validation error', details: error.errors }, + { status: 400 } + ) + } + + if (error instanceof Error) { + if (error.message.includes('not found') || error.message === 'Table not found') { + return NextResponse.json({ error: error.message }, { status: 404 }) + } + if (error.message.includes('Cannot delete') || error.message.includes('last column')) { + return NextResponse.json({ error: error.message }, { status: 400 }) + } + } + + logger.error(`[${requestId}] Error deleting column from table ${tableId}:`, error) + return NextResponse.json({ error: 'Failed to delete column' }, { status: 500 }) + } +} diff --git a/apps/sim/app/api/v1/tables/[tableId]/route.ts 
b/apps/sim/app/api/v1/tables/[tableId]/route.ts new file mode 100644 index 0000000000..06c2a1de4f --- /dev/null +++ b/apps/sim/app/api/v1/tables/[tableId]/route.ts @@ -0,0 +1,142 @@ +import { createLogger } from '@sim/logger' +import { type NextRequest, NextResponse } from 'next/server' +import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log' +import { generateRequestId } from '@/lib/core/utils/request' +import { deleteTable, type TableSchema } from '@/lib/table' +import { accessError, checkAccess, normalizeColumn } from '@/app/api/table/utils' +import { + checkRateLimit, + checkWorkspaceScope, + createRateLimitResponse, +} from '@/app/api/v1/middleware' + +const logger = createLogger('V1TableDetailAPI') + +export const dynamic = 'force-dynamic' +export const revalidate = 0 + +interface TableRouteParams { + params: Promise<{ tableId: string }> +} + +/** GET /api/v1/tables/[tableId] — Get table details. */ +export async function GET(request: NextRequest, { params }: TableRouteParams) { + const requestId = generateRequestId() + + try { + const rateLimit = await checkRateLimit(request, 'table-detail') + if (!rateLimit.allowed) { + return createRateLimitResponse(rateLimit) + } + + const userId = rateLimit.userId! 
+ const { tableId } = await params + const { searchParams } = new URL(request.url) + const workspaceId = searchParams.get('workspaceId') + + if (!workspaceId) { + return NextResponse.json( + { error: 'workspaceId query parameter is required' }, + { status: 400 } + ) + } + + const scopeError = checkWorkspaceScope(rateLimit, workspaceId) + if (scopeError) return scopeError + + const result = await checkAccess(tableId, userId, 'read') + if (!result.ok) return accessError(result, requestId, tableId) + + const { table } = result + + if (table.workspaceId !== workspaceId) { + return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 }) + } + + const schemaData = table.schema as TableSchema + + return NextResponse.json({ + success: true, + data: { + table: { + id: table.id, + name: table.name, + description: table.description, + schema: { + columns: schemaData.columns.map(normalizeColumn), + }, + rowCount: table.rowCount, + maxRows: table.maxRows, + createdAt: + table.createdAt instanceof Date + ? table.createdAt.toISOString() + : String(table.createdAt), + updatedAt: + table.updatedAt instanceof Date + ? table.updatedAt.toISOString() + : String(table.updatedAt), + }, + }, + }) + } catch (error) { + logger.error(`[${requestId}] Error getting table:`, error) + return NextResponse.json({ error: 'Failed to get table' }, { status: 500 }) + } +} + +/** DELETE /api/v1/tables/[tableId] — Archive a table. */ +export async function DELETE(request: NextRequest, { params }: TableRouteParams) { + const requestId = generateRequestId() + + try { + const rateLimit = await checkRateLimit(request, 'table-detail') + if (!rateLimit.allowed) { + return createRateLimitResponse(rateLimit) + } + + const userId = rateLimit.userId! 
+ const { tableId } = await params + const { searchParams } = new URL(request.url) + const workspaceId = searchParams.get('workspaceId') + + if (!workspaceId) { + return NextResponse.json( + { error: 'workspaceId query parameter is required' }, + { status: 400 } + ) + } + + const scopeError = checkWorkspaceScope(rateLimit, workspaceId) + if (scopeError) return scopeError + + const result = await checkAccess(tableId, userId, 'write') + if (!result.ok) return accessError(result, requestId, tableId) + + if (result.table.workspaceId !== workspaceId) { + return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 }) + } + + await deleteTable(tableId, requestId) + + recordAudit({ + workspaceId, + actorId: userId, + action: AuditAction.TABLE_DELETED, + resourceType: AuditResourceType.TABLE, + resourceId: tableId, + resourceName: result.table.name, + description: `Archived table "${result.table.name}"`, + request, + }) + + return NextResponse.json({ + success: true, + data: { + message: 'Table archived successfully', + }, + }) + } catch (error) { + logger.error(`[${requestId}] Error deleting table:`, error) + return NextResponse.json({ error: 'Failed to delete table' }, { status: 500 }) + } +} diff --git a/apps/sim/app/api/v1/tables/[tableId]/rows/[rowId]/route.ts b/apps/sim/app/api/v1/tables/[tableId]/rows/[rowId]/route.ts new file mode 100644 index 0000000000..bc7901a80d --- /dev/null +++ b/apps/sim/app/api/v1/tables/[tableId]/rows/[rowId]/route.ts @@ -0,0 +1,278 @@ +import { db } from '@sim/db' +import { userTableRows } from '@sim/db/schema' +import { createLogger } from '@sim/logger' +import { and, eq } from 'drizzle-orm' +import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' +import { generateRequestId } from '@/lib/core/utils/request' +import type { RowData } from '@/lib/table' +import { updateRow } from '@/lib/table' +import { accessError, checkAccess } from '@/app/api/table/utils' +import { + checkRateLimit, + 
checkWorkspaceScope, + createRateLimitResponse, +} from '@/app/api/v1/middleware' + +const logger = createLogger('V1TableRowAPI') + +export const dynamic = 'force-dynamic' +export const revalidate = 0 + +const UpdateRowSchema = z.object({ + workspaceId: z.string().min(1, 'Workspace ID is required'), + data: z.record(z.unknown(), { required_error: 'Row data is required' }), +}) + +interface RowRouteParams { + params: Promise<{ tableId: string; rowId: string }> +} + +/** GET /api/v1/tables/[tableId]/rows/[rowId] — Get a single row. */ +export async function GET(request: NextRequest, { params }: RowRouteParams) { + const requestId = generateRequestId() + + try { + const rateLimit = await checkRateLimit(request, 'table-row-detail') + if (!rateLimit.allowed) { + return createRateLimitResponse(rateLimit) + } + + const userId = rateLimit.userId! + const { tableId, rowId } = await params + const { searchParams } = new URL(request.url) + const workspaceId = searchParams.get('workspaceId') + + if (!workspaceId) { + return NextResponse.json( + { error: 'workspaceId query parameter is required' }, + { status: 400 } + ) + } + + const scopeError = checkWorkspaceScope(rateLimit, workspaceId) + if (scopeError) return scopeError + + const result = await checkAccess(tableId, userId, 'read') + if (!result.ok) return accessError(result, requestId, tableId) + + if (result.table.workspaceId !== workspaceId) { + return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 }) + } + + const [row] = await db + .select({ + id: userTableRows.id, + data: userTableRows.data, + position: userTableRows.position, + createdAt: userTableRows.createdAt, + updatedAt: userTableRows.updatedAt, + }) + .from(userTableRows) + .where( + and( + eq(userTableRows.id, rowId), + eq(userTableRows.tableId, tableId), + eq(userTableRows.workspaceId, workspaceId) + ) + ) + .limit(1) + + if (!row) { + return NextResponse.json({ error: 'Row not found' }, { status: 404 }) + } + + return NextResponse.json({ 
+ success: true, + data: { + row: { + id: row.id, + data: row.data, + position: row.position, + createdAt: + row.createdAt instanceof Date ? row.createdAt.toISOString() : String(row.createdAt), + updatedAt: + row.updatedAt instanceof Date ? row.updatedAt.toISOString() : String(row.updatedAt), + }, + }, + }) + } catch (error) { + logger.error(`[${requestId}] Error getting row:`, error) + return NextResponse.json({ error: 'Failed to get row' }, { status: 500 }) + } +} + +/** PATCH /api/v1/tables/[tableId]/rows/[rowId] — Partial update a single row. */ +export async function PATCH(request: NextRequest, { params }: RowRouteParams) { + const requestId = generateRequestId() + + try { + const rateLimit = await checkRateLimit(request, 'table-row-detail') + if (!rateLimit.allowed) { + return createRateLimitResponse(rateLimit) + } + + const userId = rateLimit.userId! + const { tableId, rowId } = await params + + let body: unknown + try { + body = await request.json() + } catch { + return NextResponse.json({ error: 'Request body must be valid JSON' }, { status: 400 }) + } + + const validated = UpdateRowSchema.parse(body) + + const scopeError = checkWorkspaceScope(rateLimit, validated.workspaceId) + if (scopeError) return scopeError + + const result = await checkAccess(tableId, userId, 'write') + if (!result.ok) return accessError(result, requestId, tableId) + + const { table } = result + + if (table.workspaceId !== validated.workspaceId) { + return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 }) + } + + // Fetch existing row to merge partial update + const [existingRow] = await db + .select({ data: userTableRows.data }) + .from(userTableRows) + .where( + and( + eq(userTableRows.id, rowId), + eq(userTableRows.tableId, tableId), + eq(userTableRows.workspaceId, validated.workspaceId) + ) + ) + .limit(1) + + if (!existingRow) { + return NextResponse.json({ error: 'Row not found' }, { status: 404 }) + } + + const mergedData = { + ...(existingRow.data as 
RowData), + ...(validated.data as RowData), + } + + const updatedRow = await updateRow( + { + tableId, + rowId, + data: mergedData, + workspaceId: validated.workspaceId, + }, + table, + requestId + ) + + return NextResponse.json({ + success: true, + data: { + row: { + id: updatedRow.id, + data: updatedRow.data, + position: updatedRow.position, + createdAt: + updatedRow.createdAt instanceof Date + ? updatedRow.createdAt.toISOString() + : updatedRow.createdAt, + updatedAt: + updatedRow.updatedAt instanceof Date + ? updatedRow.updatedAt.toISOString() + : updatedRow.updatedAt, + }, + message: 'Row updated successfully', + }, + }) + } catch (error) { + if (error instanceof z.ZodError) { + return NextResponse.json( + { error: 'Validation error', details: error.errors }, + { status: 400 } + ) + } + + const errorMessage = error instanceof Error ? error.message : String(error) + + if (errorMessage === 'Row not found') { + return NextResponse.json({ error: errorMessage }, { status: 404 }) + } + + if ( + errorMessage.includes('Row size exceeds') || + errorMessage.includes('Schema validation') || + errorMessage.includes('must be unique') || + errorMessage.includes('Unique constraint violation') || + errorMessage.includes('Cannot set unique column') + ) { + return NextResponse.json({ error: errorMessage }, { status: 400 }) + } + + logger.error(`[${requestId}] Error updating row:`, error) + return NextResponse.json({ error: 'Failed to update row' }, { status: 500 }) + } +} + +/** DELETE /api/v1/tables/[tableId]/rows/[rowId] — Delete a single row. */ +export async function DELETE(request: NextRequest, { params }: RowRouteParams) { + const requestId = generateRequestId() + + try { + const rateLimit = await checkRateLimit(request, 'table-row-detail') + if (!rateLimit.allowed) { + return createRateLimitResponse(rateLimit) + } + + const userId = rateLimit.userId! 
+ const { tableId, rowId } = await params + const { searchParams } = new URL(request.url) + const workspaceId = searchParams.get('workspaceId') + + if (!workspaceId) { + return NextResponse.json( + { error: 'workspaceId query parameter is required' }, + { status: 400 } + ) + } + + const scopeError = checkWorkspaceScope(rateLimit, workspaceId) + if (scopeError) return scopeError + + const result = await checkAccess(tableId, userId, 'write') + if (!result.ok) return accessError(result, requestId, tableId) + + if (result.table.workspaceId !== workspaceId) { + return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 }) + } + + const [deletedRow] = await db + .delete(userTableRows) + .where( + and( + eq(userTableRows.id, rowId), + eq(userTableRows.tableId, tableId), + eq(userTableRows.workspaceId, workspaceId) + ) + ) + .returning() + + if (!deletedRow) { + return NextResponse.json({ error: 'Row not found' }, { status: 404 }) + } + + return NextResponse.json({ + success: true, + data: { + message: 'Row deleted successfully', + deletedCount: 1, + }, + }) + } catch (error) { + logger.error(`[${requestId}] Error deleting row:`, error) + return NextResponse.json({ error: 'Failed to delete row' }, { status: 500 }) + } +} diff --git a/apps/sim/app/api/v1/tables/[tableId]/rows/route.ts b/apps/sim/app/api/v1/tables/[tableId]/rows/route.ts new file mode 100644 index 0000000000..8021625b1b --- /dev/null +++ b/apps/sim/app/api/v1/tables/[tableId]/rows/route.ts @@ -0,0 +1,603 @@ +import { db } from '@sim/db' +import { userTableRows } from '@sim/db/schema' +import { createLogger } from '@sim/logger' +import { and, eq, sql } from 'drizzle-orm' +import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' +import { generateRequestId } from '@/lib/core/utils/request' +import type { Filter, RowData, Sort, TableSchema } from '@/lib/table' +import { + batchInsertRows, + deleteRowsByFilter, + deleteRowsByIds, + insertRow, + TABLE_LIMITS, + 
USER_TABLE_ROWS_SQL_NAME, + updateRowsByFilter, + validateBatchRows, + validateRowData, + validateRowSize, +} from '@/lib/table' +import { buildFilterClause, buildSortClause } from '@/lib/table/sql' +import { accessError, checkAccess } from '@/app/api/table/utils' +import { + checkRateLimit, + checkWorkspaceScope, + createRateLimitResponse, +} from '@/app/api/v1/middleware' + +const logger = createLogger('V1TableRowsAPI') + +export const dynamic = 'force-dynamic' +export const revalidate = 0 + +const InsertRowSchema = z.object({ + workspaceId: z.string().min(1, 'Workspace ID is required'), + data: z.record(z.unknown(), { required_error: 'Row data is required' }), +}) + +const BatchInsertRowsSchema = z.object({ + workspaceId: z.string().min(1, 'Workspace ID is required'), + rows: z + .array(z.record(z.unknown()), { required_error: 'Rows array is required' }) + .min(1, 'At least one row is required') + .max(1000, 'Cannot insert more than 1000 rows per batch'), +}) + +const QueryRowsSchema = z.object({ + workspaceId: z.string().min(1, 'Workspace ID is required'), + filter: z.record(z.unknown()).optional(), + sort: z.record(z.enum(['asc', 'desc'])).optional(), + limit: z + .preprocess( + (val) => (val === null || val === undefined || val === '' ? undefined : Number(val)), + z + .number({ required_error: 'Limit must be a number' }) + .int('Limit must be an integer') + .min(1, 'Limit must be at least 1') + .max(TABLE_LIMITS.MAX_QUERY_LIMIT, `Limit cannot exceed ${TABLE_LIMITS.MAX_QUERY_LIMIT}`) + .optional() + ) + .default(100), + offset: z + .preprocess( + (val) => (val === null || val === undefined || val === '' ? 
undefined : Number(val)), + z + .number({ required_error: 'Offset must be a number' }) + .int('Offset must be an integer') + .min(0, 'Offset must be 0 or greater') + .optional() + ) + .default(0), +}) + +const nonEmptyFilter = z + .record(z.unknown(), { required_error: 'Filter criteria is required' }) + .refine((f) => Object.keys(f).length > 0, { message: 'Filter must not be empty' }) + +const optionalPositiveLimit = (max: number, label: string) => + z.preprocess( + (val) => (val === null || val === undefined || val === '' ? undefined : Number(val)), + z + .number() + .int(`${label} must be an integer`) + .min(1, `${label} must be at least 1`) + .max(max, `Cannot ${label.toLowerCase()} more than ${max} rows per operation`) + .optional() + ) + +const UpdateRowsByFilterSchema = z.object({ + workspaceId: z.string().min(1, 'Workspace ID is required'), + filter: nonEmptyFilter, + data: z.record(z.unknown(), { required_error: 'Update data is required' }), + limit: optionalPositiveLimit(1000, 'Limit'), +}) + +const DeleteRowsByFilterSchema = z.object({ + workspaceId: z.string().min(1, 'Workspace ID is required'), + filter: nonEmptyFilter, + limit: optionalPositiveLimit(1000, 'Limit'), +}) + +const DeleteRowsByIdsSchema = z.object({ + workspaceId: z.string().min(1, 'Workspace ID is required'), + rowIds: z + .array(z.string().min(1), { required_error: 'Row IDs are required' }) + .min(1, 'At least one row ID is required') + .max(1000, 'Cannot delete more than 1000 rows per operation'), +}) + +const DeleteRowsRequestSchema = z.union([DeleteRowsByFilterSchema, DeleteRowsByIdsSchema]) + +interface TableRowsRouteParams { + params: Promise<{ tableId: string }> +} + +async function handleBatchInsert( + requestId: string, + tableId: string, + validated: z.infer, + userId: string +): Promise { + const accessResult = await checkAccess(tableId, userId, 'write') + if (!accessResult.ok) return accessError(accessResult, requestId, tableId) + + const { table } = accessResult + + if 
(validated.workspaceId !== table.workspaceId) { + return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 }) + } + + const validation = await validateBatchRows({ + rows: validated.rows as RowData[], + schema: table.schema as TableSchema, + tableId, + }) + if (!validation.valid) return validation.response + + try { + const insertedRows = await batchInsertRows( + { + tableId, + rows: validated.rows as RowData[], + workspaceId: validated.workspaceId, + userId, + }, + table, + requestId + ) + + return NextResponse.json({ + success: true, + data: { + rows: insertedRows.map((r) => ({ + id: r.id, + data: r.data, + position: r.position, + createdAt: r.createdAt instanceof Date ? r.createdAt.toISOString() : r.createdAt, + updatedAt: r.updatedAt instanceof Date ? r.updatedAt.toISOString() : r.updatedAt, + })), + insertedCount: insertedRows.length, + message: `Successfully inserted ${insertedRows.length} rows`, + }, + }) + } catch (error) { + const errorMessage = error instanceof Error ? error.message : String(error) + + if ( + errorMessage.includes('row limit') || + errorMessage.includes('Insufficient capacity') || + errorMessage.includes('Schema validation') || + errorMessage.includes('must be unique') || + errorMessage.includes('Row size exceeds') || + errorMessage.match(/^Row \d+:/) + ) { + return NextResponse.json({ error: errorMessage }, { status: 400 }) + } + + logger.error(`[${requestId}] Error batch inserting rows:`, error) + return NextResponse.json({ error: 'Failed to insert rows' }, { status: 500 }) + } +} + +/** GET /api/v1/tables/[tableId]/rows — Query rows with filtering, sorting, pagination. */ +export async function GET(request: NextRequest, { params }: TableRowsRouteParams) { + const requestId = generateRequestId() + + try { + const rateLimit = await checkRateLimit(request, 'table-rows') + if (!rateLimit.allowed) { + return createRateLimitResponse(rateLimit) + } + + const userId = rateLimit.userId! 
+ const { tableId } = await params + const { searchParams } = new URL(request.url) + + let filter: Record | undefined + let sort: Sort | undefined + + try { + const filterParam = searchParams.get('filter') + const sortParam = searchParams.get('sort') + if (filterParam) { + filter = JSON.parse(filterParam) as Record + } + if (sortParam) { + sort = JSON.parse(sortParam) as Sort + } + } catch { + return NextResponse.json({ error: 'Invalid filter or sort JSON' }, { status: 400 }) + } + + const validated = QueryRowsSchema.parse({ + workspaceId: searchParams.get('workspaceId'), + filter, + sort, + limit: searchParams.get('limit'), + offset: searchParams.get('offset'), + }) + + const scopeError = checkWorkspaceScope(rateLimit, validated.workspaceId) + if (scopeError) return scopeError + + const accessResult = await checkAccess(tableId, userId, 'read') + if (!accessResult.ok) return accessError(accessResult, requestId, tableId) + + const { table } = accessResult + + if (validated.workspaceId !== table.workspaceId) { + return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 }) + } + + const baseConditions = [ + eq(userTableRows.tableId, tableId), + eq(userTableRows.workspaceId, validated.workspaceId), + ] + + if (validated.filter) { + const filterClause = buildFilterClause(validated.filter as Filter, USER_TABLE_ROWS_SQL_NAME) + if (filterClause) { + baseConditions.push(filterClause) + } + } + + let query = db + .select({ + id: userTableRows.id, + data: userTableRows.data, + position: userTableRows.position, + createdAt: userTableRows.createdAt, + updatedAt: userTableRows.updatedAt, + }) + .from(userTableRows) + .where(and(...baseConditions)) + + if (validated.sort) { + const schema = table.schema as TableSchema + const sortClause = buildSortClause(validated.sort, USER_TABLE_ROWS_SQL_NAME, schema.columns) + if (sortClause) { + query = query.orderBy(sortClause) as typeof query + } else { + query = query.orderBy(userTableRows.position) as typeof query + } + } 
else { + query = query.orderBy(userTableRows.position) as typeof query + } + + const countQuery = db + .select({ count: sql`count(*)` }) + .from(userTableRows) + .where(and(...baseConditions)) + + const [countResult, rows] = await Promise.all([ + countQuery, + query.limit(validated.limit).offset(validated.offset), + ]) + const totalCount = countResult[0].count + + return NextResponse.json({ + success: true, + data: { + rows: rows.map((r) => ({ + id: r.id, + data: r.data, + position: r.position, + createdAt: r.createdAt instanceof Date ? r.createdAt.toISOString() : String(r.createdAt), + updatedAt: r.updatedAt instanceof Date ? r.updatedAt.toISOString() : String(r.updatedAt), + })), + rowCount: rows.length, + totalCount: Number(totalCount), + limit: validated.limit, + offset: validated.offset, + }, + }) + } catch (error) { + if (error instanceof z.ZodError) { + return NextResponse.json( + { error: 'Validation error', details: error.errors }, + { status: 400 } + ) + } + + logger.error(`[${requestId}] Error querying rows:`, error) + return NextResponse.json({ error: 'Failed to query rows' }, { status: 500 }) + } +} + +/** POST /api/v1/tables/[tableId]/rows — Insert row(s). Supports single or batch. */ +export async function POST(request: NextRequest, { params }: TableRowsRouteParams) { + const requestId = generateRequestId() + + try { + const rateLimit = await checkRateLimit(request, 'table-rows') + if (!rateLimit.allowed) { + return createRateLimitResponse(rateLimit) + } + + const userId = rateLimit.userId! 
+ const { tableId } = await params + + let body: unknown + try { + body = await request.json() + } catch { + return NextResponse.json({ error: 'Request body must be valid JSON' }, { status: 400 }) + } + + if ( + typeof body === 'object' && + body !== null && + 'rows' in body && + Array.isArray((body as Record).rows) + ) { + const batchValidated = BatchInsertRowsSchema.parse(body) + const scopeError = checkWorkspaceScope(rateLimit, batchValidated.workspaceId) + if (scopeError) return scopeError + return handleBatchInsert(requestId, tableId, batchValidated, userId) + } + + const validated = InsertRowSchema.parse(body) + + const scopeError = checkWorkspaceScope(rateLimit, validated.workspaceId) + if (scopeError) return scopeError + + const accessResult = await checkAccess(tableId, userId, 'write') + if (!accessResult.ok) return accessError(accessResult, requestId, tableId) + + const { table } = accessResult + + if (validated.workspaceId !== table.workspaceId) { + return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 }) + } + + const rowData = validated.data as RowData + + const validation = await validateRowData({ + rowData, + schema: table.schema as TableSchema, + tableId, + }) + if (!validation.valid) return validation.response + + const row = await insertRow( + { + tableId, + data: rowData, + workspaceId: validated.workspaceId, + userId, + }, + table, + requestId + ) + + return NextResponse.json({ + success: true, + data: { + row: { + id: row.id, + data: row.data, + position: row.position, + createdAt: row.createdAt instanceof Date ? row.createdAt.toISOString() : row.createdAt, + updatedAt: row.updatedAt instanceof Date ? row.updatedAt.toISOString() : row.updatedAt, + }, + message: 'Row inserted successfully', + }, + }) + } catch (error) { + if (error instanceof z.ZodError) { + return NextResponse.json( + { error: 'Validation error', details: error.errors }, + { status: 400 } + ) + } + + const errorMessage = error instanceof Error ? 
error.message : String(error) + + if ( + errorMessage.includes('row limit') || + errorMessage.includes('Insufficient capacity') || + errorMessage.includes('Schema validation') || + errorMessage.includes('must be unique') || + errorMessage.includes('Row size exceeds') + ) { + return NextResponse.json({ error: errorMessage }, { status: 400 }) + } + + logger.error(`[${requestId}] Error inserting row:`, error) + return NextResponse.json({ error: 'Failed to insert row' }, { status: 500 }) + } +} + +/** PUT /api/v1/tables/[tableId]/rows — Bulk update rows by filter. */ +export async function PUT(request: NextRequest, { params }: TableRowsRouteParams) { + const requestId = generateRequestId() + + try { + const rateLimit = await checkRateLimit(request, 'table-rows') + if (!rateLimit.allowed) { + return createRateLimitResponse(rateLimit) + } + + const userId = rateLimit.userId! + const { tableId } = await params + + let body: unknown + try { + body = await request.json() + } catch { + return NextResponse.json({ error: 'Request body must be valid JSON' }, { status: 400 }) + } + + const validated = UpdateRowsByFilterSchema.parse(body) + + const scopeError = checkWorkspaceScope(rateLimit, validated.workspaceId) + if (scopeError) return scopeError + + const accessResult = await checkAccess(tableId, userId, 'write') + if (!accessResult.ok) return accessError(accessResult, requestId, tableId) + + const { table } = accessResult + + if (validated.workspaceId !== table.workspaceId) { + return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 }) + } + + const sizeValidation = validateRowSize(validated.data as RowData) + if (!sizeValidation.valid) { + return NextResponse.json( + { error: 'Validation error', details: sizeValidation.errors }, + { status: 400 } + ) + } + + const result = await updateRowsByFilter( + { + tableId, + filter: validated.filter as Filter, + data: validated.data as RowData, + limit: validated.limit, + workspaceId: validated.workspaceId, + }, + 
table, + requestId + ) + + if (result.affectedCount === 0) { + return NextResponse.json({ + success: true, + data: { + message: 'No rows matched the filter criteria', + updatedCount: 0, + }, + }) + } + + return NextResponse.json({ + success: true, + data: { + message: 'Rows updated successfully', + updatedCount: result.affectedCount, + updatedRowIds: result.affectedRowIds, + }, + }) + } catch (error) { + if (error instanceof z.ZodError) { + return NextResponse.json( + { error: 'Validation error', details: error.errors }, + { status: 400 } + ) + } + + const errorMessage = error instanceof Error ? error.message : String(error) + + if ( + errorMessage.includes('Row size exceeds') || + errorMessage.includes('Schema validation') || + errorMessage.includes('must be unique') || + errorMessage.includes('Unique constraint violation') || + errorMessage.includes('Cannot set unique column') || + errorMessage.includes('Filter is required') + ) { + return NextResponse.json({ error: errorMessage }, { status: 400 }) + } + + logger.error(`[${requestId}] Error updating rows by filter:`, error) + return NextResponse.json({ error: 'Failed to update rows' }, { status: 500 }) + } +} + +/** DELETE /api/v1/tables/[tableId]/rows — Delete rows by filter or IDs. */ +export async function DELETE(request: NextRequest, { params }: TableRowsRouteParams) { + const requestId = generateRequestId() + + try { + const rateLimit = await checkRateLimit(request, 'table-rows') + if (!rateLimit.allowed) { + return createRateLimitResponse(rateLimit) + } + + const userId = rateLimit.userId! 
+ const { tableId } = await params + + let body: unknown + try { + body = await request.json() + } catch { + return NextResponse.json({ error: 'Request body must be valid JSON' }, { status: 400 }) + } + + const validated = DeleteRowsRequestSchema.parse(body) + + const scopeError = checkWorkspaceScope(rateLimit, validated.workspaceId) + if (scopeError) return scopeError + + const accessResult = await checkAccess(tableId, userId, 'write') + if (!accessResult.ok) return accessError(accessResult, requestId, tableId) + + const { table } = accessResult + + if (validated.workspaceId !== table.workspaceId) { + return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 }) + } + + if ('rowIds' in validated) { + const result = await deleteRowsByIds( + { tableId, rowIds: validated.rowIds, workspaceId: validated.workspaceId }, + requestId + ) + + return NextResponse.json({ + success: true, + data: { + message: + result.deletedCount === 0 + ? 'No matching rows found for the provided IDs' + : 'Rows deleted successfully', + deletedCount: result.deletedCount, + deletedRowIds: result.deletedRowIds, + requestedCount: result.requestedCount, + ...(result.missingRowIds.length > 0 ? { missingRowIds: result.missingRowIds } : {}), + }, + }) + } + + const result = await deleteRowsByFilter( + { + tableId, + filter: validated.filter as Filter, + limit: validated.limit, + workspaceId: validated.workspaceId, + }, + requestId + ) + + return NextResponse.json({ + success: true, + data: { + message: + result.affectedCount === 0 + ? 'No rows matched the filter criteria' + : 'Rows deleted successfully', + deletedCount: result.affectedCount, + deletedRowIds: result.affectedRowIds, + }, + }) + } catch (error) { + if (error instanceof z.ZodError) { + return NextResponse.json( + { error: 'Validation error', details: error.errors }, + { status: 400 } + ) + } + + const errorMessage = error instanceof Error ? 
error.message : String(error) + + if (errorMessage.includes('Filter is required')) { + return NextResponse.json({ error: errorMessage }, { status: 400 }) + } + + logger.error(`[${requestId}] Error deleting rows:`, error) + return NextResponse.json({ error: 'Failed to delete rows' }, { status: 500 }) + } +} diff --git a/apps/sim/app/api/v1/tables/[tableId]/rows/upsert/route.ts b/apps/sim/app/api/v1/tables/[tableId]/rows/upsert/route.ts new file mode 100644 index 0000000000..93f1351a8f --- /dev/null +++ b/apps/sim/app/api/v1/tables/[tableId]/rows/upsert/route.ts @@ -0,0 +1,119 @@ +import { createLogger } from '@sim/logger' +import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' +import { generateRequestId } from '@/lib/core/utils/request' +import type { RowData } from '@/lib/table' +import { upsertRow } from '@/lib/table' +import { accessError, checkAccess } from '@/app/api/table/utils' +import { + checkRateLimit, + checkWorkspaceScope, + createRateLimitResponse, +} from '@/app/api/v1/middleware' + +const logger = createLogger('V1TableUpsertAPI') + +export const dynamic = 'force-dynamic' +export const revalidate = 0 + +const UpsertRowSchema = z.object({ + workspaceId: z.string().min(1, 'Workspace ID is required'), + data: z.record(z.unknown(), { required_error: 'Row data is required' }), + conflictTarget: z.string().optional(), +}) + +interface UpsertRouteParams { + params: Promise<{ tableId: string }> +} + +/** POST /api/v1/tables/[tableId]/rows/upsert — Insert or update a row based on unique columns. */ +export async function POST(request: NextRequest, { params }: UpsertRouteParams) { + const requestId = generateRequestId() + + try { + const rateLimit = await checkRateLimit(request, 'table-rows') + if (!rateLimit.allowed) { + return createRateLimitResponse(rateLimit) + } + + const userId = rateLimit.userId! 
+ const { tableId } = await params + + let body: unknown + try { + body = await request.json() + } catch { + return NextResponse.json({ error: 'Request body must be valid JSON' }, { status: 400 }) + } + + const validated = UpsertRowSchema.parse(body) + + const scopeError = checkWorkspaceScope(rateLimit, validated.workspaceId) + if (scopeError) return scopeError + + const result = await checkAccess(tableId, userId, 'write') + if (!result.ok) return accessError(result, requestId, tableId) + + const { table } = result + + if (table.workspaceId !== validated.workspaceId) { + return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 }) + } + + const upsertResult = await upsertRow( + { + tableId, + workspaceId: validated.workspaceId, + data: validated.data as RowData, + userId, + conflictTarget: validated.conflictTarget, + }, + table, + requestId + ) + + return NextResponse.json({ + success: true, + data: { + row: { + id: upsertResult.row.id, + data: upsertResult.row.data, + createdAt: + upsertResult.row.createdAt instanceof Date + ? upsertResult.row.createdAt.toISOString() + : upsertResult.row.createdAt, + updatedAt: + upsertResult.row.updatedAt instanceof Date + ? upsertResult.row.updatedAt.toISOString() + : upsertResult.row.updatedAt, + }, + operation: upsertResult.operation, + message: `Row ${upsertResult.operation === 'update' ? 'updated' : 'inserted'} successfully`, + }, + }) + } catch (error) { + if (error instanceof z.ZodError) { + return NextResponse.json( + { error: 'Validation error', details: error.errors }, + { status: 400 } + ) + } + + const errorMessage = error instanceof Error ? 
error.message : String(error) + + if ( + errorMessage.includes('unique column') || + errorMessage.includes('Unique constraint violation') || + errorMessage.includes('conflictTarget') || + errorMessage.includes('row limit') || + errorMessage.includes('Schema validation') || + errorMessage.includes('Upsert requires') || + errorMessage.includes('Row size exceeds') + ) { + return NextResponse.json({ error: errorMessage }, { status: 400 }) + } + + logger.error(`[${requestId}] Error upserting row:`, error) + return NextResponse.json({ error: 'Failed to upsert row' }, { status: 500 }) + } +} diff --git a/apps/sim/app/api/v1/tables/route.ts b/apps/sim/app/api/v1/tables/route.ts new file mode 100644 index 0000000000..09ff717f9c --- /dev/null +++ b/apps/sim/app/api/v1/tables/route.ts @@ -0,0 +1,260 @@ +import { createLogger } from '@sim/logger' +import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' +import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log' +import { generateRequestId } from '@/lib/core/utils/request' +import { + createTable, + getWorkspaceTableLimits, + listTables, + TABLE_LIMITS, + type TableSchema, +} from '@/lib/table' +import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils' +import { normalizeColumn } from '@/app/api/table/utils' +import { + checkRateLimit, + checkWorkspaceScope, + createRateLimitResponse, +} from '@/app/api/v1/middleware' + +const logger = createLogger('V1TablesAPI') + +export const dynamic = 'force-dynamic' +export const revalidate = 0 + +const ListTablesSchema = z.object({ + workspaceId: z.string().min(1, 'workspaceId query parameter is required'), +}) + +const ColumnSchema = z.object({ + name: z + .string() + .min(1, 'Column name is required') + .max( + TABLE_LIMITS.MAX_COLUMN_NAME_LENGTH, + `Column name must be ${TABLE_LIMITS.MAX_COLUMN_NAME_LENGTH} characters or less` + ) + .regex( + /^[a-z_][a-z0-9_]*$/i, + 'Column name must start with a letter or underscore 
and contain only alphanumeric characters and underscores' + ), + type: z.enum(['string', 'number', 'boolean', 'date', 'json'], { + errorMap: () => ({ + message: 'Column type must be one of: string, number, boolean, date, json', + }), + }), + required: z.boolean().optional().default(false), + unique: z.boolean().optional().default(false), +}) + +const CreateTableSchema = z.object({ + name: z + .string() + .min(1, 'Table name is required') + .max( + TABLE_LIMITS.MAX_TABLE_NAME_LENGTH, + `Table name must be ${TABLE_LIMITS.MAX_TABLE_NAME_LENGTH} characters or less` + ) + .regex( + /^[a-z_][a-z0-9_]*$/i, + 'Table name must start with a letter or underscore and contain only alphanumeric characters and underscores' + ), + description: z + .string() + .max( + TABLE_LIMITS.MAX_DESCRIPTION_LENGTH, + `Description must be ${TABLE_LIMITS.MAX_DESCRIPTION_LENGTH} characters or less` + ) + .optional(), + schema: z.object({ + columns: z + .array(ColumnSchema) + .min(1, 'Table must have at least one column') + .max( + TABLE_LIMITS.MAX_COLUMNS_PER_TABLE, + `Table cannot have more than ${TABLE_LIMITS.MAX_COLUMNS_PER_TABLE} columns` + ), + }), + workspaceId: z.string().min(1, 'Workspace ID is required'), +}) + +/** GET /api/v1/tables — List all tables in a workspace. */ +export async function GET(request: NextRequest) { + const requestId = generateRequestId() + + try { + const rateLimit = await checkRateLimit(request, 'tables') + if (!rateLimit.allowed) { + return createRateLimitResponse(rateLimit) + } + + const userId = rateLimit.userId! 
+ const { searchParams } = new URL(request.url) + + const validation = ListTablesSchema.safeParse({ + workspaceId: searchParams.get('workspaceId'), + }) + if (!validation.success) { + return NextResponse.json( + { error: 'Validation error', details: validation.error.errors }, + { status: 400 } + ) + } + + const { workspaceId } = validation.data + + const scopeError = checkWorkspaceScope(rateLimit, workspaceId) + if (scopeError) return scopeError + + const permission = await getUserEntityPermissions(userId, 'workspace', workspaceId) + if (permission === null) { + return NextResponse.json({ error: 'Access denied' }, { status: 403 }) + } + + const tables = await listTables(workspaceId) + + return NextResponse.json({ + success: true, + data: { + tables: tables.map((t) => { + const schemaData = t.schema as TableSchema + return { + id: t.id, + name: t.name, + description: t.description, + schema: { + columns: schemaData.columns.map(normalizeColumn), + }, + rowCount: t.rowCount, + maxRows: t.maxRows, + createdAt: + t.createdAt instanceof Date ? t.createdAt.toISOString() : String(t.createdAt), + updatedAt: + t.updatedAt instanceof Date ? t.updatedAt.toISOString() : String(t.updatedAt), + } + }), + totalCount: tables.length, + }, + }) + } catch (error) { + if (error instanceof z.ZodError) { + return NextResponse.json( + { error: 'Validation error', details: error.errors }, + { status: 400 } + ) + } + + logger.error(`[${requestId}] Error listing tables:`, error) + return NextResponse.json({ error: 'Failed to list tables' }, { status: 500 }) + } +} + +/** POST /api/v1/tables — Create a new table. */ +export async function POST(request: NextRequest) { + const requestId = generateRequestId() + + try { + const rateLimit = await checkRateLimit(request, 'tables') + if (!rateLimit.allowed) { + return createRateLimitResponse(rateLimit) + } + + const userId = rateLimit.userId! 
+ + let body: unknown + try { + body = await request.json() + } catch { + return NextResponse.json({ error: 'Request body must be valid JSON' }, { status: 400 }) + } + + const params = CreateTableSchema.parse(body) + + const scopeError = checkWorkspaceScope(rateLimit, params.workspaceId) + if (scopeError) return scopeError + + const permission = await getUserEntityPermissions(userId, 'workspace', params.workspaceId) + if (permission === null || permission === 'read') { + return NextResponse.json({ error: 'Access denied' }, { status: 403 }) + } + + const planLimits = await getWorkspaceTableLimits(params.workspaceId) + + const normalizedSchema: TableSchema = { + columns: params.schema.columns.map(normalizeColumn), + } + + const table = await createTable( + { + name: params.name, + description: params.description, + schema: normalizedSchema, + workspaceId: params.workspaceId, + userId, + maxRows: planLimits.maxRowsPerTable, + maxTables: planLimits.maxTables, + }, + requestId + ) + + recordAudit({ + workspaceId: params.workspaceId, + actorId: userId, + action: AuditAction.TABLE_CREATED, + resourceType: AuditResourceType.TABLE, + resourceId: table.id, + resourceName: table.name, + description: `Created table "${table.name}" via API`, + request, + }) + + return NextResponse.json({ + success: true, + data: { + table: { + id: table.id, + name: table.name, + description: table.description, + schema: { + columns: (table.schema as TableSchema).columns.map(normalizeColumn), + }, + rowCount: table.rowCount, + maxRows: table.maxRows, + createdAt: + table.createdAt instanceof Date + ? table.createdAt.toISOString() + : String(table.createdAt), + updatedAt: + table.updatedAt instanceof Date + ? 
table.updatedAt.toISOString() + : String(table.updatedAt), + }, + message: 'Table created successfully', + }, + }) + } catch (error) { + if (error instanceof z.ZodError) { + return NextResponse.json( + { error: 'Validation error', details: error.errors }, + { status: 400 } + ) + } + + if (error instanceof Error) { + if (error.message.includes('maximum table limit')) { + return NextResponse.json({ error: error.message }, { status: 403 }) + } + if ( + error.message.includes('Invalid table name') || + error.message.includes('Invalid schema') || + error.message.includes('already exists') + ) { + return NextResponse.json({ error: error.message }, { status: 400 }) + } + } + + logger.error(`[${requestId}] Error creating table:`, error) + return NextResponse.json({ error: 'Failed to create table' }, { status: 500 }) + } +} diff --git a/apps/sim/app/api/v1/workflows/[id]/route.ts b/apps/sim/app/api/v1/workflows/[id]/route.ts index 658a0f8ea4..9fe825e622 100644 --- a/apps/sim/app/api/v1/workflows/[id]/route.ts +++ b/apps/sim/app/api/v1/workflows/[id]/route.ts @@ -1,9 +1,11 @@ import { db } from '@sim/db' -import { permissions, workflow, workflowBlocks } from '@sim/db/schema' +import { workflowBlocks } from '@sim/db/schema' import { createLogger } from '@sim/logger' -import { and, eq } from 'drizzle-orm' +import { eq } from 'drizzle-orm' import { type NextRequest, NextResponse } from 'next/server' +import { getActiveWorkflowRecord } from '@/lib/workflows/active-context' import { extractInputFieldsFromBlocks } from '@/lib/workflows/input-format' +import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils' import { createApiResponse, getUserLimits } from '@/app/api/v1/logs/meta' import { checkRateLimit, createRateLimitResponse } from '@/app/api/v1/middleware' @@ -25,39 +27,20 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{ logger.info(`[${requestId}] Fetching workflow details for ${id}`, { userId }) - const rows = await db - 
.select({ - id: workflow.id, - name: workflow.name, - description: workflow.description, - color: workflow.color, - folderId: workflow.folderId, - workspaceId: workflow.workspaceId, - isDeployed: workflow.isDeployed, - deployedAt: workflow.deployedAt, - runCount: workflow.runCount, - lastRunAt: workflow.lastRunAt, - variables: workflow.variables, - createdAt: workflow.createdAt, - updatedAt: workflow.updatedAt, - }) - .from(workflow) - .innerJoin( - permissions, - and( - eq(permissions.entityType, 'workspace'), - eq(permissions.entityId, workflow.workspaceId), - eq(permissions.userId, userId) - ) - ) - .where(eq(workflow.id, id)) - .limit(1) - - const workflowData = rows[0] + const workflowData = await getActiveWorkflowRecord(id) if (!workflowData) { return NextResponse.json({ error: 'Workflow not found' }, { status: 404 }) } + const permission = await getUserEntityPermissions( + userId, + 'workspace', + workflowData.workspaceId! + ) + if (!permission) { + return NextResponse.json({ error: 'Access denied' }, { status: 403 }) + } + const blockRows = await db .select({ id: workflowBlocks.id, diff --git a/apps/sim/app/api/v1/workflows/route.ts b/apps/sim/app/api/v1/workflows/route.ts index 23bb707f15..267650aff1 100644 --- a/apps/sim/app/api/v1/workflows/route.ts +++ b/apps/sim/app/api/v1/workflows/route.ts @@ -1,9 +1,10 @@ import { db } from '@sim/db' -import { permissions, workflow } from '@sim/db/schema' +import { workflow } from '@sim/db/schema' import { createLogger } from '@sim/logger' -import { and, asc, eq, gt, or } from 'drizzle-orm' +import { and, asc, eq, gt, isNull, or } from 'drizzle-orm' import { type NextRequest, NextResponse } from 'next/server' import { z } from 'zod' +import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils' import { createApiResponse, getUserLimits } from '@/app/api/v1/logs/meta' import { checkRateLimit, createRateLimitResponse } from '@/app/api/v1/middleware' @@ -69,12 +70,12 @@ export async function 
GET(request: NextRequest) { }, }) - const conditions = [ - eq(workflow.workspaceId, params.workspaceId), - eq(permissions.entityType, 'workspace'), - eq(permissions.entityId, params.workspaceId), - eq(permissions.userId, userId), - ] + const permission = await getUserEntityPermissions(userId, 'workspace', params.workspaceId) + if (!permission) { + return NextResponse.json({ error: 'Access denied' }, { status: 403 }) + } + + const conditions = [eq(workflow.workspaceId, params.workspaceId), isNull(workflow.archivedAt)] if (params.folderId) { conditions.push(eq(workflow.folderId, params.folderId)) @@ -124,14 +125,6 @@ export async function GET(request: NextRequest) { updatedAt: workflow.updatedAt, }) .from(workflow) - .innerJoin( - permissions, - and( - eq(permissions.entityType, 'workspace'), - eq(permissions.entityId, params.workspaceId), - eq(permissions.userId, userId) - ) - ) .where(and(...conditions)) .orderBy(...orderByClause) .limit(params.limit + 1) diff --git a/apps/sim/app/api/webhooks/[id]/route.ts b/apps/sim/app/api/webhooks/[id]/route.ts index f1f1fbd628..88d8f26e0b 100644 --- a/apps/sim/app/api/webhooks/[id]/route.ts +++ b/apps/sim/app/api/webhooks/[id]/route.ts @@ -1,7 +1,7 @@ import { db } from '@sim/db' import { webhook, workflow } from '@sim/db/schema' import { createLogger } from '@sim/logger' -import { and, eq } from 'drizzle-orm' +import { and, eq, isNull } from 'drizzle-orm' import { type NextRequest, NextResponse } from 'next/server' import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log' import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid' @@ -41,7 +41,7 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{ }) .from(webhook) .innerJoin(workflow, eq(webhook.workflowId, workflow.id)) - .where(eq(webhook.id, id)) + .where(and(eq(webhook.id, id), isNull(webhook.archivedAt))) .limit(1) if (webhooks.length === 0) { @@ -106,7 +106,7 @@ export async function PATCH(request: NextRequest, { 
params }: { params: Promise< }) .from(webhook) .innerJoin(workflow, eq(webhook.workflowId, workflow.id)) - .where(eq(webhook.id, id)) + .where(and(eq(webhook.id, id), isNull(webhook.archivedAt))) .limit(1) if (webhooks.length === 0) { @@ -204,7 +204,13 @@ export async function DELETE( const allCredentialSetWebhooks = await db .select() .from(webhook) - .where(and(eq(webhook.workflowId, webhookData.workflow.id), eq(webhook.blockId, blockId))) + .where( + and( + eq(webhook.workflowId, webhookData.workflow.id), + eq(webhook.blockId, blockId), + isNull(webhook.archivedAt) + ) + ) const webhooksToDelete = allCredentialSetWebhooks.filter( (w) => w.credentialSetId === credentialSetId diff --git a/apps/sim/app/api/webhooks/agentmail/route.ts b/apps/sim/app/api/webhooks/agentmail/route.ts new file mode 100644 index 0000000000..15ecf2693e --- /dev/null +++ b/apps/sim/app/api/webhooks/agentmail/route.ts @@ -0,0 +1,277 @@ +import { + db, + mothershipInboxAllowedSender, + mothershipInboxTask, + mothershipInboxWebhook, + permissions, + user, + workspace, +} from '@sim/db' +import { createLogger } from '@sim/logger' +import { tasks } from '@trigger.dev/sdk' +import { and, eq, gt, ne, sql } from 'drizzle-orm' +import { NextResponse } from 'next/server' +import { Webhook } from 'svix' +import { v4 as uuidv4 } from 'uuid' +import { isTriggerDevEnabled } from '@/lib/core/config/feature-flags' +import { executeInboxTask } from '@/lib/mothership/inbox/executor' +import type { AgentMailWebhookPayload, RejectionReason } from '@/lib/mothership/inbox/types' + +const logger = createLogger('AgentMailWebhook') + +const AUTOMATED_SENDERS = ['mailer-daemon@', 'noreply@', 'no-reply@', 'postmaster@'] +const MAX_EMAILS_PER_HOUR = 20 + +export async function POST(req: Request) { + try { + const rawBody = await req.text() + const svixId = req.headers.get('svix-id') + const svixTimestamp = req.headers.get('svix-timestamp') + const svixSignature = req.headers.get('svix-signature') + + const payload = 
JSON.parse(rawBody) as AgentMailWebhookPayload + + if (payload.event_type !== 'message.received') { + return NextResponse.json({ ok: true }) + } + + const { message } = payload + const inboxId = message?.inbox_id + if (!message || !inboxId) { + return NextResponse.json({ ok: true }) + } + + const [result] = await db + .select({ + id: workspace.id, + inboxEnabled: workspace.inboxEnabled, + inboxAddress: workspace.inboxAddress, + inboxProviderId: workspace.inboxProviderId, + webhookSecret: mothershipInboxWebhook.secret, + }) + .from(workspace) + .leftJoin(mothershipInboxWebhook, eq(mothershipInboxWebhook.workspaceId, workspace.id)) + .where(eq(workspace.inboxProviderId, inboxId)) + .limit(1) + + if (!result || !result.webhookSecret) { + if (!result) { + logger.warn('No workspace found for inbox', { inboxId }) + } else { + logger.warn('No webhook secret found for workspace', { workspaceId: result.id }) + } + return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) + } + + try { + const wh = new Webhook(result.webhookSecret) + wh.verify(rawBody, { + 'svix-id': svixId || '', + 'svix-timestamp': svixTimestamp || '', + 'svix-signature': svixSignature || '', + }) + } catch (verifyErr) { + logger.warn('Webhook signature verification failed', { + workspaceId: result.id, + error: verifyErr instanceof Error ? 
verifyErr.message : 'Unknown error', + }) + return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) + } + + if (!result.inboxEnabled) { + logger.info('Inbox disabled, rejecting', { workspaceId: result.id }) + return NextResponse.json({ ok: true }) + } + + const fromEmail = extractSenderEmail(message.from_) || '' + logger.info('Webhook received', { fromEmail, from_raw: message.from_, workspaceId: result.id }) + + if (result.inboxAddress && fromEmail === result.inboxAddress.toLowerCase()) { + logger.info('Skipping email from inbox itself', { workspaceId: result.id }) + return NextResponse.json({ ok: true }) + } + + if (AUTOMATED_SENDERS.some((prefix) => fromEmail.startsWith(prefix))) { + await createRejectedTask(result.id, message, 'automated_sender') + return NextResponse.json({ ok: true }) + } + + const emailMessageId = message.message_id + const inReplyTo = message.in_reply_to || null + + const [existingResult, isAllowed, recentCount, parentTaskResult] = await Promise.all([ + emailMessageId + ? db + .select({ id: mothershipInboxTask.id }) + .from(mothershipInboxTask) + .where(eq(mothershipInboxTask.emailMessageId, emailMessageId)) + .limit(1) + : Promise.resolve([]), + isSenderAllowed(fromEmail, result.id), + getRecentTaskCount(result.id), + inReplyTo + ? db + .select({ chatId: mothershipInboxTask.chatId }) + .from(mothershipInboxTask) + .where(eq(mothershipInboxTask.responseMessageId, inReplyTo)) + .limit(1) + : Promise.resolve([]), + ]) + + if (existingResult[0]) { + logger.info('Duplicate webhook, skipping', { emailMessageId }) + return NextResponse.json({ ok: true }) + } + + if (!isAllowed) { + await createRejectedTask(result.id, message, 'sender_not_allowed') + return NextResponse.json({ ok: true }) + } + + if (recentCount >= MAX_EMAILS_PER_HOUR) { + await createRejectedTask(result.id, message, 'rate_limit_exceeded') + return NextResponse.json({ ok: true }) + } + + const chatId = parentTaskResult[0]?.chatId ?? 
null + + const fromName = extractDisplayName(message.from_) + + const taskId = uuidv4() + const bodyText = message.text?.substring(0, 50_000) || null + const bodyHtml = message.html?.substring(0, 50_000) || null + const bodyPreview = (bodyText || '')?.substring(0, 200) || null + + await db.insert(mothershipInboxTask).values({ + id: taskId, + workspaceId: result.id, + fromEmail, + fromName, + subject: message.subject || '(no subject)', + bodyPreview, + bodyText, + bodyHtml, + emailMessageId, + inReplyTo, + agentmailMessageId: message.message_id, + status: 'received', + chatId, + hasAttachments: (message.attachments?.length ?? 0) > 0, + ccRecipients: message.cc?.length ? JSON.stringify(message.cc) : null, + }) + + if (isTriggerDevEnabled) { + try { + const handle = await tasks.trigger('mothership-inbox-execution', { taskId }) + await db + .update(mothershipInboxTask) + .set({ triggerJobId: handle.id }) + .where(eq(mothershipInboxTask.id, taskId)) + } catch (triggerError) { + logger.warn('Trigger.dev dispatch failed, falling back to local execution', { + taskId, + triggerError, + }) + executeInboxTask(taskId).catch((err) => { + logger.error('Local inbox task execution failed', { + taskId, + error: err instanceof Error ? err.message : 'Unknown error', + }) + }) + } + } else { + logger.info('Trigger.dev not available, executing inbox task locally', { taskId }) + executeInboxTask(taskId).catch((err) => { + logger.error('Local inbox task execution failed', { + taskId, + error: err instanceof Error ? err.message : 'Unknown error', + }) + }) + } + + return NextResponse.json({ ok: true }) + } catch (error) { + logger.error('AgentMail webhook error', { + error: error instanceof Error ? 
error.message : 'Unknown error', + }) + return NextResponse.json({ error: 'Internal server error' }, { status: 500 }) + } +} + +async function isSenderAllowed(email: string, workspaceId: string): Promise { + const [allowedSenderResult, memberResult] = await Promise.all([ + db + .select({ id: mothershipInboxAllowedSender.id }) + .from(mothershipInboxAllowedSender) + .where( + and( + eq(mothershipInboxAllowedSender.workspaceId, workspaceId), + eq(mothershipInboxAllowedSender.email, email) + ) + ) + .limit(1), + db + .select({ userId: permissions.userId }) + .from(permissions) + .innerJoin(user, eq(permissions.userId, user.id)) + .where( + and( + eq(permissions.entityType, 'workspace'), + eq(permissions.entityId, workspaceId), + sql`lower(${user.email}) = ${email}` + ) + ) + .limit(1), + ]) + + return !!(allowedSenderResult[0] || memberResult[0]) +} + +async function getRecentTaskCount(workspaceId: string): Promise { + const oneHourAgo = new Date(Date.now() - 60 * 60 * 1000) + const [result] = await db + .select({ count: sql`count(*)::int` }) + .from(mothershipInboxTask) + .where( + and( + eq(mothershipInboxTask.workspaceId, workspaceId), + gt(mothershipInboxTask.createdAt, oneHourAgo), + ne(mothershipInboxTask.status, 'rejected') + ) + ) + return result?.count ?? 0 +} + +async function createRejectedTask( + workspaceId: string, + message: AgentMailWebhookPayload['message'], + reason: RejectionReason +): Promise { + await db.insert(mothershipInboxTask).values({ + id: uuidv4(), + workspaceId, + fromEmail: extractSenderEmail(message.from_) || 'unknown', + fromName: extractDisplayName(message.from_), + subject: message.subject || '(no subject)', + bodyPreview: (message.text || '').substring(0, 200) || null, + emailMessageId: message.message_id, + agentmailMessageId: message.message_id, + status: 'rejected', + rejectionReason: reason, + hasAttachments: (message.attachments?.length ?? 0) > 0, + }) +} + +/** + * Extract the raw email address from AgentMail's from_ field. 
+ * Format: "username@domain.com" or "Display Name " + */ +function extractSenderEmail(from: string): string { + const match = from.match(/<([^>]+)>/) + return (match?.[1] || from).toLowerCase().trim() +} + +function extractDisplayName(from: string): string | null { + const match = from.match(/^(.+?)\s* { + if (existingWebhook) { + await db + .update(webhook) + .set({ + workflowId: existingWebhook.workflowId, + blockId: existingWebhook.blockId, + path: existingWebhook.path, + provider: existingWebhook.provider, + providerConfig: existingWebhook.providerConfig, + credentialSetId: existingWebhook.credentialSetId, + isActive: existingWebhook.isActive, + archivedAt: existingWebhook.archivedAt, + updatedAt: existingWebhook.updatedAt, + }) + .where(eq(webhook.id, savedWebhook.id)) + logger.info(`[${requestId}] Restored previous webhook configuration after failed re-save`, { + webhookId: savedWebhook.id, + }) + return + } + + await db.delete(webhook).where(eq(webhook.id, savedWebhook.id)) +} + // Get all webhooks for the current user export async function GET(request: NextRequest) { const requestId = generateRequestId() @@ -93,6 +123,7 @@ export async function GET(request: NextRequest) { and( eq(webhook.workflowId, workflowId), eq(webhook.blockId, blockId), + isNull(webhook.archivedAt), or( eq(webhook.deploymentVersionId, workflowDeploymentVersion.id), and(isNull(workflowDeploymentVersion.id), isNull(webhook.deploymentVersionId)) @@ -132,7 +163,7 @@ export async function GET(request: NextRequest) { }) .from(webhook) .innerJoin(workflow, eq(webhook.workflowId, workflow.id)) - .where(inArray(workflow.workspaceId, workspaceIds)) + .where(and(inArray(workflow.workspaceId, workspaceIds), isNull(webhook.archivedAt))) logger.info(`[${requestId}] Retrieved ${webhooks.length} workspace-accessible webhooks`) return NextResponse.json({ webhooks }, { status: 200 }) @@ -196,6 +227,7 @@ export async function POST(request: NextRequest) { and( eq(webhook.workflowId, workflowId), 
eq(webhook.blockId, blockId), + isNull(webhook.archivedAt), or( eq(webhook.deploymentVersionId, workflowDeploymentVersion.id), and(isNull(workflowDeploymentVersion.id), isNull(webhook.deploymentVersionId)) @@ -275,6 +307,7 @@ export async function POST(request: NextRequest) { and( eq(webhook.workflowId, workflowId), eq(webhook.blockId, blockId), + isNull(webhook.archivedAt), or( eq(webhook.deploymentVersionId, workflowDeploymentVersion.id), and(isNull(workflowDeploymentVersion.id), isNull(webhook.deploymentVersionId)) @@ -290,7 +323,7 @@ export async function POST(request: NextRequest) { const existingByPath = await db .select({ id: webhook.id, workflowId: webhook.workflowId }) .from(webhook) - .where(eq(webhook.path, finalPath)) + .where(and(eq(webhook.path, finalPath), isNull(webhook.archivedAt))) .limit(1) if (existingByPath.length > 0) { // If a webhook with the same path exists but belongs to a different workflow, return an error @@ -306,6 +339,7 @@ export async function POST(request: NextRequest) { } let savedWebhook: any = null + let existingWebhook: any = null const originalProviderConfig = providerConfig || {} let resolvedProviderConfig = await resolveEnvVarsInObject( originalProviderConfig, @@ -380,7 +414,7 @@ export async function POST(request: NextRequest) { const webhookRows = await db .select() .from(webhook) - .where(eq(webhook.id, wh.id)) + .where(and(eq(webhook.id, wh.id), isNull(webhook.archivedAt))) .limit(1) if (webhookRows.length > 0) { @@ -425,7 +459,7 @@ export async function POST(request: NextRequest) { const primaryWebhookRows = await db .select() .from(webhook) - .where(eq(webhook.id, syncResult.webhooks[0].id)) + .where(and(eq(webhook.id, syncResult.webhooks[0].id), isNull(webhook.archivedAt))) .limit(1) return NextResponse.json( @@ -466,26 +500,53 @@ export async function POST(request: NextRequest) { const userProvided = originalProviderConfig as Record const configToSave: Record = { ...userProvided } - try { - const result = await 
createExternalWebhookSubscription( - request, - createTempWebhookData(), - workflowRecord, - userId, - requestId - ) - const updatedConfig = result.updatedProviderConfig as Record - mergeNonUserFields(configToSave, updatedConfig, userProvided) - resolvedProviderConfig = updatedConfig - externalSubscriptionCreated = result.externalSubscriptionCreated - } catch (err) { - logger.error(`[${requestId}] Error creating external webhook subscription`, err) - return NextResponse.json( - { - error: 'Failed to create external webhook subscription', - details: err instanceof Error ? err.message : 'Unknown error', - }, - { status: 500 } + if (targetWebhookId) { + const existingRows = await db + .select() + .from(webhook) + .where(eq(webhook.id, targetWebhookId)) + .limit(1) + existingWebhook = existingRows[0] || null + } + + const shouldRecreateSubscription = + existingWebhook && + shouldRecreateExternalWebhookSubscription({ + previousProvider: existingWebhook.provider as string, + nextProvider: provider, + previousConfig: ((existingWebhook.providerConfig as Record) || + {}) as Record, + nextConfig: resolvedProviderConfig, + }) + + if (!existingWebhook || shouldRecreateSubscription) { + try { + const result = await createExternalWebhookSubscription( + request, + createTempWebhookData(), + workflowRecord, + userId, + requestId + ) + const updatedConfig = result.updatedProviderConfig as Record + mergeNonUserFields(configToSave, updatedConfig, userProvided) + resolvedProviderConfig = updatedConfig + externalSubscriptionCreated = result.externalSubscriptionCreated + } catch (err) { + logger.error(`[${requestId}] Error creating external webhook subscription`, err) + return NextResponse.json( + { + error: 'Failed to create external webhook subscription', + details: err instanceof Error ? 
err.message : 'Unknown error', + }, + { status: 500 } + ) + } + } else { + mergeNonUserFields( + configToSave, + (existingWebhook.providerConfig as Record) || {}, + userProvided ) } @@ -556,6 +617,17 @@ export async function POST(request: NextRequest) { throw dbError } + if (existingWebhook && shouldRecreateSubscription) { + try { + await cleanupExternalWebhook(existingWebhook, workflowRecord, requestId) + } catch (cleanupError) { + logger.warn( + `[${requestId}] Failed to cleanup previous external webhook subscription ${existingWebhook.id}`, + cleanupError + ) + } + } + // --- Gmail/Outlook webhook setup (these don't require external subscriptions, configure after DB save) --- if (savedWebhook && provider === 'gmail') { logger.info(`[${requestId}] Gmail provider detected. Setting up Gmail webhook configuration.`) @@ -564,7 +636,7 @@ export async function POST(request: NextRequest) { if (!success) { logger.error(`[${requestId}] Failed to configure Gmail polling, rolling back webhook`) - await db.delete(webhook).where(eq(webhook.id, savedWebhook.id)) + await revertSavedWebhook(savedWebhook, existingWebhook, requestId) return NextResponse.json( { error: 'Failed to configure Gmail polling', @@ -580,7 +652,7 @@ export async function POST(request: NextRequest) { `[${requestId}] Error setting up Gmail webhook configuration, rolling back webhook`, err ) - await db.delete(webhook).where(eq(webhook.id, savedWebhook.id)) + await revertSavedWebhook(savedWebhook, existingWebhook, requestId) return NextResponse.json( { error: 'Failed to configure Gmail webhook', @@ -602,7 +674,7 @@ export async function POST(request: NextRequest) { if (!success) { logger.error(`[${requestId}] Failed to configure Outlook polling, rolling back webhook`) - await db.delete(webhook).where(eq(webhook.id, savedWebhook.id)) + await revertSavedWebhook(savedWebhook, existingWebhook, requestId) return NextResponse.json( { error: 'Failed to configure Outlook polling', @@ -618,7 +690,7 @@ export async 
function POST(request: NextRequest) { `[${requestId}] Error setting up Outlook webhook configuration, rolling back webhook`, err ) - await db.delete(webhook).where(eq(webhook.id, savedWebhook.id)) + await revertSavedWebhook(savedWebhook, existingWebhook, requestId) return NextResponse.json( { error: 'Failed to configure Outlook webhook', @@ -638,7 +710,7 @@ export async function POST(request: NextRequest) { if (!success) { logger.error(`[${requestId}] Failed to configure RSS polling, rolling back webhook`) - await db.delete(webhook).where(eq(webhook.id, savedWebhook.id)) + await revertSavedWebhook(savedWebhook, existingWebhook, requestId) return NextResponse.json( { error: 'Failed to configure RSS polling', @@ -654,7 +726,7 @@ export async function POST(request: NextRequest) { `[${requestId}] Error setting up RSS webhook configuration, rolling back webhook`, err ) - await db.delete(webhook).where(eq(webhook.id, savedWebhook.id)) + await revertSavedWebhook(savedWebhook, existingWebhook, requestId) return NextResponse.json( { error: 'Failed to configure RSS webhook', diff --git a/apps/sim/app/api/workflows/[id]/chat/status/route.test.ts b/apps/sim/app/api/workflows/[id]/chat/status/route.test.ts index 3be0cba6e2..48a36ca069 100644 --- a/apps/sim/app/api/workflows/[id]/chat/status/route.test.ts +++ b/apps/sim/app/api/workflows/[id]/chat/status/route.test.ts @@ -23,7 +23,9 @@ const { })) vi.mock('drizzle-orm', () => ({ + and: vi.fn((...args: unknown[]) => ({ type: 'and', args })), eq: vi.fn(), + isNull: vi.fn((field: unknown) => ({ type: 'isNull', field })), })) vi.mock('@sim/db', () => ({ @@ -45,6 +47,7 @@ vi.mock('@sim/db/schema', () => ({ password: 'password', isActive: 'isActive', workflowId: 'workflowId', + archivedAt: 'archivedAt', }, })) diff --git a/apps/sim/app/api/workflows/[id]/chat/status/route.ts b/apps/sim/app/api/workflows/[id]/chat/status/route.ts index ef84667d5d..22d9c7d553 100644 --- a/apps/sim/app/api/workflows/[id]/chat/status/route.ts +++ 
b/apps/sim/app/api/workflows/[id]/chat/status/route.ts @@ -1,7 +1,7 @@ import { db } from '@sim/db' import { chat } from '@sim/db/schema' import { createLogger } from '@sim/logger' -import { eq } from 'drizzle-orm' +import { and, eq, isNull } from 'drizzle-orm' import type { NextRequest } from 'next/server' import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid' import { generateRequestId } from '@/lib/core/utils/request' @@ -50,7 +50,7 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{ isActive: chat.isActive, }) .from(chat) - .where(eq(chat.workflowId, id)) + .where(and(eq(chat.workflowId, id), isNull(chat.archivedAt))) .limit(1) const isDeployed = deploymentResults.length > 0 && deploymentResults[0].isActive diff --git a/apps/sim/app/api/workflows/[id]/deploy/route.ts b/apps/sim/app/api/workflows/[id]/deploy/route.ts index 1dd8798a3f..5ad2678238 100644 --- a/apps/sim/app/api/workflows/[id]/deploy/route.ts +++ b/apps/sim/app/api/workflows/[id]/deploy/route.ts @@ -11,6 +11,7 @@ import { saveTriggerWebhooksForDeploy, } from '@/lib/webhooks/deploy' import { + activateWorkflowVersionById, deployWorkflow, loadWorkflowFromNormalizedTables, undeployWorkflow, @@ -154,6 +155,27 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{ .limit(1) const previousVersionId = currentActiveVersion?.id + const rollbackDeployment = async () => { + if (previousVersionId) { + await restorePreviousVersionWebhooks({ + request, + workflow: workflowData as Record, + userId: actorUserId, + previousVersionId, + requestId, + }) + const reactivateResult = await activateWorkflowVersionById({ + workflowId: id, + deploymentVersionId: previousVersionId, + }) + if (reactivateResult.success) { + return + } + } + + await undeployWorkflow({ workflowId: id }) + } + const deployResult = await deployWorkflow({ workflowId: id, deployedBy: actorUserId, @@ -190,7 +212,7 @@ export async function POST(request: NextRequest, { params }: { 
params: Promise<{ requestId, deploymentVersionId, }) - await undeployWorkflow({ workflowId: id }) + await rollbackDeployment() return createErrorResponse( triggerSaveResult.error?.message || 'Failed to save trigger configuration', triggerSaveResult.error?.status || 500 @@ -214,16 +236,7 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{ requestId, deploymentVersionId, }) - if (previousVersionId) { - await restorePreviousVersionWebhooks({ - request, - workflow: workflowData as Record, - userId: actorUserId, - previousVersionId, - requestId, - }) - } - await undeployWorkflow({ workflowId: id }) + await rollbackDeployment() return createErrorResponse(scheduleResult.error || 'Failed to create schedule', 500) } if (scheduleResult.scheduleId) { @@ -364,14 +377,13 @@ export async function DELETE( return createErrorResponse(error.message, error.status) } - // Clean up external webhook subscriptions before undeploying - await cleanupWebhooksForWorkflow(id, workflowData as Record, requestId) - const result = await undeployWorkflow({ workflowId: id }) if (!result.success) { return createErrorResponse(result.error || 'Failed to undeploy workflow', 500) } + await cleanupWebhooksForWorkflow(id, workflowData as Record, requestId) + await removeMcpToolsForWorkflow(id, requestId) logger.info(`[${requestId}] Workflow undeployed successfully: ${id}`) diff --git a/apps/sim/app/api/workflows/[id]/deployments/[version]/revert/route.ts b/apps/sim/app/api/workflows/[id]/deployments/[version]/revert/route.ts index 6050bb4b25..d3762c9181 100644 --- a/apps/sim/app/api/workflows/[id]/deployments/[version]/revert/route.ts +++ b/apps/sim/app/api/workflows/[id]/deployments/[version]/revert/route.ts @@ -5,7 +5,6 @@ import type { NextRequest } from 'next/server' import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log' import { env } from '@/lib/core/config/env' import { generateRequestId } from '@/lib/core/utils/request' -import { 
syncMcpToolsForWorkflow } from '@/lib/mcp/workflow-mcp-sync' import { saveWorkflowToNormalizedTables } from '@/lib/workflows/persistence/utils' import { validateWorkflowPermissions } from '@/lib/workflows/utils' import { createErrorResponse, createSuccessResponse } from '@/app/api/workflows/utils' @@ -91,13 +90,6 @@ export async function POST( .set({ lastSynced: new Date(), updatedAt: new Date() }) .where(eq(workflow.id, id)) - await syncMcpToolsForWorkflow({ - workflowId: id, - requestId, - state: deployedState, - context: 'revert', - }) - try { const socketServerUrl = env.SOCKET_SERVER_URL || 'http://localhost:3002' await fetch(`${socketServerUrl}/api/workflow-reverted`, { diff --git a/apps/sim/app/api/workflows/[id]/duplicate/route.ts b/apps/sim/app/api/workflows/[id]/duplicate/route.ts index ad37410c9d..cc00c0c0b7 100644 --- a/apps/sim/app/api/workflows/[id]/duplicate/route.ts +++ b/apps/sim/app/api/workflows/[id]/duplicate/route.ts @@ -15,6 +15,7 @@ const DuplicateRequestSchema = z.object({ color: z.string().optional(), workspaceId: z.string().optional(), folderId: z.string().nullable().optional(), + newId: z.string().uuid().optional(), }) // POST /api/workflows/[id]/duplicate - Duplicate a workflow with all its blocks, edges, and subflows @@ -32,7 +33,8 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id: try { const body = await req.json() - const { name, description, color, workspaceId, folderId } = DuplicateRequestSchema.parse(body) + const { name, description, color, workspaceId, folderId, newId } = + DuplicateRequestSchema.parse(body) logger.info(`[${requestId}] Duplicating workflow ${sourceWorkflowId} for user ${userId}`) @@ -45,6 +47,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id: workspaceId, folderId, requestId, + newWorkflowId: newId, }) try { diff --git a/apps/sim/app/api/workflows/[id]/execute/route.ts b/apps/sim/app/api/workflows/[id]/execute/route.ts index 
1200debd41..15d5d22a6f 100644 --- a/apps/sim/app/api/workflows/[id]/execute/route.ts +++ b/apps/sim/app/api/workflows/[id]/execute/route.ts @@ -349,11 +349,55 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id: runFromBlock: rawRunFromBlock, } = validation.data + if (isPublicApiAccess && isClientSession) { + return NextResponse.json( + { error: 'Public API callers cannot set isClientSession' }, + { status: 400 } + ) + } + + if (auth.authType === 'api_key') { + if (isClientSession) { + return NextResponse.json( + { error: 'API key callers cannot set isClientSession' }, + { status: 400 } + ) + } + + if (workflowStateOverride) { + return NextResponse.json( + { error: 'API key callers cannot provide workflowStateOverride' }, + { status: 400 } + ) + } + + if (useDraftState) { + return NextResponse.json( + { error: 'API key callers cannot execute draft workflow state' }, + { status: 400 } + ) + } + } + // Resolve runFromBlock snapshot from executionId if needed let resolvedRunFromBlock: | { startBlockId: string; sourceSnapshot: SerializableExecutionState } | undefined if (rawRunFromBlock) { + if (rawRunFromBlock.sourceSnapshot && auth.authType === 'api_key') { + return NextResponse.json( + { error: 'API key callers cannot provide runFromBlock.sourceSnapshot' }, + { status: 400 } + ) + } + + if (rawRunFromBlock.executionId && (auth.authType === 'api_key' || isPublicApiAccess)) { + return NextResponse.json( + { error: 'External callers cannot resume from stored execution snapshots' }, + { status: 400 } + ) + } + if (rawRunFromBlock.sourceSnapshot && !isPublicApiAccess) { // Public API callers cannot inject arbitrary block state via sourceSnapshot. // They must use executionId to resume from a server-stored execution state. 
@@ -362,13 +406,13 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id: sourceSnapshot: rawRunFromBlock.sourceSnapshot as SerializableExecutionState, } } else if (rawRunFromBlock.executionId) { - const { getExecutionState, getLatestExecutionState } = await import( + const { getExecutionStateForWorkflow, getLatestExecutionState } = await import( '@/lib/workflows/executor/execution-state' ) const snapshot = rawRunFromBlock.executionId === 'latest' ? await getLatestExecutionState(workflowId) - : await getExecutionState(rawRunFromBlock.executionId) + : await getExecutionStateForWorkflow(rawRunFromBlock.executionId, workflowId) if (!snapshot) { return NextResponse.json( { @@ -425,6 +469,25 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id: const enableSSE = streamHeader || streamParam === true const executionModeHeader = req.headers.get('X-Execution-Mode') const isAsyncMode = executionModeHeader === 'async' + const requiresWriteExecutionAccess = Boolean( + useDraftState || workflowStateOverride || rawRunFromBlock + ) + + if ( + isAsyncMode && + (body.useDraftState !== undefined || + body.workflowStateOverride !== undefined || + body.runFromBlock !== undefined || + body.stopAfterBlockId !== undefined || + body.selectedOutputs?.length || + body.includeFileBase64 !== undefined || + body.base64MaxBytes !== undefined) + ) { + return NextResponse.json( + { error: 'Async execution does not support draft or override execution controls' }, + { status: 400 } + ) + } logger.info(`[${requestId}] Starting server-side execution`, { workflowId, @@ -460,7 +523,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id: const workflowAuthorization = await authorizeWorkflowByWorkspacePermission({ workflowId, userId, - action: shouldUseDraftState ? 'write' : 'read', + action: requiresWriteExecutionAccess ? 
'write' : 'read', }) if (!workflowAuthorization.allowed) { return NextResponse.json( @@ -499,6 +562,13 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id: } const workspaceId = workflow.workspaceId + if (auth.apiKeyType === 'workspace' && auth.workspaceId !== workspaceId) { + return NextResponse.json( + { error: 'API key is not authorized for this workspace' }, + { status: 403 } + ) + } + logger.info(`[${requestId}] Preprocessing passed`, { workflowId, actorUserId, diff --git a/apps/sim/app/api/workflows/[id]/executions/[executionId]/cancel/route.ts b/apps/sim/app/api/workflows/[id]/executions/[executionId]/cancel/route.ts index 49c99e1ede..842e130eaa 100644 --- a/apps/sim/app/api/workflows/[id]/executions/[executionId]/cancel/route.ts +++ b/apps/sim/app/api/workflows/[id]/executions/[executionId]/cancel/route.ts @@ -33,6 +33,16 @@ export async function POST( ) } + if ( + auth.apiKeyType === 'workspace' && + workflowAuthorization.workflow?.workspaceId !== auth.workspaceId + ) { + return NextResponse.json( + { error: 'API key is not authorized for this workspace' }, + { status: 403 } + ) + } + logger.info('Cancel execution requested', { workflowId, executionId, userId: auth.userId }) const marked = await markExecutionCancelled(executionId) diff --git a/apps/sim/app/api/workflows/[id]/executions/[executionId]/stream/route.ts b/apps/sim/app/api/workflows/[id]/executions/[executionId]/stream/route.ts index 1f77ff391d..745c5b7d44 100644 --- a/apps/sim/app/api/workflows/[id]/executions/[executionId]/stream/route.ts +++ b/apps/sim/app/api/workflows/[id]/executions/[executionId]/stream/route.ts @@ -46,6 +46,16 @@ export async function GET( ) } + if ( + auth.apiKeyType === 'workspace' && + workflowAuthorization.workflow?.workspaceId !== auth.workspaceId + ) { + return NextResponse.json( + { error: 'API key is not authorized for this workspace' }, + { status: 403 } + ) + } + const meta = await getExecutionMeta(executionId) if (!meta) { return 
NextResponse.json({ error: 'Execution buffer not found or expired' }, { status: 404 }) diff --git a/apps/sim/app/api/workflows/[id]/restore/route.ts b/apps/sim/app/api/workflows/[id]/restore/route.ts new file mode 100644 index 0000000000..7e8a76e8a3 --- /dev/null +++ b/apps/sim/app/api/workflows/[id]/restore/route.ts @@ -0,0 +1,55 @@ +import { createLogger } from '@sim/logger' +import { type NextRequest, NextResponse } from 'next/server' +import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid' +import { generateRequestId } from '@/lib/core/utils/request' +import { restoreWorkflow } from '@/lib/workflows/lifecycle' +import { getWorkflowById } from '@/lib/workflows/utils' +import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils' + +const logger = createLogger('RestoreWorkflowAPI') + +export async function POST(request: NextRequest, { params }: { params: Promise<{ id: string }> }) { + const requestId = generateRequestId() + const { id: workflowId } = await params + + try { + const auth = await checkSessionOrInternalAuth(request, { requireWorkflowId: false }) + if (!auth.success || !auth.userId) { + return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) + } + + const workflowData = await getWorkflowById(workflowId, { includeArchived: true }) + if (!workflowData) { + return NextResponse.json({ error: 'Workflow not found' }, { status: 404 }) + } + + if (workflowData.workspaceId) { + const permission = await getUserEntityPermissions( + auth.userId, + 'workspace', + workflowData.workspaceId + ) + if (permission !== 'admin' && permission !== 'write') { + return NextResponse.json({ error: 'Insufficient permissions' }, { status: 403 }) + } + } else if (workflowData.userId !== auth.userId) { + return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) + } + + const result = await restoreWorkflow(workflowId, { requestId }) + + if (!result.restored) { + return NextResponse.json({ error: 'Workflow is not archived' }, { status: 
400 }) + } + + logger.info(`[${requestId}] Restored workflow ${workflowId}`) + + return NextResponse.json({ success: true }) + } catch (error) { + logger.error(`[${requestId}] Error restoring workflow ${workflowId}`, error) + return NextResponse.json( + { error: error instanceof Error ? error.message : 'Internal server error' }, + { status: 500 } + ) + } +} diff --git a/apps/sim/app/api/workflows/[id]/route.test.ts b/apps/sim/app/api/workflows/[id]/route.test.ts index d886e27d46..2000e5093e 100644 --- a/apps/sim/app/api/workflows/[id]/route.test.ts +++ b/apps/sim/app/api/workflows/[id]/route.test.ts @@ -21,7 +21,7 @@ const mockCheckSessionOrInternalAuth = vi.fn() const mockLoadWorkflowFromNormalizedTables = vi.fn() const mockGetWorkflowById = vi.fn() const mockAuthorizeWorkflowByWorkspacePermission = vi.fn() -const mockDbDelete = vi.fn() +const mockArchiveWorkflow = vi.fn() const mockDbUpdate = vi.fn() const mockDbSelect = vi.fn() @@ -72,9 +72,12 @@ vi.mock('@/lib/workflows/utils', () => ({ }) => mockAuthorizeWorkflowByWorkspacePermission(params), })) +vi.mock('@/lib/workflows/lifecycle', () => ({ + archiveWorkflow: (...args: unknown[]) => mockArchiveWorkflow(...args), +})) + vi.mock('@sim/db', () => ({ db: { - delete: () => mockDbDelete(), update: () => mockDbUpdate(), select: () => mockDbSelect(), }, @@ -297,8 +300,9 @@ describe('Workflow By ID API Route', () => { }), }) - mockDbDelete.mockReturnValue({ - where: vi.fn().mockResolvedValue([{ id: 'workflow-123' }]), + mockArchiveWorkflow.mockResolvedValue({ + archived: true, + workflow: mockWorkflow, }) setupGlobalFetchMock({ ok: true }) @@ -340,8 +344,9 @@ describe('Workflow By ID API Route', () => { }), }) - mockDbDelete.mockReturnValue({ - where: vi.fn().mockResolvedValue([{ id: 'workflow-123' }]), + mockArchiveWorkflow.mockResolvedValue({ + archived: true, + workflow: mockWorkflow, }) setupGlobalFetchMock({ ok: true }) diff --git a/apps/sim/app/api/workflows/[id]/route.ts 
b/apps/sim/app/api/workflows/[id]/route.ts index 19d89e8eeb..8b79fe2c28 100644 --- a/apps/sim/app/api/workflows/[id]/route.ts +++ b/apps/sim/app/api/workflows/[id]/route.ts @@ -1,14 +1,13 @@ import { db } from '@sim/db' -import { templates, webhook, workflow } from '@sim/db/schema' +import { templates, workflow } from '@sim/db/schema' import { createLogger } from '@sim/logger' import { and, eq, isNull, ne } from 'drizzle-orm' import { type NextRequest, NextResponse } from 'next/server' import { z } from 'zod' import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log' import { AuthType, checkHybridAuth, checkSessionOrInternalAuth } from '@/lib/auth/hybrid' -import { env } from '@/lib/core/config/env' -import { PlatformEvents } from '@/lib/core/telemetry' import { generateRequestId } from '@/lib/core/utils/request' +import { archiveWorkflow } from '@/lib/workflows/lifecycle' import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/persistence/utils' import { authorizeWorkflowByWorkspacePermission, getWorkflowById } from '@/lib/workflows/utils' @@ -49,6 +48,13 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{ return NextResponse.json({ error: 'Workflow not found' }, { status: 404 }) } + if (auth.apiKeyType === 'workspace' && auth.workspaceId !== workflowData.workspaceId) { + return NextResponse.json( + { error: 'API key is not authorized for this workspace' }, + { status: 403 } + ) + } + if (isInternalCall && !userId) { // Internal system calls (e.g. workflow-in-workflow executor) may not carry a userId. // These are already authenticated via internal JWT; allow read access. 
@@ -183,7 +189,7 @@ export async function DELETE( const totalWorkflowsInWorkspace = await db .select({ id: workflow.id }) .from(workflow) - .where(eq(workflow.workspaceId, workflowData.workspaceId)) + .where(and(eq(workflow.workspaceId, workflowData.workspaceId), isNull(workflow.archivedAt))) if (totalWorkflowsInWorkspace.length <= 1) { return NextResponse.json( @@ -241,92 +247,13 @@ export async function DELETE( } } - // Clean up external webhooks before deleting workflow - try { - const { cleanupExternalWebhook } = await import('@/lib/webhooks/provider-subscriptions') - const webhooksToCleanup = await db - .select({ - webhook: webhook, - workflow: { - id: workflow.id, - userId: workflow.userId, - workspaceId: workflow.workspaceId, - }, - }) - .from(webhook) - .innerJoin(workflow, eq(webhook.workflowId, workflow.id)) - .where(eq(webhook.workflowId, workflowId)) - - if (webhooksToCleanup.length > 0) { - logger.info( - `[${requestId}] Found ${webhooksToCleanup.length} webhook(s) to cleanup for workflow ${workflowId}` - ) - - // Clean up each webhook (don't fail if cleanup fails) - for (const webhookData of webhooksToCleanup) { - try { - await cleanupExternalWebhook(webhookData.webhook, webhookData.workflow, requestId) - } catch (cleanupError) { - logger.warn( - `[${requestId}] Failed to cleanup external webhook ${webhookData.webhook.id} during workflow deletion`, - cleanupError - ) - // Continue with deletion even if cleanup fails - } - } - } - } catch (webhookCleanupError) { - logger.warn( - `[${requestId}] Error during webhook cleanup for workflow deletion (continuing with deletion)`, - webhookCleanupError - ) - // Continue with workflow deletion even if webhook cleanup fails - } - - await db.delete(workflow).where(eq(workflow.id, workflowId)) - - try { - PlatformEvents.workflowDeleted({ - workflowId, - workspaceId: workflowData.workspaceId || undefined, - }) - } catch { - // Telemetry should not fail the operation + const archiveResult = await 
archiveWorkflow(workflowId, { requestId }) + if (!archiveResult.workflow) { + return NextResponse.json({ error: 'Workflow not found' }, { status: 404 }) } const elapsed = Date.now() - startTime - logger.info(`[${requestId}] Successfully deleted workflow ${workflowId} in ${elapsed}ms`) - - // Notify Socket.IO system to disconnect users from this workflow's room - // This prevents "Block not found" errors when collaborative updates try to process - // after the workflow has been deleted - try { - const socketUrl = env.SOCKET_SERVER_URL || 'http://localhost:3002' - const socketResponse = await fetch(`${socketUrl}/api/workflow-deleted`, { - method: 'POST', - headers: { - 'Content-Type': 'application/json', - 'x-api-key': env.INTERNAL_API_SECRET, - }, - body: JSON.stringify({ workflowId }), - }) - - if (socketResponse.ok) { - logger.info( - `[${requestId}] Notified Socket.IO server about workflow ${workflowId} deletion` - ) - } else { - logger.warn( - `[${requestId}] Failed to notify Socket.IO server about workflow ${workflowId} deletion` - ) - } - } catch (error) { - logger.warn( - `[${requestId}] Error notifying Socket.IO server about workflow ${workflowId} deletion:`, - error - ) - // Don't fail the deletion if Socket.IO notification fails - } + logger.info(`[${requestId}] Successfully archived workflow ${workflowId} in ${elapsed}ms`) recordAudit({ workspaceId: workflowData.workspaceId || null, @@ -337,8 +264,9 @@ export async function DELETE( resourceType: AuditResourceType.WORKFLOW, resourceId: workflowId, resourceName: workflowData.name, - description: `Deleted workflow "${workflowData.name}"`, + description: `Archived workflow "${workflowData.name}"`, metadata: { + archived: archiveResult.archived, deleteTemplates: deleteTemplatesParam === 'delete', }, request, @@ -417,6 +345,7 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{ const conditions = [ eq(workflow.workspaceId, workflowData.workspaceId), + isNull(workflow.archivedAt), 
eq(workflow.name, targetName), ne(workflow.id, workflowId), ] diff --git a/apps/sim/app/api/workflows/[id]/state/route.ts b/apps/sim/app/api/workflows/[id]/state/route.ts index 7cca499088..26a63ecdd8 100644 --- a/apps/sim/app/api/workflows/[id]/state/route.ts +++ b/apps/sim/app/api/workflows/[id]/state/route.ts @@ -8,7 +8,10 @@ import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid' import { env } from '@/lib/core/config/env' import { generateRequestId } from '@/lib/core/utils/request' import { extractAndPersistCustomTools } from '@/lib/workflows/persistence/custom-tools-persistence' -import { saveWorkflowToNormalizedTables } from '@/lib/workflows/persistence/utils' +import { + loadWorkflowFromNormalizedTables, + saveWorkflowToNormalizedTables, +} from '@/lib/workflows/persistence/utils' import { sanitizeAgentToolsInBlocks } from '@/lib/workflows/sanitization/validation' import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils' import { validateEdges } from '@/stores/workflows/workflow/edge-validation' @@ -109,6 +112,49 @@ const WorkflowStateSchema = z.object({ variables: z.any().optional(), // Workflow variables }) +/** + * GET /api/workflows/[id]/state + * Fetch the current workflow state from normalized tables. + * Used by the client after server-side edits (edit_workflow) to stay in sync. 
+ */ +export async function GET(request: NextRequest, { params }: { params: Promise<{ id: string }> }) { + const { id: workflowId } = await params + + try { + const auth = await checkSessionOrInternalAuth(request, { requireWorkflowId: false }) + if (!auth.success || !auth.userId) { + return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) + } + + const authorization = await authorizeWorkflowByWorkspacePermission({ + workflowId, + userId: auth.userId, + action: 'read', + }) + if (!authorization.allowed) { + return NextResponse.json({ error: 'Forbidden' }, { status: 403 }) + } + + const normalized = await loadWorkflowFromNormalizedTables(workflowId) + if (!normalized) { + return NextResponse.json({ error: 'Workflow state not found' }, { status: 404 }) + } + + return NextResponse.json({ + blocks: normalized.blocks, + edges: normalized.edges, + loops: normalized.loops || {}, + parallels: normalized.parallels || {}, + }) + } catch (error) { + logger.error('Failed to fetch workflow state', { + workflowId, + error: error instanceof Error ? 
error.message : String(error), + }) + return NextResponse.json({ error: 'Internal server error' }, { status: 500 }) + } +} + /** * PUT /api/workflows/[id]/state * Save complete workflow state to normalized database tables diff --git a/apps/sim/app/api/workflows/route.test.ts b/apps/sim/app/api/workflows/route.test.ts index bff62acfc8..b1d92ca64e 100644 --- a/apps/sim/app/api/workflows/route.test.ts +++ b/apps/sim/app/api/workflows/route.test.ts @@ -45,6 +45,8 @@ vi.mock('@sim/db/schema', () => ({ id: 'id', folderId: 'folderId', userId: 'userId', + name: 'name', + archivedAt: 'archivedAt', updatedAt: 'updatedAt', workspaceId: 'workspaceId', sortOrder: 'sortOrder', @@ -108,11 +110,16 @@ describe('Workflows API Route - POST ordering', () => { const minResultsQueue: Array> = [ [{ minOrder: 5 }], [{ minOrder: 2 }], + [], ] mockDbSelect.mockImplementation(() => ({ from: vi.fn().mockReturnValue({ - where: vi.fn().mockImplementation(() => Promise.resolve(minResultsQueue.shift() ?? [])), + where: vi.fn().mockImplementation(() => ({ + limit: vi.fn().mockImplementation(() => Promise.resolve(minResultsQueue.shift() ?? [])), + then: (onFulfilled: (value: Array<{ minOrder: number }>) => unknown) => + Promise.resolve(minResultsQueue.shift() ?? []).then(onFulfilled), + })), }), })) @@ -141,11 +148,15 @@ describe('Workflows API Route - POST ordering', () => { }) it('defaults to sortOrder 0 when there are no siblings', async () => { - const minResultsQueue: Array> = [[], []] + const minResultsQueue: Array> = [[], [], []] mockDbSelect.mockImplementation(() => ({ from: vi.fn().mockReturnValue({ - where: vi.fn().mockImplementation(() => Promise.resolve(minResultsQueue.shift() ?? [])), + where: vi.fn().mockImplementation(() => ({ + limit: vi.fn().mockImplementation(() => Promise.resolve(minResultsQueue.shift() ?? [])), + then: (onFulfilled: (value: Array<{ minOrder: number }>) => unknown) => + Promise.resolve(minResultsQueue.shift() ?? 
[]).then(onFulfilled), + })), }), })) diff --git a/apps/sim/app/api/workflows/route.ts b/apps/sim/app/api/workflows/route.ts index 611d808cf6..3181185b75 100644 --- a/apps/sim/app/api/workflows/route.ts +++ b/apps/sim/app/api/workflows/route.ts @@ -1,21 +1,27 @@ import { db } from '@sim/db' import { permissions, workflow, workflowFolder } from '@sim/db/schema' import { createLogger } from '@sim/logger' -import { and, asc, eq, inArray, isNull, min } from 'drizzle-orm' +import { and, asc, eq, inArray, isNull, min, sql } from 'drizzle-orm' import { type NextRequest, NextResponse } from 'next/server' import { z } from 'zod' import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log' import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid' import { generateRequestId } from '@/lib/core/utils/request' +import { getNextWorkflowColor } from '@/lib/workflows/colors' +import { listWorkflows, type WorkflowScope } from '@/lib/workflows/utils' import { getUserEntityPermissions, workspaceExists } from '@/lib/workspaces/permissions/utils' import { verifyWorkspaceMembership } from '@/app/api/workflows/utils' const logger = createLogger('WorkflowAPI') const CreateWorkflowSchema = z.object({ + id: z.string().uuid().optional(), name: z.string().min(1, 'Name is required'), description: z.string().optional().default(''), - color: z.string().optional().default('#3972F6'), + color: z + .string() + .optional() + .transform((c) => c || getNextWorkflowColor()), workspaceId: z.string().optional(), folderId: z.string().nullable().optional(), sortOrder: z.number().int().optional(), @@ -27,6 +33,7 @@ export async function GET(request: NextRequest) { const startTime = Date.now() const url = new URL(request.url) const workspaceId = url.searchParams.get('workspaceId') + const scope = (url.searchParams.get('scope') ?? 
'active') as WorkflowScope try { const auth = await checkSessionOrInternalAuth(request, { requireWorkflowId: false }) @@ -62,16 +69,16 @@ export async function GET(request: NextRequest) { } } + if (!['active', 'archived', 'all'].includes(scope)) { + return NextResponse.json({ error: 'Invalid scope' }, { status: 400 }) + } + let workflows const orderByClause = [asc(workflow.sortOrder), asc(workflow.createdAt), asc(workflow.id)] if (workspaceId) { - workflows = await db - .select() - .from(workflow) - .where(eq(workflow.workspaceId, workspaceId)) - .orderBy(...orderByClause) + workflows = await listWorkflows(workspaceId, { scope }) } else { const workspacePermissionRows = await db .select({ workspaceId: permissions.entityId }) @@ -84,7 +91,16 @@ export async function GET(request: NextRequest) { workflows = await db .select() .from(workflow) - .where(inArray(workflow.workspaceId, workspaceIds)) + .where( + scope === 'all' + ? inArray(workflow.workspaceId, workspaceIds) + : scope === 'archived' + ? 
and( + inArray(workflow.workspaceId, workspaceIds), + sql`${workflow.archivedAt} IS NOT NULL` + ) + : and(inArray(workflow.workspaceId, workspaceIds), isNull(workflow.archivedAt)) + ) .orderBy(...orderByClause) } @@ -109,6 +125,7 @@ export async function POST(req: NextRequest) { try { const body = await req.json() const { + id: clientId, name, description, color, @@ -140,7 +157,7 @@ export async function POST(req: NextRequest) { ) } - const workflowId = crypto.randomUUID() + const workflowId = clientId || crypto.randomUUID() const now = new Date() logger.info(`[${requestId}] Creating workflow ${workflowId} for user ${userId}`) @@ -173,7 +190,13 @@ export async function POST(req: NextRequest) { db .select({ minOrder: min(workflow.sortOrder) }) .from(workflow) - .where(and(eq(workflow.workspaceId, workspaceId), workflowParentCondition)), + .where( + and( + eq(workflow.workspaceId, workspaceId), + workflowParentCondition, + isNull(workflow.archivedAt) + ) + ), db .select({ minOrder: min(workflowFolder.sortOrder) }) .from(workflowFolder) @@ -191,6 +214,31 @@ export async function POST(req: NextRequest) { sortOrder = minSortOrder != null ? 
minSortOrder - 1 : 0 } + const duplicateConditions = [ + eq(workflow.workspaceId, workspaceId), + isNull(workflow.archivedAt), + eq(workflow.name, name), + ] + + if (folderId) { + duplicateConditions.push(eq(workflow.folderId, folderId)) + } else { + duplicateConditions.push(isNull(workflow.folderId)) + } + + const [duplicateWorkflow] = await db + .select({ id: workflow.id }) + .from(workflow) + .where(and(...duplicateConditions)) + .limit(1) + + if (duplicateWorkflow) { + return NextResponse.json( + { error: `A workflow named "${name}" already exists in this folder` }, + { status: 409 } + ) + } + await db.insert(workflow).values({ id: workflowId, userId, diff --git a/apps/sim/app/api/workspaces/[id]/api-keys/[keyId]/route.ts b/apps/sim/app/api/workspaces/[id]/api-keys/[keyId]/route.ts index d95daf99ee..bb9a5ff698 100644 --- a/apps/sim/app/api/workspaces/[id]/api-keys/[keyId]/route.ts +++ b/apps/sim/app/api/workspaces/[id]/api-keys/[keyId]/route.ts @@ -32,7 +32,7 @@ export async function PUT( const userId = session.user.id const permission = await getUserEntityPermissions(userId, 'workspace', workspaceId) - if (!permission || (permission !== 'admin' && permission !== 'write')) { + if (permission !== 'admin') { return NextResponse.json({ error: 'Forbidden' }, { status: 403 }) } @@ -128,7 +128,7 @@ export async function DELETE( const userId = session.user.id const permission = await getUserEntityPermissions(userId, 'workspace', workspaceId) - if (!permission || (permission !== 'admin' && permission !== 'write')) { + if (permission !== 'admin') { return NextResponse.json({ error: 'Forbidden' }, { status: 403 }) } diff --git a/apps/sim/app/api/workspaces/[id]/byok-keys/route.ts b/apps/sim/app/api/workspaces/[id]/byok-keys/route.ts index ab4c9600df..e3f34529a2 100644 --- a/apps/sim/app/api/workspaces/[id]/byok-keys/route.ts +++ b/apps/sim/app/api/workspaces/[id]/byok-keys/route.ts @@ -13,7 +13,21 @@ import { getUserEntityPermissions, getWorkspaceById } from 
'@/lib/workspaces/per const logger = createLogger('WorkspaceBYOKKeysAPI') -const VALID_PROVIDERS = ['openai', 'anthropic', 'google', 'mistral'] as const +const VALID_PROVIDERS = [ + 'openai', + 'anthropic', + 'google', + 'mistral', + 'firecrawl', + 'exa', + 'serper', + 'linkup', + 'perplexity', + 'jina', + 'google_cloud', + 'parallel_ai', + 'brandfetch', +] as const const UpsertKeySchema = z.object({ providerId: z.enum(VALID_PROVIDERS), diff --git a/apps/sim/app/api/workspaces/[id]/files/[fileId]/content/route.ts b/apps/sim/app/api/workspaces/[id]/files/[fileId]/content/route.ts new file mode 100644 index 0000000000..24b5eb56cf --- /dev/null +++ b/apps/sim/app/api/workspaces/[id]/files/[fileId]/content/route.ts @@ -0,0 +1,94 @@ +import { createLogger } from '@sim/logger' +import { type NextRequest, NextResponse } from 'next/server' +import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log' +import { getSession } from '@/lib/auth' +import { generateRequestId } from '@/lib/core/utils/request' +import { updateWorkspaceFileContent } from '@/lib/uploads/contexts/workspace' +import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils' + +export const dynamic = 'force-dynamic' + +const logger = createLogger('WorkspaceFileContentAPI') + +/** + * PUT /api/workspaces/[id]/files/[fileId]/content + * Update a workspace file's text content (requires write permission) + */ +export async function PUT( + request: NextRequest, + { params }: { params: Promise<{ id: string; fileId: string }> } +) { + const requestId = generateRequestId() + const { id: workspaceId, fileId } = await params + + try { + const session = await getSession() + if (!session?.user?.id) { + return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) + } + + const userPermission = await getUserEntityPermissions(session.user.id, 'workspace', workspaceId) + if (userPermission !== 'admin' && userPermission !== 'write') { + logger.warn( + `[${requestId}] User 
${session.user.id} lacks write permission for workspace ${workspaceId}` + ) + return NextResponse.json({ error: 'Insufficient permissions' }, { status: 403 }) + } + + const body = await request.json() + const { content } = body as { content: string } + + if (typeof content !== 'string') { + return NextResponse.json({ error: 'Content must be a string' }, { status: 400 }) + } + + const buffer = Buffer.from(content, 'utf-8') + + const maxFileSizeBytes = 50 * 1024 * 1024 + if (buffer.length > maxFileSizeBytes) { + return NextResponse.json( + { error: `File size exceeds ${maxFileSizeBytes / 1024 / 1024}MB limit` }, + { status: 413 } + ) + } + + const updatedFile = await updateWorkspaceFileContent( + workspaceId, + fileId, + session.user.id, + buffer + ) + + logger.info(`[${requestId}] Updated content for workspace file: ${updatedFile.name}`) + + recordAudit({ + workspaceId, + actorId: session.user.id, + actorName: session.user.name, + actorEmail: session.user.email, + action: AuditAction.FILE_UPDATED, + resourceType: AuditResourceType.FILE, + resourceId: fileId, + description: `Updated content of file "${updatedFile.name}"`, + request, + }) + + return NextResponse.json({ + success: true, + file: updatedFile, + }) + } catch (error) { + const errorMessage = error instanceof Error ? error.message : 'Failed to update file content' + const isNotFound = errorMessage.includes('File not found') + const isQuotaExceeded = errorMessage.includes('Storage limit exceeded') + const status = isNotFound ? 404 : isQuotaExceeded ? 
402 : 500 + + if (status === 500) { + logger.error(`[${requestId}] Error updating file content:`, error) + } else { + logger.warn(`[${requestId}] ${errorMessage}`) + } + + return NextResponse.json({ success: false, error: errorMessage }, { status }) + } +} diff --git a/apps/sim/app/api/workspaces/[id]/files/[fileId]/restore/route.ts b/apps/sim/app/api/workspaces/[id]/files/[fileId]/restore/route.ts new file mode 100644 index 0000000000..eae4bae436 --- /dev/null +++ b/apps/sim/app/api/workspaces/[id]/files/[fileId]/restore/route.ts @@ -0,0 +1,40 @@ +import { createLogger } from '@sim/logger' +import { type NextRequest, NextResponse } from 'next/server' +import { getSession } from '@/lib/auth' +import { generateRequestId } from '@/lib/core/utils/request' +import { restoreWorkspaceFile } from '@/lib/uploads/contexts/workspace' +import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils' + +const logger = createLogger('RestoreWorkspaceFileAPI') + +export async function POST( + request: NextRequest, + { params }: { params: Promise<{ id: string; fileId: string }> } +) { + const requestId = generateRequestId() + const { id: workspaceId, fileId } = await params + + try { + const session = await getSession() + if (!session?.user?.id) { + return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) + } + + const userPermission = await getUserEntityPermissions(session.user.id, 'workspace', workspaceId) + if (userPermission !== 'admin' && userPermission !== 'write') { + return NextResponse.json({ error: 'Insufficient permissions' }, { status: 403 }) + } + + await restoreWorkspaceFile(workspaceId, fileId) + + logger.info(`[${requestId}] Restored workspace file ${fileId}`) + + return NextResponse.json({ success: true }) + } catch (error) { + logger.error(`[${requestId}] Error restoring workspace file ${fileId}`, error) + return NextResponse.json( + { error: error instanceof Error ? 
error.message : 'Internal server error' }, + { status: 500 } + ) + } +} diff --git a/apps/sim/app/api/workspaces/[id]/files/[fileId]/route.ts b/apps/sim/app/api/workspaces/[id]/files/[fileId]/route.ts index 80c91a2adf..c440618863 100644 --- a/apps/sim/app/api/workspaces/[id]/files/[fileId]/route.ts +++ b/apps/sim/app/api/workspaces/[id]/files/[fileId]/route.ts @@ -3,16 +3,84 @@ import { type NextRequest, NextResponse } from 'next/server' import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log' import { getSession } from '@/lib/auth' import { generateRequestId } from '@/lib/core/utils/request' -import { deleteWorkspaceFile } from '@/lib/uploads/contexts/workspace' +import { + deleteWorkspaceFile, + FileConflictError, + renameWorkspaceFile, +} from '@/lib/uploads/contexts/workspace' import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils' export const dynamic = 'force-dynamic' const logger = createLogger('WorkspaceFileAPI') +/** + * PATCH /api/workspaces/[id]/files/[fileId] + * Rename a workspace file (requires write permission) + */ +export async function PATCH( + request: NextRequest, + { params }: { params: Promise<{ id: string; fileId: string }> } +) { + const requestId = generateRequestId() + const { id: workspaceId, fileId } = await params + + try { + const session = await getSession() + if (!session?.user?.id) { + return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) + } + + const userPermission = await getUserEntityPermissions(session.user.id, 'workspace', workspaceId) + if (userPermission !== 'admin' && userPermission !== 'write') { + logger.warn( + `[${requestId}] User ${session.user.id} lacks write permission for workspace ${workspaceId}` + ) + return NextResponse.json({ error: 'Insufficient permissions' }, { status: 403 }) + } + + const body = await request.json() + const { name } = body + + if (!name || typeof name !== 'string' || !name.trim()) { + return NextResponse.json({ error: 'Name is 
required' }, { status: 400 }) + } + + const updatedFile = await renameWorkspaceFile(workspaceId, fileId, name) + + logger.info(`[${requestId}] Renamed workspace file: ${fileId} to "${updatedFile.name}"`) + + recordAudit({ + workspaceId, + actorId: session.user.id, + actorName: session.user.name, + actorEmail: session.user.email, + action: AuditAction.FILE_UPDATED, + resourceType: AuditResourceType.FILE, + resourceId: fileId, + description: `Renamed file to "${updatedFile.name}"`, + request, + }) + + return NextResponse.json({ + success: true, + file: updatedFile, + }) + } catch (error) { + logger.error(`[${requestId}] Error renaming workspace file:`, error) + return NextResponse.json( + { + success: false, + error: error instanceof Error ? error.message : 'Failed to rename file', + }, + { status: error instanceof FileConflictError ? 409 : 500 } + ) + } +} + /** * DELETE /api/workspaces/[id]/files/[fileId] - * Delete a workspace file (requires write permission) + * Archive a workspace file (requires write permission) */ export async function DELETE( request: NextRequest, @@ -38,7 +106,7 @@ export async function DELETE( await deleteWorkspaceFile(workspaceId, fileId) - logger.info(`[${requestId}] Deleted workspace file: ${fileId}`) + logger.info(`[${requestId}] Archived workspace file: ${fileId}`) recordAudit({ workspaceId, @@ -48,7 +116,7 @@ export async function DELETE( action: AuditAction.FILE_DELETED, resourceType: AuditResourceType.FILE, resourceId: fileId, - description: `Deleted file "${fileId}"`, + description: `Archived file "${fileId}"`, request, }) diff --git a/apps/sim/app/api/workspaces/[id]/files/route.ts b/apps/sim/app/api/workspaces/[id]/files/route.ts index a62575dce0..d6ceb728e0 100644 --- a/apps/sim/app/api/workspaces/[id]/files/route.ts +++ b/apps/sim/app/api/workspaces/[id]/files/route.ts @@ -3,7 +3,11 @@ import { type NextRequest, NextResponse } from 'next/server' import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log' 
import { getSession } from '@/lib/auth' import { generateRequestId } from '@/lib/core/utils/request' -import { listWorkspaceFiles, uploadWorkspaceFile } from '@/lib/uploads/contexts/workspace' +import { + listWorkspaceFiles, + uploadWorkspaceFile, + type WorkspaceFileScope, +} from '@/lib/uploads/contexts/workspace' import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils' import { verifyWorkspaceMembership } from '@/app/api/workflows/utils' @@ -34,7 +38,12 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{ return NextResponse.json({ error: 'Insufficient permissions' }, { status: 403 }) } - const files = await listWorkspaceFiles(workspaceId) + const scope = (new URL(request.url).searchParams.get('scope') ?? 'active') as WorkspaceFileScope + if (!['active', 'archived', 'all'].includes(scope)) { + return NextResponse.json({ error: 'Invalid scope' }, { status: 400 }) + } + + const files = await listWorkspaceFiles(workspaceId, { scope }) logger.info(`[${requestId}] Listed ${files.length} files for workspace ${workspaceId}`) diff --git a/apps/sim/app/api/workspaces/[id]/inbox/route.ts b/apps/sim/app/api/workspaces/[id]/inbox/route.ts new file mode 100644 index 0000000000..3e64cf3417 --- /dev/null +++ b/apps/sim/app/api/workspaces/[id]/inbox/route.ts @@ -0,0 +1,140 @@ +import { db, mothershipInboxTask, workspace } from '@sim/db' +import { createLogger } from '@sim/logger' +import { eq, sql } from 'drizzle-orm' +import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' +import { getSession } from '@/lib/auth' +import { hasInboxAccess } from '@/lib/billing/core/subscription' +import { disableInbox, enableInbox, updateInboxAddress } from '@/lib/mothership/inbox/lifecycle' +import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils' + +const logger = createLogger('InboxConfigAPI') + +const patchSchema = z.object({ + enabled: z.boolean().optional(), + username: 
z.string().min(1).max(64).optional(), +}) + +export async function GET(_req: NextRequest, { params }: { params: Promise<{ id: string }> }) { + const { id: workspaceId } = await params + const session = await getSession() + if (!session?.user?.id) { + return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) + } + + const [hasAccess, permission] = await Promise.all([ + hasInboxAccess(session.user.id), + getUserEntityPermissions(session.user.id, 'workspace', workspaceId), + ]) + if (!hasAccess) { + return NextResponse.json({ error: 'Sim Mailer requires a Max plan' }, { status: 403 }) + } + if (!permission) { + return NextResponse.json({ error: 'Not found' }, { status: 404 }) + } + + const [wsResult, statsResult] = await Promise.all([ + db + .select({ + inboxEnabled: workspace.inboxEnabled, + inboxAddress: workspace.inboxAddress, + }) + .from(workspace) + .where(eq(workspace.id, workspaceId)) + .limit(1), + db + .select({ + status: mothershipInboxTask.status, + count: sql`count(*)::int`, + }) + .from(mothershipInboxTask) + .where(eq(mothershipInboxTask.workspaceId, workspaceId)) + .groupBy(mothershipInboxTask.status), + ]) + + const [ws] = wsResult + if (!ws) { + return NextResponse.json({ error: 'Workspace not found' }, { status: 404 }) + } + + const stats = { + total: 0, + completed: 0, + processing: 0, + failed: 0, + } + for (const row of statsResult) { + const count = Number(row.count) + stats.total += count + if (row.status === 'completed') stats.completed = count + else if (row.status === 'processing') stats.processing = count + else if (row.status === 'failed') stats.failed = count + } + + return NextResponse.json({ + enabled: ws.inboxEnabled, + address: ws.inboxAddress, + taskStats: stats, + }) +} + +export async function PATCH(req: NextRequest, { params }: { params: Promise<{ id: string }> }) { + const { id: workspaceId } = await params + const session = await getSession() + if (!session?.user?.id) { + return NextResponse.json({ error: 
'Unauthorized' }, { status: 401 }) + } + + const [hasAccess, permission] = await Promise.all([ + hasInboxAccess(session.user.id), + getUserEntityPermissions(session.user.id, 'workspace', workspaceId), + ]) + if (!hasAccess) { + return NextResponse.json({ error: 'Sim Mailer requires a Max plan' }, { status: 403 }) + } + if (permission !== 'admin') { + return NextResponse.json({ error: 'Admin access required' }, { status: 403 }) + } + + try { + const body = patchSchema.parse(await req.json()) + + if (body.enabled === true) { + const [current] = await db + .select({ inboxEnabled: workspace.inboxEnabled }) + .from(workspace) + .where(eq(workspace.id, workspaceId)) + .limit(1) + if (current?.inboxEnabled) { + return NextResponse.json({ error: 'Inbox is already enabled' }, { status: 409 }) + } + const config = await enableInbox(workspaceId, { username: body.username }) + return NextResponse.json(config) + } + + if (body.enabled === false) { + await disableInbox(workspaceId) + return NextResponse.json({ enabled: false, address: null }) + } + + if (body.username) { + const config = await updateInboxAddress(workspaceId, body.username) + return NextResponse.json(config) + } + + return NextResponse.json({ error: 'No valid update provided' }, { status: 400 }) + } catch (error) { + if (error instanceof z.ZodError) { + return NextResponse.json({ error: 'Invalid request', details: error.errors }, { status: 400 }) + } + + logger.error('Inbox config update failed', { + workspaceId, + error: error instanceof Error ? error.message : 'Unknown error', + }) + return NextResponse.json( + { error: error instanceof Error ? 
error.message : 'Failed to update inbox' }, + { status: 500 } + ) + } +} diff --git a/apps/sim/app/api/workspaces/[id]/inbox/senders/route.ts b/apps/sim/app/api/workspaces/[id]/inbox/senders/route.ts new file mode 100644 index 0000000000..3b48f75db0 --- /dev/null +++ b/apps/sim/app/api/workspaces/[id]/inbox/senders/route.ts @@ -0,0 +1,172 @@ +import { db, mothershipInboxAllowedSender, permissions, user } from '@sim/db' +import { createLogger } from '@sim/logger' +import { and, eq } from 'drizzle-orm' +import { type NextRequest, NextResponse } from 'next/server' +import { v4 as uuidv4 } from 'uuid' +import { z } from 'zod' +import { getSession } from '@/lib/auth' +import { hasInboxAccess } from '@/lib/billing/core/subscription' +import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils' + +const logger = createLogger('InboxSendersAPI') + +const addSenderSchema = z.object({ + email: z.string().email('Invalid email address'), + label: z.string().max(100).optional(), +}) + +const deleteSenderSchema = z.object({ + senderId: z.string().min(1), +}) + +export async function GET(_req: NextRequest, { params }: { params: Promise<{ id: string }> }) { + const { id: workspaceId } = await params + const session = await getSession() + if (!session?.user?.id) { + return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) + } + + const [hasAccess, permission] = await Promise.all([ + hasInboxAccess(session.user.id), + getUserEntityPermissions(session.user.id, 'workspace', workspaceId), + ]) + if (!hasAccess) { + return NextResponse.json({ error: 'Sim Mailer requires a Max plan' }, { status: 403 }) + } + if (!permission) { + return NextResponse.json({ error: 'Not found' }, { status: 404 }) + } + + const [senders, members] = await Promise.all([ + db + .select({ + id: mothershipInboxAllowedSender.id, + email: mothershipInboxAllowedSender.email, + label: mothershipInboxAllowedSender.label, + createdAt: mothershipInboxAllowedSender.createdAt, + }) + 
.from(mothershipInboxAllowedSender) + .where(eq(mothershipInboxAllowedSender.workspaceId, workspaceId)) + .orderBy(mothershipInboxAllowedSender.createdAt), + db + .select({ + email: user.email, + name: user.name, + }) + .from(permissions) + .innerJoin(user, eq(permissions.userId, user.id)) + .where(and(eq(permissions.entityType, 'workspace'), eq(permissions.entityId, workspaceId))), + ]) + + return NextResponse.json({ + senders: senders.map((s) => ({ + id: s.id, + email: s.email, + label: s.label, + createdAt: s.createdAt, + })), + workspaceMembers: members.map((m) => ({ + email: m.email, + name: m.name, + isAutoAllowed: true, + })), + }) +} + +export async function POST(req: NextRequest, { params }: { params: Promise<{ id: string }> }) { + const { id: workspaceId } = await params + const session = await getSession() + if (!session?.user?.id) { + return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) + } + + const [hasAccess, permission] = await Promise.all([ + hasInboxAccess(session.user.id), + getUserEntityPermissions(session.user.id, 'workspace', workspaceId), + ]) + if (!hasAccess) { + return NextResponse.json({ error: 'Sim Mailer requires a Max plan' }, { status: 403 }) + } + if (permission !== 'admin') { + return NextResponse.json({ error: 'Admin access required' }, { status: 403 }) + } + + try { + const { email, label } = addSenderSchema.parse(await req.json()) + const normalizedEmail = email.toLowerCase() + + const [existing] = await db + .select({ id: mothershipInboxAllowedSender.id }) + .from(mothershipInboxAllowedSender) + .where( + and( + eq(mothershipInboxAllowedSender.workspaceId, workspaceId), + eq(mothershipInboxAllowedSender.email, normalizedEmail) + ) + ) + .limit(1) + + if (existing) { + return NextResponse.json({ error: 'Sender already exists' }, { status: 409 }) + } + + const [sender] = await db + .insert(mothershipInboxAllowedSender) + .values({ + id: uuidv4(), + workspaceId, + email: normalizedEmail, + label: label || null, + 
addedBy: session.user.id, + }) + .returning() + + return NextResponse.json({ sender }) + } catch (error) { + if (error instanceof z.ZodError) { + return NextResponse.json({ error: 'Invalid request', details: error.errors }, { status: 400 }) + } + logger.error('Failed to add sender', { workspaceId, error }) + return NextResponse.json({ error: 'Failed to add sender' }, { status: 500 }) + } +} + +export async function DELETE(req: NextRequest, { params }: { params: Promise<{ id: string }> }) { + const { id: workspaceId } = await params + const session = await getSession() + if (!session?.user?.id) { + return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) + } + + const [hasAccess, permission] = await Promise.all([ + hasInboxAccess(session.user.id), + getUserEntityPermissions(session.user.id, 'workspace', workspaceId), + ]) + if (!hasAccess) { + return NextResponse.json({ error: 'Sim Mailer requires a Max plan' }, { status: 403 }) + } + if (permission !== 'admin') { + return NextResponse.json({ error: 'Admin access required' }, { status: 403 }) + } + + try { + const { senderId } = deleteSenderSchema.parse(await req.json()) + + await db + .delete(mothershipInboxAllowedSender) + .where( + and( + eq(mothershipInboxAllowedSender.id, senderId), + eq(mothershipInboxAllowedSender.workspaceId, workspaceId) + ) + ) + + return NextResponse.json({ ok: true }) + } catch (error) { + if (error instanceof z.ZodError) { + return NextResponse.json({ error: 'Invalid request', details: error.errors }, { status: 400 }) + } + logger.error('Failed to delete sender', { workspaceId, error }) + return NextResponse.json({ error: 'Failed to delete sender' }, { status: 500 }) + } +} diff --git a/apps/sim/app/api/workspaces/[id]/inbox/tasks/route.ts b/apps/sim/app/api/workspaces/[id]/inbox/tasks/route.ts new file mode 100644 index 0000000000..8deb40cb67 --- /dev/null +++ b/apps/sim/app/api/workspaces/[id]/inbox/tasks/route.ts @@ -0,0 +1,88 @@ +import { db, mothershipInboxTask } from 
'@sim/db' +import { createLogger } from '@sim/logger' +import { and, desc, eq, lt } from 'drizzle-orm' +import { type NextRequest, NextResponse } from 'next/server' +import { getSession } from '@/lib/auth' +import { hasInboxAccess } from '@/lib/billing/core/subscription' +import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils' + +const logger = createLogger('InboxTasksAPI') + +export async function GET(req: NextRequest, { params }: { params: Promise<{ id: string }> }) { + const { id: workspaceId } = await params + const session = await getSession() + if (!session?.user?.id) { + return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) + } + + const [hasAccess, permission] = await Promise.all([ + hasInboxAccess(session.user.id), + getUserEntityPermissions(session.user.id, 'workspace', workspaceId), + ]) + if (!hasAccess) { + return NextResponse.json({ error: 'Sim Mailer requires a Max plan' }, { status: 403 }) + } + if (!permission) { + return NextResponse.json({ error: 'Not found' }, { status: 404 }) + } + + const url = new URL(req.url) + const status = url.searchParams.get('status') || 'all' + const limit = Math.min(Number(url.searchParams.get('limit') || '20'), 50) + const cursor = url.searchParams.get('cursor') // ISO date string for cursor-based pagination + + const conditions = [eq(mothershipInboxTask.workspaceId, workspaceId)] + + const validStatuses = ['received', 'processing', 'completed', 'failed', 'rejected'] as const + if (status !== 'all') { + if (!validStatuses.includes(status as (typeof validStatuses)[number])) { + return NextResponse.json({ error: 'Invalid status filter' }, { status: 400 }) + } + conditions.push(eq(mothershipInboxTask.status, status)) + } + + if (cursor) { + const cursorDate = new Date(cursor) + if (Number.isNaN(cursorDate.getTime())) { + return NextResponse.json({ error: 'Invalid cursor value' }, { status: 400 }) + } + conditions.push(lt(mothershipInboxTask.createdAt, cursorDate)) + } + + const tasks 
= await db + .select({ + id: mothershipInboxTask.id, + fromEmail: mothershipInboxTask.fromEmail, + fromName: mothershipInboxTask.fromName, + subject: mothershipInboxTask.subject, + bodyPreview: mothershipInboxTask.bodyPreview, + status: mothershipInboxTask.status, + hasAttachments: mothershipInboxTask.hasAttachments, + resultSummary: mothershipInboxTask.resultSummary, + errorMessage: mothershipInboxTask.errorMessage, + rejectionReason: mothershipInboxTask.rejectionReason, + chatId: mothershipInboxTask.chatId, + createdAt: mothershipInboxTask.createdAt, + completedAt: mothershipInboxTask.completedAt, + }) + .from(mothershipInboxTask) + .where(and(...conditions)) + .orderBy(desc(mothershipInboxTask.createdAt)) + .limit(limit + 1) // Fetch one extra to determine hasMore + + const hasMore = tasks.length > limit + const resultTasks = hasMore ? tasks.slice(0, limit) : tasks + const nextCursor = + hasMore && resultTasks.length > 0 + ? resultTasks[resultTasks.length - 1].createdAt.toISOString() + : null + + return NextResponse.json({ + tasks: resultTasks, + pagination: { + limit, + hasMore, + nextCursor, + }, + }) +} diff --git a/apps/sim/app/api/workspaces/[id]/members/route.ts b/apps/sim/app/api/workspaces/[id]/members/route.ts new file mode 100644 index 0000000000..987946d5a3 --- /dev/null +++ b/apps/sim/app/api/workspaces/[id]/members/route.ts @@ -0,0 +1,39 @@ +import { createLogger } from '@sim/logger' +import { type NextRequest, NextResponse } from 'next/server' +import { getSession } from '@/lib/auth' +import { + getUserEntityPermissions, + getWorkspaceMemberProfiles, +} from '@/lib/workspaces/permissions/utils' + +const logger = createLogger('WorkspaceMembersAPI') + +/** + * GET /api/workspaces/[id]/members + * + * Returns lightweight member profiles (id, name, image) for a workspace. + * Intended for UI display (avatars, owner cells) without the overhead of + * full permission data. 
+ */ +export async function GET(_request: NextRequest, { params }: { params: Promise<{ id: string }> }) { + try { + const { id: workspaceId } = await params + const session = await getSession() + + if (!session?.user?.id) { + return NextResponse.json({ error: 'Authentication required' }, { status: 401 }) + } + + const permission = await getUserEntityPermissions(session.user.id, 'workspace', workspaceId) + if (permission === null) { + return NextResponse.json({ error: 'Workspace not found or access denied' }, { status: 404 }) + } + + const members = await getWorkspaceMemberProfiles(workspaceId) + + return NextResponse.json({ members }) + } catch (error) { + logger.error('Error fetching workspace members:', error) + return NextResponse.json({ error: 'Failed to fetch workspace members' }, { status: 500 }) + } +} diff --git a/apps/sim/app/api/workspaces/[id]/notifications/[notificationId]/route.ts b/apps/sim/app/api/workspaces/[id]/notifications/[notificationId]/route.ts index ddc2730018..96acb82811 100644 --- a/apps/sim/app/api/workspaces/[id]/notifications/[notificationId]/route.ts +++ b/apps/sim/app/api/workspaces/[id]/notifications/[notificationId]/route.ts @@ -8,13 +8,12 @@ import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log' import { getSession } from '@/lib/auth' import { encryptSecret } from '@/lib/core/security/encryption' import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils' -import { CORE_TRIGGER_TYPES } from '@/stores/logs/filters/types' import { MAX_EMAIL_RECIPIENTS, MAX_WORKFLOW_IDS } from '../constants' const logger = createLogger('WorkspaceNotificationAPI') const levelFilterSchema = z.array(z.enum(['info', 'error'])) -const triggerFilterSchema = z.array(z.enum(CORE_TRIGGER_TYPES)) +const triggerFilterSchema = z.array(z.string().min(1)) const alertRuleSchema = z.enum([ 'consecutive_failures', diff --git a/apps/sim/app/api/workspaces/[id]/notifications/route.ts 
b/apps/sim/app/api/workspaces/[id]/notifications/route.ts index 6fc8f4866c..6c46cef900 100644 --- a/apps/sim/app/api/workspaces/[id]/notifications/route.ts +++ b/apps/sim/app/api/workspaces/[id]/notifications/route.ts @@ -9,14 +9,13 @@ import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log' import { getSession } from '@/lib/auth' import { encryptSecret } from '@/lib/core/security/encryption' import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils' -import { CORE_TRIGGER_TYPES } from '@/stores/logs/filters/types' import { MAX_EMAIL_RECIPIENTS, MAX_NOTIFICATIONS_PER_TYPE, MAX_WORKFLOW_IDS } from './constants' const logger = createLogger('WorkspaceNotificationsAPI') const notificationTypeSchema = z.enum(['webhook', 'email', 'slack']) const levelFilterSchema = z.array(z.enum(['info', 'error'])) -const triggerFilterSchema = z.array(z.enum(CORE_TRIGGER_TYPES)) +const triggerFilterSchema = z.array(z.string().min(1)) const alertRuleSchema = z.enum([ 'consecutive_failures', @@ -82,7 +81,7 @@ const createNotificationSchema = z workflowIds: z.array(z.string()).max(MAX_WORKFLOW_IDS).default([]), allWorkflows: z.boolean().default(false), levelFilter: levelFilterSchema.default(['info', 'error']), - triggerFilter: triggerFilterSchema.default([...CORE_TRIGGER_TYPES]), + triggerFilter: triggerFilterSchema.default([]), includeFinalOutput: z.boolean().default(false), includeTraceSpans: z.boolean().default(false), includeRateLimits: z.boolean().default(false), diff --git a/apps/sim/app/api/workspaces/[id]/route.ts b/apps/sim/app/api/workspaces/[id]/route.ts index 503773be0d..cf2ed3826d 100644 --- a/apps/sim/app/api/workspaces/[id]/route.ts +++ b/apps/sim/app/api/workspaces/[id]/route.ts @@ -1,19 +1,24 @@ import { workflow } from '@sim/db/schema' import { createLogger } from '@sim/logger' -import { and, eq, inArray } from 'drizzle-orm' +import { and, eq, inArray, isNull } from 'drizzle-orm' import { type NextRequest, NextResponse } from 
'next/server' import { z } from 'zod' import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log' import { getSession } from '@/lib/auth' +import { archiveWorkspace } from '@/lib/workspaces/lifecycle' const logger = createLogger('WorkspaceByIdAPI') import { db } from '@sim/db' -import { knowledgeBase, permissions, templates, workspace } from '@sim/db/schema' +import { permissions, templates, workspace } from '@sim/db/schema' import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils' const patchWorkspaceSchema = z.object({ name: z.string().trim().min(1).optional(), + color: z + .string() + .regex(/^#[0-9a-fA-F]{6}$/) + .optional(), billedAccountUserId: z.string().optional(), allowPersonalApiKeys: z.boolean().optional(), }) @@ -80,7 +85,7 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{ const workspaceDetails = await db .select() .from(workspace) - .where(eq(workspace.id, workspaceId)) + .where(and(eq(workspace.id, workspaceId), isNull(workspace.archivedAt))) .then((rows) => rows[0]) if (!workspaceDetails) { @@ -113,10 +118,11 @@ export async function PATCH(request: NextRequest, { params }: { params: Promise< try { const body = patchWorkspaceSchema.parse(await request.json()) - const { name, billedAccountUserId, allowPersonalApiKeys } = body + const { name, color, billedAccountUserId, allowPersonalApiKeys } = body if ( name === undefined && + color === undefined && billedAccountUserId === undefined && allowPersonalApiKeys === undefined ) { @@ -126,7 +132,7 @@ export async function PATCH(request: NextRequest, { params }: { params: Promise< const existingWorkspace = await db .select() .from(workspace) - .where(eq(workspace.id, workspaceId)) + .where(and(eq(workspace.id, workspaceId), isNull(workspace.archivedAt))) .then((rows) => rows[0]) if (!existingWorkspace) { @@ -139,6 +145,10 @@ export async function PATCH(request: NextRequest, { params }: { params: Promise< updateData.name = name } + if 
(color !== undefined) { + updateData.color = color + } + if (allowPersonalApiKeys !== undefined) { updateData.allowPersonalApiKeys = Boolean(allowPersonalApiKeys) } @@ -233,67 +243,37 @@ export async function DELETE( const [workspaceRecord] = await db .select({ name: workspace.name }) .from(workspace) - .where(eq(workspace.id, workspaceId)) + .where(and(eq(workspace.id, workspaceId), isNull(workspace.archivedAt))) .limit(1) - // Delete workspace and all related data in a transaction - let workspaceWorkflowCount = 0 - await db.transaction(async (tx) => { - // Get all workflows in this workspace before deletion - const workspaceWorkflows = await tx - .select({ id: workflow.id }) - .from(workflow) - .where(eq(workflow.workspaceId, workspaceId)) - - workspaceWorkflowCount = workspaceWorkflows.length - - if (workspaceWorkflows.length > 0) { - const workflowIds = workspaceWorkflows.map((w) => w.id) - - // Handle templates based on user choice - if (deleteTemplates) { - // Delete published templates that reference these workflows - await tx.delete(templates).where(inArray(templates.workflowId, workflowIds)) - logger.info(`Deleted templates for workflows in workspace ${workspaceId}`) - } else { - // Set workflowId to null for templates to create "orphaned" templates - // This allows templates to remain without source workflows - await tx - .update(templates) - .set({ workflowId: null }) - .where(inArray(templates.workflowId, workflowIds)) - logger.info( - `Updated templates to orphaned status for workflows in workspace ${workspaceId}` - ) - } + const workspaceWorkflows = await db + .select({ id: workflow.id }) + .from(workflow) + .where(eq(workflow.workspaceId, workspaceId)) + + const workflowIds = workspaceWorkflows.map((entry) => entry.id) + + if (workflowIds.length > 0) { + if (deleteTemplates) { + await db.delete(templates).where(inArray(templates.workflowId, workflowIds)) + } else { + await db + .update(templates) + .set({ workflowId: null }) + 
.where(inArray(templates.workflowId, workflowIds)) } + } - // Delete all workflows in the workspace - database cascade will handle all workflow-related data - // The database cascade will handle deleting related workflow_blocks, workflow_edges, workflow_subflows, - // workflow_logs, workflow_execution_snapshots, workflow_execution_logs, workflow_execution_trace_spans, - // workflow_schedule, webhook, chat, and memory records - await tx.delete(workflow).where(eq(workflow.workspaceId, workspaceId)) - - // Clear workspace ID from knowledge bases instead of deleting them - // This allows knowledge bases to become "unassigned" rather than being deleted - await tx - .update(knowledgeBase) - .set({ workspaceId: null, updatedAt: new Date() }) - .where(eq(knowledgeBase.workspaceId, workspaceId)) - - // Delete all permissions associated with this workspace - await tx - .delete(permissions) - .where(and(eq(permissions.entityType, 'workspace'), eq(permissions.entityId, workspaceId))) - - // Delete the workspace itself - await tx.delete(workspace).where(eq(workspace.id, workspaceId)) - - logger.info(`Successfully deleted workspace ${workspaceId} and all related data`) + const archiveResult = await archiveWorkspace(workspaceId, { + requestId: `workspace-${workspaceId}`, }) + if (!archiveResult.archived && !workspaceRecord) { + return NextResponse.json({ error: 'Workspace not found' }, { status: 404 }) + } + recordAudit({ - workspaceId: null, + workspaceId, actorId: session.user.id, actorName: session.user.name, actorEmail: session.user.email, @@ -301,11 +281,12 @@ export async function DELETE( resourceType: AuditResourceType.WORKSPACE, resourceId: workspaceId, resourceName: workspaceRecord?.name, - description: `Deleted workspace "${workspaceRecord?.name || workspaceId}"`, + description: `Archived workspace "${workspaceRecord?.name || workspaceId}"`, metadata: { affected: { - workflows: workspaceWorkflowCount, + workflows: workflowIds.length, }, + archived: 
archiveResult.archived, deleteTemplates, }, request, diff --git a/apps/sim/app/api/workspaces/invitations/[invitationId]/route.test.ts b/apps/sim/app/api/workspaces/invitations/[invitationId]/route.test.ts index 65f80c1779..3df5bd7688 100644 --- a/apps/sim/app/api/workspaces/invitations/[invitationId]/route.test.ts +++ b/apps/sim/app/api/workspaces/invitations/[invitationId]/route.test.ts @@ -4,6 +4,7 @@ import { beforeEach, describe, expect, it, vi } from 'vitest' const mockGetSession = vi.fn() const mockHasWorkspaceAdminAccess = vi.fn() +const mockGetWorkspaceById = vi.fn() let dbSelectResults: any[] = [] let dbSelectCallIndex = 0 @@ -63,6 +64,7 @@ vi.mock('@/lib/auth', () => ({ vi.mock('@/lib/workspaces/permissions/utils', () => ({ hasWorkspaceAdminAccess: (userId: string, workspaceId: string) => mockHasWorkspaceAdminAccess(userId, workspaceId), + getWorkspaceById: (id: string) => mockGetWorkspaceById(id), })) vi.mock('@/lib/credentials/environment', () => ({ @@ -120,8 +122,9 @@ vi.mock('@sim/db/schema', () => ({ })) vi.mock('drizzle-orm', () => ({ - eq: vi.fn((a, b) => ({ type: 'eq', a, b })), - and: vi.fn((...args) => ({ type: 'and', args })), + eq: vi.fn((a: unknown, b: unknown) => ({ type: 'eq', a, b })), + and: vi.fn((...args: unknown[]) => ({ type: 'and', args })), + isNull: vi.fn((field: unknown) => ({ type: 'isNull', field })), })) vi.mock('crypto', () => ({ @@ -164,6 +167,7 @@ describe('Workspace Invitation [invitationId] API Route', () => { vi.clearAllMocks() dbSelectResults = [] dbSelectCallIndex = 0 + mockGetWorkspaceById.mockResolvedValue({ id: 'workspace-456', name: 'Test Workspace' }) }) describe('GET /api/workspaces/invitations/[invitationId]', () => { @@ -240,7 +244,9 @@ describe('Workspace Invitation [invitationId] API Route', () => { const response = await GET(request, { params }) expect(response.status).toBe(307) - expect(response.headers.get('location')).toBe('https://test.sim.ai/workspace/workspace-456/w') + 
expect(response.headers.get('location')).toBe( + 'https://test.sim.ai/workspace/workspace-456/home' + ) }) it('should redirect to error page with token preserved when invitation expired', async () => { @@ -495,7 +501,7 @@ describe('Workspace Invitation [invitationId] API Route', () => { expect(response2.status).toBe(307) expect(response2.headers.get('location')).toBe( - 'https://test.sim.ai/workspace/workspace-456/w' + 'https://test.sim.ai/workspace/workspace-456/home' ) }) }) diff --git a/apps/sim/app/api/workspaces/invitations/[invitationId]/route.ts b/apps/sim/app/api/workspaces/invitations/[invitationId]/route.ts index fac6b6f6da..723b2954de 100644 --- a/apps/sim/app/api/workspaces/invitations/[invitationId]/route.ts +++ b/apps/sim/app/api/workspaces/invitations/[invitationId]/route.ts @@ -10,7 +10,7 @@ import { workspaceInvitation, } from '@sim/db/schema' import { createLogger } from '@sim/logger' -import { and, eq } from 'drizzle-orm' +import { and, eq, isNull } from 'drizzle-orm' import { type NextRequest, NextResponse } from 'next/server' import { WorkspaceInvitationEmail } from '@/components/emails' import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log' @@ -19,7 +19,7 @@ import { getBaseUrl } from '@/lib/core/utils/urls' import { syncWorkspaceEnvCredentials } from '@/lib/credentials/environment' import { sendEmail } from '@/lib/messaging/email/mailer' import { getFromEmailAddress } from '@/lib/messaging/email/utils' -import { hasWorkspaceAdminAccess } from '@/lib/workspaces/permissions/utils' +import { getWorkspaceById, hasWorkspaceAdminAccess } from '@/lib/workspaces/permissions/utils' const logger = createLogger('WorkspaceInvitationAPI') @@ -74,7 +74,7 @@ export async function GET( const workspaceDetails = await db .select() .from(workspace) - .where(eq(workspace.id, invitation.workspaceId)) + .where(and(eq(workspace.id, invitation.workspaceId), isNull(workspace.archivedAt))) .then((rows) => rows[0]) if (!workspaceDetails) { @@ 
-141,7 +141,7 @@ export async function GET( .where(eq(workspaceInvitation.id, invitation.id)) return NextResponse.redirect( - new URL(`/workspace/${invitation.workspaceId}/w`, getBaseUrl()) + new URL(`/workspace/${invitation.workspaceId}/home`, getBaseUrl()) ) } @@ -193,7 +193,9 @@ export async function GET( request: req, }) - return NextResponse.redirect(new URL(`/workspace/${invitation.workspaceId}/w`, getBaseUrl())) + return NextResponse.redirect( + new URL(`/workspace/${invitation.workspaceId}/home`, getBaseUrl()) + ) } return NextResponse.json({ @@ -235,6 +237,11 @@ export async function DELETE( return NextResponse.json({ error: 'Invitation not found' }, { status: 404 }) } + const activeWorkspace = await getWorkspaceById(invitation.workspaceId) + if (!activeWorkspace) { + return NextResponse.json({ error: 'Workspace not found' }, { status: 404 }) + } + const hasAdminAccess = await hasWorkspaceAdminAccess(session.user.id, invitation.workspaceId) if (!hasAdminAccess) { diff --git a/apps/sim/app/api/workspaces/invitations/route.test.ts b/apps/sim/app/api/workspaces/invitations/route.test.ts index 0919385d0f..248e721258 100644 --- a/apps/sim/app/api/workspaces/invitations/route.test.ts +++ b/apps/sim/app/api/workspaces/invitations/route.test.ts @@ -15,6 +15,7 @@ const { mockGetEmailDomain, mockValidateInvitationsAllowed, mockRandomUUID, + mockGetWorkspaceById, } = vi.hoisted(() => { const mockGetSession = vi.fn() const mockInsertValues = vi.fn().mockResolvedValue(undefined) @@ -24,6 +25,7 @@ const { const mockGetEmailDomain = vi.fn().mockReturnValue('sim.ai') const mockValidateInvitationsAllowed = vi.fn().mockResolvedValue(undefined) const mockRandomUUID = vi.fn().mockReturnValue('mock-uuid-1234') + const mockGetWorkspaceById = vi.fn() const mockDbResults: { value: any[] } = { value: [] } @@ -52,6 +54,7 @@ const { mockGetEmailDomain, mockValidateInvitationsAllowed, mockRandomUUID, + mockGetWorkspaceById, } }) @@ -111,6 +114,10 @@ vi.mock('@/lib/core/config/env', 
async () => { return createEnvMock() }) +vi.mock('@/lib/workspaces/permissions/utils', () => ({ + getWorkspaceById: mockGetWorkspaceById, +})) + vi.mock('@/lib/core/utils/urls', () => ({ getEmailDomain: mockGetEmailDomain, })) @@ -135,6 +142,7 @@ vi.mock('drizzle-orm', () => ({ inArray: vi .fn() .mockImplementation((field: any, values: any) => ({ type: 'inArray', field, values })), + isNull: vi.fn().mockImplementation((field: any) => ({ type: 'isNull', field })), })) vi.mock('@/ee/access-control/utils/permission-check', () => ({ @@ -176,6 +184,7 @@ describe('Workspace Invitations API Route', () => { mockRender.mockResolvedValue('email content') mockGetEmailDomain.mockReturnValue('sim.ai') mockValidateInvitationsAllowed.mockResolvedValue(undefined) + mockGetWorkspaceById.mockResolvedValue({ id: 'workspace-1', name: 'Test Workspace' }) }) describe('GET /api/workspaces/invitations', () => { @@ -291,9 +300,9 @@ describe('Workspace Invitations API Route', () => { it('should return 404 when workspace is not found', async () => { mockGetSession.mockResolvedValue({ user: { id: 'user-123' } }) + mockGetWorkspaceById.mockResolvedValueOnce(null) mockDbResults.value = [ [{ permissionType: 'admin' }], // User has admin permissions - [], // Workspace not found ] const req = createMockRequest('POST', { diff --git a/apps/sim/app/api/workspaces/invitations/route.ts b/apps/sim/app/api/workspaces/invitations/route.ts index 543cc73727..208e0a0e26 100644 --- a/apps/sim/app/api/workspaces/invitations/route.ts +++ b/apps/sim/app/api/workspaces/invitations/route.ts @@ -10,7 +10,7 @@ import { workspaceInvitation, } from '@sim/db/schema' import { createLogger } from '@sim/logger' -import { and, eq, inArray } from 'drizzle-orm' +import { and, eq, inArray, isNull } from 'drizzle-orm' import { type NextRequest, NextResponse } from 'next/server' import { WorkspaceInvitationEmail } from '@/components/emails' import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log' @@ -19,6 
+19,7 @@ import { PlatformEvents } from '@/lib/core/telemetry' import { getBaseUrl } from '@/lib/core/utils/urls' import { sendEmail } from '@/lib/messaging/email/mailer' import { getFromEmailAddress } from '@/lib/messaging/email/utils' +import { getWorkspaceById } from '@/lib/workspaces/permissions/utils' import { InvitationsNotAllowedError, validateInvitationsAllowed, @@ -50,6 +51,7 @@ export async function GET(req: NextRequest) { eq(permissions.userId, session.user.id) ) ) + .where(isNull(workspace.archivedAt)) if (userWorkspaces.length === 0) { return NextResponse.json({ invitations: [] }) @@ -114,10 +116,15 @@ export async function POST(req: NextRequest) { ) } + const activeWorkspace = await getWorkspaceById(workspaceId) + if (!activeWorkspace) { + return NextResponse.json({ error: 'Workspace not found' }, { status: 404 }) + } + const workspaceDetails = await db .select() .from(workspace) - .where(eq(workspace.id, workspaceId)) + .where(and(eq(workspace.id, workspaceId), isNull(workspace.archivedAt))) .then((rows) => rows[0]) if (!workspaceDetails) { diff --git a/apps/sim/app/api/workspaces/route.ts b/apps/sim/app/api/workspaces/route.ts index 79c2c436df..66493a5eb0 100644 --- a/apps/sim/app/api/workspaces/route.ts +++ b/apps/sim/app/api/workspaces/route.ts @@ -1,7 +1,7 @@ import { db } from '@sim/db' import { permissions, workflow, workspace } from '@sim/db/schema' import { createLogger } from '@sim/logger' -import { and, desc, eq, isNull } from 'drizzle-orm' +import { and, desc, eq, isNull, sql } from 'drizzle-orm' import { NextResponse } from 'next/server' import { z } from 'zod' import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log' @@ -9,22 +9,33 @@ import { getSession } from '@/lib/auth' import { PlatformEvents } from '@/lib/core/telemetry' import { buildDefaultWorkflowArtifacts } from '@/lib/workflows/defaults' import { saveWorkflowToNormalizedTables } from '@/lib/workflows/persistence/utils' +import { getRandomWorkspaceColor } 
from '@/lib/workspaces/colors' +import type { WorkspaceScope } from '@/lib/workspaces/utils' const logger = createLogger('Workspaces') const createWorkspaceSchema = z.object({ name: z.string().trim().min(1, 'Name is required'), + color: z + .string() + .regex(/^#[0-9a-fA-F]{6}$/) + .optional(), skipDefaultWorkflow: z.boolean().optional().default(false), }) // Get all workspaces for the current user -export async function GET() { +export async function GET(request: Request) { const session = await getSession() if (!session?.user?.id) { return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) } + const scope = (new URL(request.url).searchParams.get('scope') ?? 'active') as WorkspaceScope + if (!['active', 'archived', 'all'].includes(scope)) { + return NextResponse.json({ error: 'Invalid scope' }, { status: 400 }) + } + const userWorkspaces = await db .select({ workspace: workspace, @@ -32,10 +43,24 @@ export async function GET() { }) .from(permissions) .innerJoin(workspace, eq(permissions.entityId, workspace.id)) - .where(and(eq(permissions.userId, session.user.id), eq(permissions.entityType, 'workspace'))) + .where( + scope === 'all' + ? and(eq(permissions.userId, session.user.id), eq(permissions.entityType, 'workspace')) + : scope === 'archived' + ? 
and( + eq(permissions.userId, session.user.id), + eq(permissions.entityType, 'workspace'), + sql`${workspace.archivedAt} IS NOT NULL` + ) + : and( + eq(permissions.userId, session.user.id), + eq(permissions.entityType, 'workspace'), + isNull(workspace.archivedAt) + ) + ) .orderBy(desc(workspace.createdAt)) - if (userWorkspaces.length === 0) { + if (scope === 'active' && userWorkspaces.length === 0) { const defaultWorkspace = await createDefaultWorkspace(session.user.id, session.user.name) await migrateExistingWorkflows(session.user.id, defaultWorkspace.id) @@ -43,7 +68,9 @@ export async function GET() { return NextResponse.json({ workspaces: [defaultWorkspace] }) } - await ensureWorkflowsHaveWorkspace(session.user.id, userWorkspaces[0].workspace.id) + if (scope === 'active') { + await ensureWorkflowsHaveWorkspace(session.user.id, userWorkspaces[0].workspace.id) + } const workspacesWithPermissions = userWorkspaces.map( ({ workspace: workspaceDetails, permissionType }) => ({ @@ -65,9 +92,9 @@ export async function POST(req: Request) { } try { - const { name, skipDefaultWorkflow } = createWorkspaceSchema.parse(await req.json()) + const { name, color, skipDefaultWorkflow } = createWorkspaceSchema.parse(await req.json()) - const newWorkspace = await createWorkspace(session.user.id, name, skipDefaultWorkflow) + const newWorkspace = await createWorkspace(session.user.id, name, skipDefaultWorkflow, color) recordAudit({ workspaceId: newWorkspace.id, @@ -96,16 +123,23 @@ async function createDefaultWorkspace(userId: string, userName?: string | null) return createWorkspace(userId, workspaceName) } -async function createWorkspace(userId: string, name: string, skipDefaultWorkflow = false) { +async function createWorkspace( + userId: string, + name: string, + skipDefaultWorkflow = false, + explicitColor?: string +) { const workspaceId = crypto.randomUUID() const workflowId = crypto.randomUUID() const now = new Date() + const color = explicitColor || getRandomWorkspaceColor() try 
{ await db.transaction(async (tx) => { await tx.insert(workspace).values({ id: workspaceId, name, + color, ownerId: userId, billedAccountUserId: userId, allowPersonalApiKeys: true, @@ -174,6 +208,7 @@ async function createWorkspace(userId: string, name: string, skipDefaultWorkflow return { id: workspaceId, name, + color, ownerId: userId, billedAccountUserId: userId, allowPersonalApiKeys: true, diff --git a/apps/sim/app/chat/[identifier]/page.tsx b/apps/sim/app/chat/[identifier]/page.tsx index 9ba983fc5c..a90238b644 100644 --- a/apps/sim/app/chat/[identifier]/page.tsx +++ b/apps/sim/app/chat/[identifier]/page.tsx @@ -1,5 +1,10 @@ +import type { Metadata } from 'next' import ChatClient from '@/app/chat/[identifier]/chat' +export const metadata: Metadata = { + title: 'Chat', +} + export default async function ChatPage({ params }: { params: Promise<{ identifier: string }> }) { const { identifier } = await params return diff --git a/apps/sim/app/chat/components/auth/email/email-auth.tsx b/apps/sim/app/chat/components/auth/email/email-auth.tsx index d6ba3de532..d84b9a8b6b 100644 --- a/apps/sim/app/chat/components/auth/email/email-auth.tsx +++ b/apps/sim/app/chat/components/auth/email/email-auth.tsx @@ -52,7 +52,7 @@ export default function EmailAuth({ identifier, onAuthSuccess }: EmailAuthProps) useEffect(() => { if (countdown > 0) { - const timer = setTimeout(() => setCountdown(countdown - 1), 1000) + const timer = setTimeout(() => setCountdown((c) => c - 1), 1000) return () => clearTimeout(timer) } if (countdown === 0 && isResendDisabled) { diff --git a/apps/sim/app/chat/components/input/input.tsx b/apps/sim/app/chat/components/input/input.tsx index 5e385d7f3a..5c9bfea95b 100644 --- a/apps/sim/app/chat/components/input/input.tsx +++ b/apps/sim/app/chat/components/input/input.tsx @@ -5,6 +5,7 @@ import { useEffect, useRef, useState } from 'react' import { motion } from 'framer-motion' import { Paperclip, Send, Square, X } from 'lucide-react' import { Badge, Tooltip } 
from '@/components/emcn' +import { CHAT_ACCEPT_ATTRIBUTE } from '@/lib/uploads/utils/validation' import { VoiceInput } from '@/app/chat/components/input/voice-input' const logger = createLogger('ChatInput') @@ -15,6 +16,13 @@ const PLACEHOLDER_MOBILE = 'Enter a message' const PLACEHOLDER_DESKTOP = 'Enter a message or click the mic to speak' const MAX_TEXTAREA_HEIGHT = 120 // Max height in pixels (e.g., for about 3-4 lines) const MAX_TEXTAREA_HEIGHT_MOBILE = 100 // Smaller for mobile +const IS_STT_AVAILABLE = + typeof window !== 'undefined' && + !!( + (window as Window & { SpeechRecognition?: unknown; webkitSpeechRecognition?: unknown }) + .SpeechRecognition || + (window as Window & { webkitSpeechRecognition?: unknown }).webkitSpeechRecognition + ) interface AttachedFile { id: string @@ -42,10 +50,6 @@ export const ChatInput: React.FC<{ const [dragCounter, setDragCounter] = useState(0) const isDragOver = dragCounter > 0 - // Check if speech-to-text is available in the browser - const isSttAvailable = - typeof window !== 'undefined' && !!(window.SpeechRecognition || window.webkitSpeechRecognition) - // Function to adjust textarea height const adjustTextareaHeight = () => { if (textareaRef.current) { @@ -195,7 +199,7 @@ export const ChatInput: React.FC<{
{/* Voice Input Only */} - {isSttAvailable && ( + {IS_STT_AVAILABLE && (
@@ -268,7 +272,7 @@ export const ChatInput: React.FC<{ > {/* File Previews */} {attachedFiles.length > 0 && ( -
+
{attachedFiles.map((file) => { const formatFileSize = (bytes: number) => { if (bytes === 0) return '0 B' @@ -348,7 +352,7 @@ export const ChatInput: React.FC<{ ref={fileInputRef} type='file' multiple - accept='.pdf,.csv,.doc,.docx,.txt,.md,.xlsx,.xls,.html,.htm,.pptx,.ppt,.json,.xml,.rtf,image/*' + accept={CHAT_ACCEPT_ATTRIBUTE} onChange={(e) => { handleFileSelect(e.target.files) if (fileInputRef.current) { @@ -406,7 +410,7 @@ export const ChatInput: React.FC<{
{/* Voice Input */} - {isSttAvailable && ( + {IS_STT_AVAILABLE && (
diff --git a/apps/sim/app/chat/components/input/voice-input.tsx b/apps/sim/app/chat/components/input/voice-input.tsx index e23ece4a29..53a97a583d 100644 --- a/apps/sim/app/chat/components/input/voice-input.tsx +++ b/apps/sim/app/chat/components/input/voice-input.tsx @@ -31,11 +31,9 @@ interface SpeechRecognitionStatic { new (): SpeechRecognition } -declare global { - interface Window { - SpeechRecognition?: SpeechRecognitionStatic - webkitSpeechRecognition?: SpeechRecognitionStatic - } +type WindowWithSpeech = Window & { + SpeechRecognition?: SpeechRecognitionStatic + webkitSpeechRecognition?: SpeechRecognitionStatic } interface VoiceInputProps { @@ -57,8 +55,9 @@ export function VoiceInput({ // Check if speech recognition is supported useEffect(() => { - const SpeechRecognition = window.SpeechRecognition || window.webkitSpeechRecognition - setIsSupported(!!SpeechRecognition) + const w = window as WindowWithSpeech + const SpeechRecognitionCtor = w.SpeechRecognition || w.webkitSpeechRecognition + setIsSupported(!!SpeechRecognitionCtor) }, []) const handleVoiceClick = useCallback(() => { diff --git a/apps/sim/app/chat/components/message/message.tsx b/apps/sim/app/chat/components/message/message.tsx index 7a8f4546d4..a9186ae020 100644 --- a/apps/sim/app/chat/components/message/message.tsx +++ b/apps/sim/app/chat/components/message/message.tsx @@ -1,6 +1,6 @@ 'use client' -import { memo, useMemo, useState } from 'react' +import { memo, useState } from 'react' import { Check, Copy, File as FileIcon, FileText, Image as ImageIcon } from 'lucide-react' import { Tooltip } from '@/components/emcn' import { @@ -8,6 +8,7 @@ import { ChatFileDownloadAll, } from '@/app/chat/components/message/components/file-download' import MarkdownRenderer from '@/app/chat/components/message/components/markdown-renderer' +import { useThrottledValue } from '@/hooks/use-throttled-value' export interface ChatAttachment { id: string @@ -39,16 +40,15 @@ export interface ChatMessage { } function 
EnhancedMarkdownRenderer({ content }: { content: string }) { - return + const throttled = useThrottledValue(content) + return } export const ClientChatMessage = memo( function ClientChatMessage({ message }: { message: ChatMessage }) { const [isCopied, setIsCopied] = useState(false) - const isJsonObject = useMemo(() => { - return typeof message.content === 'object' && message.content !== null - }, [message.content]) + const isJsonObject = typeof message.content === 'object' && message.content !== null // Since tool calls are now handled via SSE events and stored in message.toolCalls, // we can use the content directly without parsing diff --git a/apps/sim/app/chat/components/voice-interface/voice-interface.tsx b/apps/sim/app/chat/components/voice-interface/voice-interface.tsx index 6f2d0653b4..fd7f291c31 100644 --- a/apps/sim/app/chat/components/voice-interface/voice-interface.tsx +++ b/apps/sim/app/chat/components/voice-interface/voice-interface.tsx @@ -36,11 +36,9 @@ interface SpeechRecognitionStatic { new (): SpeechRecognition } -declare global { - interface Window { - SpeechRecognition?: SpeechRecognitionStatic - webkitSpeechRecognition?: SpeechRecognitionStatic - } +type WindowWithSpeech = Window & { + SpeechRecognition?: SpeechRecognitionStatic + webkitSpeechRecognition?: SpeechRecognitionStatic } interface VoiceInterfaceProps { @@ -93,7 +91,11 @@ export function VoiceInterface({ const responseTimeoutRef = useRef(null) const isSupported = - typeof window !== 'undefined' && !!(window.SpeechRecognition || window.webkitSpeechRecognition) + typeof window !== 'undefined' && + !!( + (window as WindowWithSpeech).SpeechRecognition || + (window as WindowWithSpeech).webkitSpeechRecognition + ) useEffect(() => { isMutedRef.current = isMuted @@ -214,7 +216,8 @@ export function VoiceInterface({ const setupSpeechRecognition = useCallback(() => { if (!isSupported) return - const SpeechRecognition = window.SpeechRecognition || window.webkitSpeechRecognition + const w = window 
as WindowWithSpeech + const SpeechRecognition = w.SpeechRecognition || w.webkitSpeechRecognition if (!SpeechRecognition) return const recognition = new SpeechRecognition() diff --git a/apps/sim/app/chat/hooks/use-chat-streaming.ts b/apps/sim/app/chat/hooks/use-chat-streaming.ts index e020870931..79dfb02f40 100644 --- a/apps/sim/app/chat/hooks/use-chat-streaming.ts +++ b/apps/sim/app/chat/hooks/use-chat-streaming.ts @@ -78,18 +78,15 @@ export function useChatStreaming() { abortControllerRef.current.abort() abortControllerRef.current = null - // Add a message indicating the response was stopped + const latestContent = accumulatedTextRef.current + setMessages((prev) => { const lastMessage = prev[prev.length - 1] - // Only modify if the last message is from the assistant (as expected) if (lastMessage && lastMessage.type === 'assistant') { - // Append a note that the response was stopped + const content = latestContent || lastMessage.content const updatedContent = - lastMessage.content + - (lastMessage.content - ? '\n\n_Response stopped by user._' - : '_Response stopped by user._') + content + (content ? 
'\n\n_Response stopped by user._' : '_Response stopped by user._') return [ ...prev.slice(0, -1), @@ -100,7 +97,6 @@ export function useChatStreaming() { return prev }) - // Reset streaming state immediately setIsStreamingResponse(false) accumulatedTextRef.current = '' lastStreamedPositionRef.current = 0 @@ -139,9 +135,49 @@ export function useChatStreaming() { let accumulatedText = '' let lastAudioPosition = 0 - // Track which blocks have streamed content (like chat panel) const messageIdMap = new Map() const messageId = crypto.randomUUID() + + const UI_BATCH_MAX_MS = 50 + let uiDirty = false + let uiRAF: number | null = null + let uiTimer: ReturnType | null = null + let lastUIFlush = 0 + + const flushUI = () => { + if (uiRAF !== null) { + cancelAnimationFrame(uiRAF) + uiRAF = null + } + if (uiTimer !== null) { + clearTimeout(uiTimer) + uiTimer = null + } + if (!uiDirty) return + uiDirty = false + lastUIFlush = performance.now() + const snapshot = accumulatedText + setMessages((prev) => + prev.map((msg) => { + if (msg.id !== messageId) return msg + if (!msg.isStreaming) return msg + return { ...msg, content: snapshot } + }) + ) + } + + const scheduleUIFlush = () => { + if (uiRAF !== null) return + const elapsed = performance.now() - lastUIFlush + if (elapsed >= UI_BATCH_MAX_MS) { + flushUI() + return + } + uiRAF = requestAnimationFrame(flushUI) + if (uiTimer === null) { + uiTimer = setTimeout(flushUI, Math.max(0, UI_BATCH_MAX_MS - elapsed)) + } + } setMessages((prev) => [ ...prev, { @@ -165,6 +201,7 @@ export function useChatStreaming() { const { done, value } = await reader.read() if (done) { + flushUI() // Stream any remaining text for TTS if ( shouldPlayAudio && @@ -217,6 +254,7 @@ export function useChatStreaming() { } if (eventType === 'final' && json.data) { + flushUI() const finalData = json.data as { success: boolean error?: string | { message?: string } @@ -367,6 +405,7 @@ export function useChatStreaming() { } accumulatedText += contentChunk + 
accumulatedTextRef.current = accumulatedText logger.debug('[useChatStreaming] Received chunk', { blockId, chunkLength: contentChunk.length, @@ -374,11 +413,8 @@ export function useChatStreaming() { messageId, chunk: contentChunk.substring(0, 20), }) - setMessages((prev) => - prev.map((msg) => - msg.id === messageId ? { ...msg, content: accumulatedText } : msg - ) - ) + uiDirty = true + scheduleUIFlush() // Real-time TTS for voice mode if (shouldPlayAudio && streamingOptions?.audioStreamHandler) { @@ -419,10 +455,13 @@ export function useChatStreaming() { } } catch (error) { logger.error('Error processing stream:', error) + flushUI() setMessages((prev) => prev.map((msg) => (msg.id === messageId ? { ...msg, isStreaming: false } : msg)) ) } finally { + if (uiRAF !== null) cancelAnimationFrame(uiRAF) + if (uiTimer !== null) clearTimeout(uiTimer) setIsStreamingResponse(false) abortControllerRef.current = null diff --git a/apps/sim/app/favicon.ico b/apps/sim/app/favicon.ico deleted file mode 100644 index 9aa82bcf046763fb94ffcb3235ad88f6c2bb3626..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 15086 zcmeHO*>jXd5MN8nXa59zu*&kuCtn1_TT$`0yv6duQi#YcB2plzC@3P}g(4~_3IdiW zD57G#MMXTgRgg+vUyV-0AAz|5AU#g~Nzi+;o{!LF$PfvFzmpdwVbnfJn zbErGzW}cACos!GtI(5oFAD7Fm#k=EBv%S|bx!h=ca3a1zmt0R2zHjbNPP+2g`cwOK zH(hFKQ4ESyc_GGHVoum?=bFu?bB(WZ+y%Y@D=|0ly(3{y@)0q26qzaTKH-mbN09gC zuxIZZuVu!Mj31c{WHOM+!2eAKPP<1;x4yBQhPo@BOXtXUTJrX5SJdfU>9^_mVB0xX z>eHH<sNKL*MoqFY@m`3A^`-z3uU)_|PY;+Mr{&C*O*6TEbl1 ze%(V7{@4o4Uc$Of;wH@%J90X3y(?k!4#nzMtQT|k05N9*|EBFaKljEG@VlS5slXqC zgw-3%l4E{?W(7qhQO_(i)GF*)BK))dmcJ!-?k+{rF52Jrx?opF> zp?<&4jk0CV>R)~MfzeHzBX-0z@V9?q?)g0tcKxILwolE`HK;27b8U!|YsNY~bDu%3 z-1J2fxF=BTz-OwD|4Fup{oFxf@1H8}ozKNDUx$4PIX4ysdv(z_ioNROpZF=~w>>6) z#kzJaES_wquq_e>OX?I>N7MrYH3 z@UMaX!C0lb!{Q8)eb7J9p*#ml+u0|qf$lyO{R8~e$ze|&bX>jI0aFV4hcX3y!+oIo zpTC>M5As`f#_y6$vc`F8`%l6CaVXeBL!K+_v-I6HBPDFanF9QF+^nYU>APlXd;2)> 
zrcT1Hf5qN1wo!lEXQJ){o>>NTn#C{pgWTC^?~6GdXE5iUpTGb32etwq)Y$i{#dIH1 zpwSIy@7S3VxX(DhLC>NM^KJ%f+4onDkifGS=a-vT8_)8qN9tPpWvg^tzY4NJ+n_oA z6CTbd%7|O=mGapReL?)vL?9hrcAY?@H z)&gAG2L6~EMvI&CX`Bz+DN}4G_WefenAhUkwH80W=UPK{-1}ck_~m!uehR<-q0bif znzr*h>Z*rc5eq$Q?wu5$2S2Cl<|wtk`RC!0H+fwaG=Z!d!` z;uvW^D4!w^%z1;NuDTfZCiOyFv6q`qnRm2PL%oSI@$Jf}U(0;gY8kH77{m`*<-YL5 zPd-+Pc6-pt%fFkmR2uFC?J;hp#-MIa^3MHIDY_Y*jMteAWHOM+Kqdp33>+yL=*Tf( z&4uVtxerOs^Z5hJf#}Hj8z~0`He*N0g=myN(_CIq_P#P4V4hYdlx`>o%+u<6_wlAn z?^@fX##QQm5^43Wl|1cA(QQhnvhyybUrC;q($1c7H&16qR#z7<=qq7t4>4}__a)y6 zYx)b}7g9eXeTDp8ynYS-mXh^N{ffUW_ZxKS*JoVa!$0d@TP*hO3Gn>^3u9ixrbhp1 z_rYQxnW3@lV)~5PGRCUd)A;aNst=rgY2rxlOKZ%JF%iZm0%B4A-7m%0u8^>Gr`V@v zi-o_Ov1<>1D&r&q-=v2Rzj!>xgXnixA8;DK$508sZxO%f+sM!KzO)8E-K!r&pFQIj z^yk7SOW$Ew`KNGhhm}6p?&ABSFPMBn{J|nd%~*o}4fGMWJFN7{56T$VAADtuf^mhg zWrx@Yr-?7 zz)@EI5Wi5LJ$c9c2J(%usk@#m`01~BP{QB45i9yK%2cV`6+JUv5q9i}F%RTw*q#ro z{QD)}m!;1*oSV5=%nf72KK_Qrf7st+pD-S0@!p{?<4%q_0Y$XR{>EN$?|p@}->SS0 z8@K6xQ2+C#!-P28j1PtRG-3T_v6JVD|7o3s?YpC_Ag{yLOcXc!lemt+$C#8`ybQ8^ zkohvq?STw3?qmBxXY|0HAnptA6FUm=AnFn7BgT9$KztB;RM&^`K=4|14D}ddt*Wn> z|HBwdOLK3bo$+Aq3IE-yb{VhLc(^vuPmGCCH?+Ec3-fDnJn9CC zeJ9!wu)}!=YLo-a{>-U@ZRnnvFU&t>zOIhJTrT7n@IGR19HaIT?@wxpev6ao9%8XK^zYYauTG!|Iqb13VsfTqC=OyaP5j`%`g~ z=Lzpn%;VvlI3DvB?WpORGvjdgGCdzD*sy61#Wi^}7x^}aY7eMAF#v0abt@jX8h^KC O542?u=KHo)N&N>ImdASl diff --git a/apps/sim/app/form/[identifier]/page.tsx b/apps/sim/app/form/[identifier]/page.tsx index ba4004789d..10f5a6e7fc 100644 --- a/apps/sim/app/form/[identifier]/page.tsx +++ b/apps/sim/app/form/[identifier]/page.tsx @@ -1,5 +1,10 @@ +import type { Metadata } from 'next' import Form from '@/app/form/[identifier]/form' +export const metadata: Metadata = { + title: 'Form', +} + export default async function FormPage({ params }: { params: Promise<{ identifier: string }> }) { const { identifier } = await params return
diff --git a/apps/sim/app/invite/[id]/invite.tsx b/apps/sim/app/invite/[id]/invite.tsx index caa2659d47..10658d8f7f 100644 --- a/apps/sim/app/invite/[id]/invite.tsx +++ b/apps/sim/app/invite/[id]/invite.tsx @@ -200,7 +200,7 @@ export default function Invite() { }, [searchParams, inviteId]) useEffect(() => { - if (!session?.user || !token) return + if (!session?.user) return async function fetchInvitationDetails() { setIsLoading(true) @@ -301,7 +301,7 @@ export default function Invite() { } fetchInvitationDetails() - }, [session?.user, inviteId, token]) + }, [session?.user, inviteId]) const handleAcceptInvitation = async () => { if (!session?.user) return diff --git a/apps/sim/app/invite/[id]/page.tsx b/apps/sim/app/invite/[id]/page.tsx index 2f22144abc..e04a2ca774 100644 --- a/apps/sim/app/invite/[id]/page.tsx +++ b/apps/sim/app/invite/[id]/page.tsx @@ -1,3 +1,9 @@ +import type { Metadata } from 'next' import Invite from '@/app/invite/[id]/invite' +export const metadata: Metadata = { + title: 'Invite', + robots: { index: false }, +} + export default Invite diff --git a/apps/sim/app/layout.tsx b/apps/sim/app/layout.tsx index 33c504a5d3..dc3fb76201 100644 --- a/apps/sim/app/layout.tsx +++ b/apps/sim/app/layout.tsx @@ -110,13 +110,19 @@ export default function RootLayout({ children }: { children: React.ReactNode }) if (stored) { var parsed = JSON.parse(stored); var state = parsed && parsed.state; - var width = state && state.sidebarWidth; - var maxSidebarWidth = window.innerWidth * 0.3; + var isCollapsed = state && state.isCollapsed; - if (width >= 232 && width <= maxSidebarWidth) { - document.documentElement.style.setProperty('--sidebar-width', width + 'px'); - } else if (width > maxSidebarWidth) { - document.documentElement.style.setProperty('--sidebar-width', maxSidebarWidth + 'px'); + if (isCollapsed) { + document.documentElement.style.setProperty('--sidebar-width', '51px'); + } else { + var width = state && state.sidebarWidth; + var maxSidebarWidth = 
window.innerWidth * 0.3; + + if (width >= 248 && width <= maxSidebarWidth) { + document.documentElement.style.setProperty('--sidebar-width', width + 'px'); + } else if (width > maxSidebarWidth) { + document.documentElement.style.setProperty('--sidebar-width', maxSidebarWidth + 'px'); + } } } } catch (e) { diff --git a/apps/sim/app/llms-full.txt/route.ts b/apps/sim/app/llms-full.txt/route.ts index 47eaedfc6e..7e89c74c73 100644 --- a/apps/sim/app/llms-full.txt/route.ts +++ b/apps/sim/app/llms-full.txt/route.ts @@ -3,20 +3,20 @@ import { getBaseUrl } from '@/lib/core/utils/urls' export async function GET() { const baseUrl = getBaseUrl() - const llmsFullContent = `# Sim - AI Agent Workflow Builder + const llmsFullContent = `# Sim — Build AI Agents & Run Your Agentic Workforce -> Sim is an open-source AI agent workflow builder used by 70,000+ developers at startups to Fortune 500 companies. Build and deploy agentic workflows with a visual drag-and-drop canvas. SOC2 and HIPAA compliant. +> Sim is the open-source platform to build AI agents and run your agentic workforce. Connect 1,000+ integrations and LLMs to deploy and orchestrate agentic workflows. ## Overview -Sim provides a visual interface for building AI agent workflows. Instead of writing code, users drag and drop blocks onto a canvas and connect them to create complex AI automations. Each block represents a step in the workflow - an LLM call, a tool invocation, an API request, or a code execution. +Sim lets teams create agents, workflows, knowledge bases, tables, and docs. Over 100,000 builders use Sim — from startups to Fortune 500 companies. Teams connect their tools and data, build agents that execute real workflows across systems, and manage them with full observability. SOC2 and HIPAA compliant. 
## Product Details - **Product Name**: Sim -- **Category**: AI Development Tools / Workflow Automation +- **Category**: AI Agent Platform / Agentic Workflow Orchestration - **Deployment**: Cloud (SaaS) and Self-hosted options -- **Pricing**: Free tier, Pro ($20/month), Team ($40/month), Enterprise (custom) +- **Pricing**: Free tier, Pro ($25/month, 6K credits), Max ($100/month, 25K credits), Team plans available, Enterprise (custom) - **Compliance**: SOC2 Type II, HIPAA compliant ## Core Concepts @@ -66,7 +66,7 @@ Sim supports all major LLM providers: - Amazon Bedrock ### Integrations -100+ pre-built integrations including: +1,000+ pre-built integrations including: - **Communication**: Slack, Discord, Email (Gmail, Outlook), SMS (Twilio) - **Productivity**: Notion, Airtable, Google Sheets, Google Docs - **Development**: GitHub, GitLab, Jira, Linear @@ -81,6 +81,12 @@ Built-in support for: - Semantic search and retrieval - Chunking strategies (fixed size, semantic, recursive) +### Tables +Built-in table creation and management: +- Structured data storage +- Queryable tables for agent workflows +- Native integrations + ### Code Execution - Sandboxed JavaScript/TypeScript execution - Access to npm packages diff --git a/apps/sim/app/llms.txt/route.ts b/apps/sim/app/llms.txt/route.ts index 23e458f626..e38d203c1b 100644 --- a/apps/sim/app/llms.txt/route.ts +++ b/apps/sim/app/llms.txt/route.ts @@ -5,16 +5,16 @@ export async function GET() { const llmsContent = `# Sim -> Sim is an open-source AI agent workflow builder. 70,000+ developers at startups to Fortune 500 companies deploy agentic workflows on the Sim platform. SOC2 and HIPAA compliant. +> Sim is the open-source platform to build AI agents and run your agentic workforce. Connect 1,000+ integrations and LLMs to deploy and orchestrate agentic workflows. -Sim provides a visual drag-and-drop interface for building and deploying AI agent workflows. Connect to 100+ integrations and ship production-ready AI automations. 
+Sim lets teams create agents, workflows, knowledge bases, tables, and docs. Over 100,000 builders use Sim — from startups to Fortune 500 companies. SOC2 and HIPAA compliant. ## Core Pages -- [Homepage](${baseUrl}): Main landing page with product overview and features +- [Homepage](${baseUrl}): Product overview, features, and pricing - [Templates](${baseUrl}/templates): Pre-built workflow templates to get started quickly - [Changelog](${baseUrl}/changelog): Product updates and release notes -- [Sim Studio Blog](${baseUrl}/studio): Announcements, insights, and guides for AI workflows +- [Sim Studio Blog](${baseUrl}/studio): Announcements, insights, and guides ## Documentation @@ -29,28 +29,31 @@ Sim provides a visual drag-and-drop interface for building and deploying AI agen - **Block**: Individual step (LLM call, tool call, HTTP request, code execution) - **Trigger**: Event or schedule that initiates workflow execution - **Execution**: A single run of a workflow with logs and outputs +- **Knowledge Base**: Vector-indexed document store for retrieval-augmented generation ## Capabilities -- Visual workflow builder with drag-and-drop canvas -- Multi-model LLM orchestration (OpenAI, Anthropic, Google, Mistral, xAI) -- Retrieval-augmented generation (RAG) with vector databases -- 100+ integrations (Slack, Gmail, Notion, Airtable, databases) +- AI agent creation and deployment +- Agentic workflow orchestration +- 1,000+ integrations (Slack, Gmail, Notion, Airtable, databases, and more) +- Multi-model LLM orchestration (OpenAI, Anthropic, Google, Mistral, xAI, Perplexity) +- Knowledge base creation with retrieval-augmented generation (RAG) +- Table creation and management +- Document creation and processing - Scheduled and webhook-triggered executions -- Real-time collaboration and version control ## Use Cases -- AI agent workflow automation -- RAG pipelines and document processing -- Chatbot and copilot workflows for SaaS -- Email and customer support automation +- AI 
agent deployment and orchestration +- Knowledge bases and RAG pipelines +- Document creation and processing +- Customer support automation - Internal operations (sales, marketing, legal, finance) ## Links - [GitHub Repository](https://github.com/simstudioai/sim): Open-source codebase -- [Discord Community](https://discord.gg/Hr4UWYEcTT): Get help and connect with users +- [Discord Community](https://discord.gg/Hr4UWYEcTT): Get help and connect with 100,000+ builders - [X/Twitter](https://x.com/simdotai): Product updates and announcements ## Optional diff --git a/apps/sim/app/manifest.ts b/apps/sim/app/manifest.ts index bfd0784215..77c92d0c39 100644 --- a/apps/sim/app/manifest.ts +++ b/apps/sim/app/manifest.ts @@ -5,10 +5,10 @@ export default function manifest(): MetadataRoute.Manifest { const brand = getBrandConfig() return { - name: brand.name === 'Sim' ? 'Sim - AI Agent Workflow Builder' : brand.name, + name: brand.name === 'Sim' ? 'Sim — Build AI Agents & Run Your Agentic Workforce' : brand.name, short_name: brand.name, description: - 'Open-source AI agent workflow builder. 70,000+ developers build and deploy agentic workflows on Sim. Visual drag-and-drop interface for creating AI automations. SOC2 and HIPAA compliant.', + 'Sim is the open-source platform to build AI agents and run your agentic workforce. 
Connect 1,000+ integrations and LLMs to orchestrate agentic workflows.', start_url: '/', scope: '/', display: 'standalone', diff --git a/apps/sim/app/page.tsx b/apps/sim/app/page.tsx index 2c533dea8a..9c3a649fdb 100644 --- a/apps/sim/app/page.tsx +++ b/apps/sim/app/page.tsx @@ -1,16 +1,18 @@ import type { Metadata } from 'next' import { getBaseUrl } from '@/lib/core/utils/urls' -import Landing from '@/app/(landing)/landing' +import Landing from '@/app/(home)/landing' + +export const dynamic = 'force-dynamic' const baseUrl = getBaseUrl() export const metadata: Metadata = { metadataBase: new URL(baseUrl), - title: 'Sim - AI Agent Workflow Builder | Open Source Platform', + title: 'Sim — Build AI Agents & Run Your Agentic Workforce', description: - 'Open-source AI agent workflow builder used by 70,000+ developers. Build and deploy agentic workflows with a visual drag-and-drop canvas. Connect 100+ apps and ship SOC2 & HIPAA-ready AI automations from startups to Fortune 500.', + 'Sim is the open-source platform to build AI agents and run your agentic workforce. Connect 1,000+ integrations and LLMs to orchestrate agentic workflows.', keywords: - 'AI agent workflow builder, agentic workflows, open source AI, visual workflow builder, AI automation, LLM workflows, AI agents, workflow automation, no-code AI, SOC2 compliant, HIPAA compliant, enterprise AI', + 'AI agents, agentic workforce, open-source AI agent platform, agentic workflows, LLM orchestration, AI automation, knowledge base, workflow builder, AI integrations, SOC2 compliant, HIPAA compliant, enterprise AI', authors: [{ name: 'Sim' }], creator: 'Sim', publisher: 'Sim', @@ -20,9 +22,9 @@ export const metadata: Metadata = { telephone: false, }, openGraph: { - title: 'Sim - AI Agent Workflow Builder | Open Source', + title: 'Sim — Build AI Agents & Run Your Agentic Workforce', description: - 'Open-source platform used by 70,000+ developers. 
Design, deploy, and monitor agentic workflows with a visual drag-and-drop interface, 100+ integrations, and enterprise-grade security.', + 'Sim is the open-source platform to build AI agents and run your agentic workforce. Connect 1,000+ integrations and LLMs to orchestrate agentic workflows. Create agents, workflows, knowledge bases, tables, and docs. Join over 100,000 builders.', type: 'website', url: baseUrl, siteName: 'Sim', @@ -32,7 +34,7 @@ export const metadata: Metadata = { url: '/logo/426-240/primary/small.png', width: 2130, height: 1200, - alt: 'Sim - AI Agent Workflow Builder', + alt: 'Sim — Build AI Agents & Run Your Agentic Workforce', type: 'image/png', }, ], @@ -41,12 +43,12 @@ export const metadata: Metadata = { card: 'summary_large_image', site: '@simdotai', creator: '@simdotai', - title: 'Sim - AI Agent Workflow Builder | Open Source', + title: 'Sim — Build AI Agents & Run Your Agentic Workforce', description: - 'Open-source platform for agentic workflows. 70,000+ developers. Visual builder. 100+ integrations. SOC2 & HIPAA compliant.', + 'Sim is the open-source platform to build AI agents and run your agentic workforce. 
Connect 1,000+ integrations and LLMs to orchestrate agentic workflows.', images: { url: '/logo/426-240/primary/small.png', - alt: 'Sim - AI Agent Workflow Builder', + alt: 'Sim — Build AI Agents & Run Your Agentic Workforce', }, }, alternates: { @@ -72,12 +74,14 @@ export const metadata: Metadata = { classification: 'AI Development Tools', referrer: 'origin-when-cross-origin', other: { - 'llm:content-type': 'AI workflow builder, visual programming, no-code AI development', + 'llm:content-type': + 'AI agent platform, agentic workforce, agentic workflows, LLM orchestration', 'llm:use-cases': - 'email automation, Slack bots, Discord moderation, data analysis, customer support, content generation, agentic automations', + 'AI agents, agentic workforce, agentic workflows, knowledge bases, tables, document creation, email automation, Slack bots, data analysis, customer support, content generation', 'llm:integrations': - 'OpenAI, Anthropic, Google AI, Slack, Gmail, Discord, Notion, Airtable, Supabase', - 'llm:pricing': 'free tier available, pro $20/month, team $40/month, enterprise custom', + 'OpenAI, Anthropic, Google AI, Mistral, xAI, Perplexity, Slack, Gmail, Discord, Notion, Airtable, Supabase', + 'llm:pricing': + 'free tier available, pro $25/month, max $100/month, team plans available, enterprise custom', 'llm:region': 'global', 'llm:languages': 'en', }, diff --git a/apps/sim/app/resume/[workflowId]/[executionId]/page.tsx b/apps/sim/app/resume/[workflowId]/[executionId]/page.tsx index bfd2e2fac2..8ad86d3222 100644 --- a/apps/sim/app/resume/[workflowId]/[executionId]/page.tsx +++ b/apps/sim/app/resume/[workflowId]/[executionId]/page.tsx @@ -1,6 +1,12 @@ +import type { Metadata } from 'next' import { PauseResumeManager } from '@/lib/workflows/executor/human-in-the-loop-manager' import ResumeExecutionPage from '@/app/resume/[workflowId]/[executionId]/resume-page-client' +export const metadata: Metadata = { + title: 'Resume Execution', + robots: { index: false }, +} + 
export const runtime = 'nodejs' export const dynamic = 'force-dynamic' diff --git a/apps/sim/app/templates/[id]/template.tsx b/apps/sim/app/templates/[id]/template.tsx index e64252e0d6..9bb7f26f7c 100644 --- a/apps/sim/app/templates/[id]/template.tsx +++ b/apps/sim/app/templates/[id]/template.tsx @@ -19,17 +19,15 @@ import { Breadcrumb, Button, Copy, + DropdownMenu, + DropdownMenuContent, + DropdownMenuItem, + DropdownMenuTrigger, Popover, PopoverContent, PopoverItem, PopoverTrigger, } from '@/components/emcn' -import { - DropdownMenu, - DropdownMenuContent, - DropdownMenuItem, - DropdownMenuTrigger, -} from '@/components/ui/dropdown-menu' import { Skeleton } from '@/components/ui/skeleton' import { VerifiedBadge } from '@/components/ui/verified-badge' import { useSession } from '@/lib/auth/auth-client' @@ -704,9 +702,9 @@ export default function TemplateDetails({ isWorkspaceContext = false }: Template - + {workspaces.length === 0 ? ( - + No workspaces with write access ) : ( @@ -714,11 +712,10 @@ export default function TemplateDetails({ isWorkspaceContext = false }: Template handleWorkspaceSelectForEdit(workspace.id)} - className='flex cursor-pointer items-center justify-between' >
- {workspace.name} - + {workspace.name} + {workspace.permissions} access
diff --git a/apps/sim/app/templates/page.tsx b/apps/sim/app/templates/page.tsx index c233949a45..c74818acd3 100644 --- a/apps/sim/app/templates/page.tsx +++ b/apps/sim/app/templates/page.tsx @@ -1,11 +1,18 @@ import { db } from '@sim/db' import { permissions, templateCreators, templates, workspace } from '@sim/db/schema' import { and, desc, eq } from 'drizzle-orm' +import type { Metadata } from 'next' import { redirect } from 'next/navigation' import { getSession } from '@/lib/auth' import type { Template } from '@/app/templates/templates' import Templates from '@/app/templates/templates' +export const metadata: Metadata = { + title: 'Templates', + description: + 'Browse pre-built workflow templates to get started quickly with AI agents, automations, and integrations.', +} + /** * Public templates list page. * Redirects authenticated users to their workspace-scoped templates page. diff --git a/apps/sim/app/unsubscribe/page.tsx b/apps/sim/app/unsubscribe/page.tsx index 8f6e6bf688..d1b3ec2de1 100644 --- a/apps/sim/app/unsubscribe/page.tsx +++ b/apps/sim/app/unsubscribe/page.tsx @@ -1,3 +1,9 @@ +import type { Metadata } from 'next' import Unsubscribe from '@/app/unsubscribe/unsubscribe' +export const metadata: Metadata = { + title: 'Unsubscribe', + robots: { index: false }, +} + export default Unsubscribe diff --git a/apps/sim/app/workspace/[workspaceId]/components/error/error.tsx b/apps/sim/app/workspace/[workspaceId]/components/error/error.tsx new file mode 100644 index 0000000000..524a7f8860 --- /dev/null +++ b/apps/sim/app/workspace/[workspaceId]/components/error/error.tsx @@ -0,0 +1,64 @@ +'use client' + +import { useEffect } from 'react' +import { createLogger } from '@sim/logger' +import { RefreshCw } from 'lucide-react' +import { Button } from '@/components/emcn' + +interface ErrorAction { + label: string + icon?: React.ReactNode + onClick: () => void + variant?: 'default' | 'ghost' +} + +export interface ErrorStateProps { + error: Error & { digest?: string } 
+ reset: () => void + title: string + description: string + loggerName: string + secondaryAction?: ErrorAction +} + +export function ErrorState({ + error, + reset, + title, + description, + loggerName, + secondaryAction, +}: ErrorStateProps) { + const logger = createLogger(loggerName) + + useEffect(() => { + logger.error(`${loggerName} error:`, { error: error.message, digest: error.digest }) + }, [error, logger, loggerName]) + + return ( +
+
+
+

{title}

+

{description}

+
+
+ {secondaryAction && ( + + )} + +
+
+
+ ) +} diff --git a/apps/sim/app/workspace/[workspaceId]/components/error/index.ts b/apps/sim/app/workspace/[workspaceId]/components/error/index.ts new file mode 100644 index 0000000000..1fdff4534b --- /dev/null +++ b/apps/sim/app/workspace/[workspaceId]/components/error/index.ts @@ -0,0 +1 @@ +export { ErrorState, type ErrorStateProps } from './error' diff --git a/apps/sim/app/workspace/[workspaceId]/components/index.ts b/apps/sim/app/workspace/[workspaceId]/components/index.ts new file mode 100644 index 0000000000..4a08e4f6c7 --- /dev/null +++ b/apps/sim/app/workspace/[workspaceId]/components/index.ts @@ -0,0 +1,28 @@ +export { ErrorState, type ErrorStateProps } from './error' +export { InlineRenameInput } from './inline-rename-input' +export { ownerCell } from './resource/components/owner-cell/owner-cell' +export type { + BreadcrumbEditing, + BreadcrumbItem, + CreateAction, + DropdownOption, + HeaderAction, +} from './resource/components/resource-header' +export { ResourceHeader } from './resource/components/resource-header' +export type { + ColumnOption, + FilterTag, + SearchConfig, + SortConfig, +} from './resource/components/resource-options-bar' +export { ResourceOptionsBar } from './resource/components/resource-options-bar' +export { timeCell } from './resource/components/time-cell/time-cell' +export type { + PaginationConfig, + ResourceCell, + ResourceColumn, + ResourceRow, + ResourceTableProps, + SelectableConfig, +} from './resource/resource' +export { Resource, ResourceTable } from './resource/resource' diff --git a/apps/sim/app/workspace/[workspaceId]/components/inline-rename-input.tsx b/apps/sim/app/workspace/[workspaceId]/components/inline-rename-input.tsx new file mode 100644 index 0000000000..c431184475 --- /dev/null +++ b/apps/sim/app/workspace/[workspaceId]/components/inline-rename-input.tsx @@ -0,0 +1,38 @@ +'use client' + +import { useEffect, useRef } from 'react' + +interface InlineRenameInputProps { + value: string + onChange: (value: string) 
=> void + onSubmit: () => void + onCancel: () => void +} + +export function InlineRenameInput({ value, onChange, onSubmit, onCancel }: InlineRenameInputProps) { + const inputRef = useRef(null) + + useEffect(() => { + const el = inputRef.current + if (el) { + el.focus() + el.select() + } + }, []) + + return ( + onChange(e.target.value)} + onKeyDown={(e) => { + if (e.key === 'Enter') onSubmit() + if (e.key === 'Escape') onCancel() + }} + onBlur={onSubmit} + onClick={(e) => e.stopPropagation()} + className='min-w-0 flex-1 truncate border-0 bg-transparent p-0 font-medium text-[14px] text-[var(--text-body)] outline-none focus:outline-none focus:ring-0' + /> + ) +} diff --git a/apps/sim/app/workspace/[workspaceId]/components/resource/components/index.ts b/apps/sim/app/workspace/[workspaceId]/components/resource/components/index.ts new file mode 100644 index 0000000000..5b63ad787f --- /dev/null +++ b/apps/sim/app/workspace/[workspaceId]/components/resource/components/index.ts @@ -0,0 +1,4 @@ +export * from './owner-cell' +export * from './resource-header' +export * from './resource-options-bar' +export * from './time-cell' diff --git a/apps/sim/app/workspace/[workspaceId]/components/resource/components/owner-cell/index.ts b/apps/sim/app/workspace/[workspaceId]/components/resource/components/owner-cell/index.ts new file mode 100644 index 0000000000..fa102e05d3 --- /dev/null +++ b/apps/sim/app/workspace/[workspaceId]/components/resource/components/owner-cell/index.ts @@ -0,0 +1 @@ +export { ownerCell } from './owner-cell' diff --git a/apps/sim/app/workspace/[workspaceId]/components/resource/components/owner-cell/owner-cell.tsx b/apps/sim/app/workspace/[workspaceId]/components/resource/components/owner-cell/owner-cell.tsx new file mode 100644 index 0000000000..2635837602 --- /dev/null +++ b/apps/sim/app/workspace/[workspaceId]/components/resource/components/owner-cell/owner-cell.tsx @@ -0,0 +1,43 @@ +'use client' + +import type { ResourceCell } from 
'@/app/workspace/[workspaceId]/components/resource/resource' +import type { WorkspaceMember } from '@/hooks/queries/workspace' + +function OwnerAvatar({ name, image }: { name: string; image: string | null }) { + if (image) { + return ( + {name} + ) + } + + return ( + + {name.charAt(0).toUpperCase()} + + ) +} + +/** + * Resolves a user ID into a ResourceCell with an avatar icon and display name. + * Returns null label while members are still loading to avoid flashing raw IDs. + */ +export function ownerCell( + userId: string | null | undefined, + members?: WorkspaceMember[] +): ResourceCell { + if (!userId) return { label: null } + if (!members) return { label: null } + + const member = members.find((m) => m.userId === userId) + if (!member) return { label: null } + + return { + icon: , + label: member.name, + } +} diff --git a/apps/sim/app/workspace/[workspaceId]/components/resource/components/resource-header/index.ts b/apps/sim/app/workspace/[workspaceId]/components/resource/components/resource-header/index.ts new file mode 100644 index 0000000000..697f8c9aac --- /dev/null +++ b/apps/sim/app/workspace/[workspaceId]/components/resource/components/resource-header/index.ts @@ -0,0 +1,8 @@ +export type { + BreadcrumbEditing, + BreadcrumbItem, + CreateAction, + DropdownOption, + HeaderAction, +} from './resource-header' +export { ResourceHeader } from './resource-header' diff --git a/apps/sim/app/workspace/[workspaceId]/components/resource/components/resource-header/resource-header.tsx b/apps/sim/app/workspace/[workspaceId]/components/resource/components/resource-header/resource-header.tsx new file mode 100644 index 0000000000..7c94ac098c --- /dev/null +++ b/apps/sim/app/workspace/[workspaceId]/components/resource/components/resource-header/resource-header.tsx @@ -0,0 +1,214 @@ +import { Fragment, memo } from 'react' +import { + Button, + ChevronDown, + DropdownMenu, + DropdownMenuContent, + DropdownMenuItem, + DropdownMenuTrigger, + Plus, +} from '@/components/emcn' 
+import { cn } from '@/lib/core/utils/cn' +import { InlineRenameInput } from '@/app/workspace/[workspaceId]/components/inline-rename-input' + +export interface DropdownOption { + label: string + icon?: React.ElementType + onClick: () => void + disabled?: boolean +} + +export interface BreadcrumbEditing { + isEditing: boolean + value: string + onChange: (value: string) => void + onSubmit: () => void + onCancel: () => void +} + +export interface BreadcrumbItem { + label: string + onClick?: () => void + dropdownItems?: DropdownOption[] + editing?: BreadcrumbEditing +} + +export interface HeaderAction { + label: string + icon?: React.ElementType + onClick: () => void + disabled?: boolean +} + +export interface CreateAction { + label: string + onClick: () => void + disabled?: boolean +} + +interface ResourceHeaderProps { + icon?: React.ElementType + title?: string + breadcrumbs?: BreadcrumbItem[] + create?: CreateAction + actions?: HeaderAction[] +} + +export const ResourceHeader = memo(function ResourceHeader({ + icon: Icon, + title, + breadcrumbs, + create, + actions, +}: ResourceHeaderProps) { + const hasBreadcrumbs = breadcrumbs && breadcrumbs.length > 0 + + return ( +
+
+
+ {hasBreadcrumbs ? ( + breadcrumbs.map((crumb, i) => ( + + {i > 0 && ( + / + )} + + + )) + ) : ( + <> + {Icon && } + {title && ( +

{title}

+ )} + + )} +
+
+ {actions?.map((action) => { + const ActionIcon = action.icon + return ( + + ) + })} + {create && ( + + )} +
+
+
+ ) +}) + +function BreadcrumbSegment({ + icon: Icon, + label, + onClick, + dropdownItems, + editing, +}: { + icon?: React.ElementType + label: string + onClick?: () => void + dropdownItems?: DropdownOption[] + editing?: BreadcrumbEditing +}) { + if (editing?.isEditing) { + return ( + + {Icon && } + + + ) + } + + const content = ( + <> + {Icon && } + {label} + + ) + + if (dropdownItems && dropdownItems.length > 0) { + return ( + + + + + + {dropdownItems.map((item) => { + const ItemIcon = item.icon + return ( + + {ItemIcon && } + {item.label} + + ) + })} + + + ) + } + + if (onClick) { + return ( + + ) + } + + return ( + + {content} + + ) +} diff --git a/apps/sim/app/workspace/[workspaceId]/components/resource/components/resource-options-bar/index.ts b/apps/sim/app/workspace/[workspaceId]/components/resource/components/resource-options-bar/index.ts new file mode 100644 index 0000000000..ba2be6c912 --- /dev/null +++ b/apps/sim/app/workspace/[workspaceId]/components/resource/components/resource-options-bar/index.ts @@ -0,0 +1,8 @@ +export type { + ColumnOption, + FilterTag, + SearchConfig, + SearchTag, + SortConfig, +} from './resource-options-bar' +export { ResourceOptionsBar } from './resource-options-bar' diff --git a/apps/sim/app/workspace/[workspaceId]/components/resource/components/resource-options-bar/resource-options-bar.tsx b/apps/sim/app/workspace/[workspaceId]/components/resource/components/resource-options-bar/resource-options-bar.tsx new file mode 100644 index 0000000000..b64349112c --- /dev/null +++ b/apps/sim/app/workspace/[workspaceId]/components/resource/components/resource-options-bar/resource-options-bar.tsx @@ -0,0 +1,226 @@ +import { memo, type ReactNode } from 'react' +import * as PopoverPrimitive from '@radix-ui/react-popover' +import { + ArrowDown, + ArrowUp, + ArrowUpDown, + Button, + DropdownMenu, + DropdownMenuContent, + DropdownMenuItem, + DropdownMenuSeparator, + DropdownMenuTrigger, + ListFilter, + Search, + X, +} from '@/components/emcn' 
+import { cn } from '@/lib/core/utils/cn' + +type SortDirection = 'asc' | 'desc' + +export interface ColumnOption { + id: string + label: string + type?: string + icon?: React.ElementType +} + +export interface SortConfig { + options: ColumnOption[] + active: { column: string; direction: SortDirection } | null + onSort: (column: string, direction: SortDirection) => void + onClear?: () => void +} + +export interface FilterTag { + label: string + onRemove: () => void +} + +export interface SearchTag { + label: string + value: string + onRemove: () => void +} + +export interface SearchConfig { + value: string + onChange: (value: string) => void + placeholder?: string + inputRef?: React.RefObject + onKeyDown?: (e: React.KeyboardEvent) => void + onFocus?: () => void + onBlur?: () => void + tags?: SearchTag[] + highlightedTagIndex?: number | null + onClearAll?: () => void + dropdown?: ReactNode + dropdownRef?: React.RefObject +} + +interface ResourceOptionsBarProps { + search?: SearchConfig + sort?: SortConfig + filter?: ReactNode + filterTags?: FilterTag[] + extras?: ReactNode +} + +export const ResourceOptionsBar = memo(function ResourceOptionsBar({ + search, + sort, + filter, + filterTags, + extras, +}: ResourceOptionsBarProps) { + const hasContent = search || sort || filter || extras || (filterTags && filterTags.length > 0) + if (!hasContent) return null + + return ( +
+
+ {search && ( +
+ +
+ {search.tags?.map((tag, i) => ( + + ))} + search.onChange(e.target.value)} + onKeyDown={search.onKeyDown} + onFocus={search.onFocus} + onBlur={search.onBlur} + placeholder={search.tags?.length ? '' : (search.placeholder ?? 'Search...')} + className='min-w-[80px] flex-1 bg-transparent py-[4px] text-[12px] text-[var(--text-secondary)] outline-none placeholder:text-[var(--text-subtle)]' + /> +
+ {search.tags?.length || search.value ? ( + + ) : null} + {search.dropdown && ( +
+ {search.dropdown} +
+ )} +
+ )} +
+ {extras} + {filterTags?.map((tag) => ( + + ))} + {filter && ( + + + + + + + {filter} + + + + )} + {sort && } +
+
+
+ ) +}) + +function SortDropdown({ config }: { config: SortConfig }) { + const { options, active, onSort, onClear } = config + + return ( + + + + + + {options.map((option) => { + const isActive = active?.column === option.id + const Icon = option.icon + const DirectionIcon = isActive ? (active.direction === 'asc' ? ArrowUp : ArrowDown) : null + + return ( + { + if (isActive) { + onSort(option.id, active.direction === 'asc' ? 'desc' : 'asc') + } else { + onSort(option.id, 'desc') + } + }} + > + {Icon && } + {option.label} + {DirectionIcon && ( + + )} + + ) + })} + {active && onClear && ( + <> + + + + Clear sort + + + )} + + + ) +} diff --git a/apps/sim/app/workspace/[workspaceId]/components/resource/components/time-cell/index.ts b/apps/sim/app/workspace/[workspaceId]/components/resource/components/time-cell/index.ts new file mode 100644 index 0000000000..20db143be9 --- /dev/null +++ b/apps/sim/app/workspace/[workspaceId]/components/resource/components/time-cell/index.ts @@ -0,0 +1 @@ +export { timeCell } from './time-cell' diff --git a/apps/sim/app/workspace/[workspaceId]/components/resource/components/time-cell/time-cell.ts b/apps/sim/app/workspace/[workspaceId]/components/resource/components/time-cell/time-cell.ts new file mode 100644 index 0000000000..fb97045159 --- /dev/null +++ b/apps/sim/app/workspace/[workspaceId]/components/resource/components/time-cell/time-cell.ts @@ -0,0 +1,81 @@ +import type { ResourceCell } from '@/app/workspace/[workspaceId]/components/resource/resource' + +const SECOND = 1000 +const MINUTE = 60 * SECOND +const HOUR = 60 * MINUTE +const DAY = 24 * HOUR + +const ORDINAL_RULES: [number, string][] = [ + [1, 'st'], + [2, 'nd'], + [3, 'rd'], +] + +function ordinalSuffix(day: number): string { + if (day >= 11 && day <= 13) return 'th' + return ORDINAL_RULES.find(([d]) => day % 10 === d)?.[1] ?? 
'th' +} + +const MONTH_NAMES = [ + 'January', + 'February', + 'March', + 'April', + 'May', + 'June', + 'July', + 'August', + 'September', + 'October', + 'November', + 'December', +] as const + +function formatFullDate(date: Date): string { + const month = MONTH_NAMES[date.getMonth()] + const day = date.getDate() + const year = date.getFullYear() + return `${month} ${day}${ordinalSuffix(day)}, ${year}` +} + +function pluralize(value: number, unit: string): string { + return `${value} ${unit}${value === 1 ? '' : 's'}` +} + +/** + * Formats a date string into a human-friendly relative time label. + * + * - Within ~1 minute of now: "Now" + * - Under 1 hour: "X minute(s) ago" / "X minute(s)" + * - Under 24 hours: "X hour(s) ago" / "X hour(s)" + * - Under 2 days: "X day(s) ago" / "X day(s)" + * - Beyond 2 days: full date (e.g. "March 4th, 2026") + */ +export function timeCell(dateValue: string | Date | null | undefined): ResourceCell { + if (!dateValue) return { label: null } + + const date = dateValue instanceof Date ? dateValue : new Date(dateValue) + const now = new Date() + const diff = now.getTime() - date.getTime() + const absDiff = Math.abs(diff) + const isPast = diff > 0 + + if (absDiff < MINUTE) return { label: 'Now' } + + if (absDiff < HOUR) { + const minutes = Math.floor(absDiff / MINUTE) + return { label: isPast ? `${pluralize(minutes, 'minute')} ago` : pluralize(minutes, 'minute') } + } + + if (absDiff < DAY) { + const hours = Math.floor(absDiff / HOUR) + return { label: isPast ? `${pluralize(hours, 'hour')} ago` : pluralize(hours, 'hour') } + } + + if (absDiff < 2 * DAY) { + const days = Math.floor(absDiff / DAY) + return { label: isPast ? 
`${pluralize(days, 'day')} ago` : pluralize(days, 'day') } + } + + return { label: formatFullDate(date) } +} diff --git a/apps/sim/app/workspace/[workspaceId]/components/resource/resource.tsx b/apps/sim/app/workspace/[workspaceId]/components/resource/resource.tsx new file mode 100644 index 0000000000..63300948f9 --- /dev/null +++ b/apps/sim/app/workspace/[workspaceId]/components/resource/resource.tsx @@ -0,0 +1,552 @@ +'use client' +import { memo, type ReactNode, useCallback, useEffect, useMemo, useRef, useState } from 'react' +import { ChevronLeft, ChevronRight } from 'lucide-react' +import { ArrowDown, ArrowUp, Button, Checkbox, Loader, Plus, Skeleton } from '@/components/emcn' +import { cn } from '@/lib/core/utils/cn' +import type { BreadcrumbItem, CreateAction, HeaderAction } from './components/resource-header' +import { ResourceHeader } from './components/resource-header' +import type { FilterTag, SearchConfig, SortConfig } from './components/resource-options-bar' +import { ResourceOptionsBar } from './components/resource-options-bar' + +export interface ResourceColumn { + id: string + header: string + widthMultiplier?: number +} + +export interface ResourceCell { + icon?: ReactNode + label?: string | null + content?: ReactNode +} + +export interface ResourceRow { + id: string + cells: Record + sortValues?: Record +} + +export interface SelectableConfig { + selectedIds: Set + onSelectRow: (id: string, checked: boolean) => void + onSelectAll: (checked: boolean) => void + isAllSelected: boolean + disabled?: boolean +} + +export interface PaginationConfig { + currentPage: number + totalPages: number + onPageChange: (page: number) => void +} + +interface ResourceProps { + icon: React.ElementType + title: string + breadcrumbs?: BreadcrumbItem[] + create?: CreateAction + search?: SearchConfig + defaultSort?: string + sort?: SortConfig + headerActions?: HeaderAction[] + columns: ResourceColumn[] + rows: ResourceRow[] + selectedRowId?: string | null + selectable?: 
SelectableConfig + onRowClick?: (rowId: string) => void + onRowHover?: (rowId: string) => void + onRowContextMenu?: (e: React.MouseEvent, rowId: string) => void + isLoading?: boolean + onContextMenu?: (e: React.MouseEvent) => void + filter?: ReactNode + filterTags?: FilterTag[] + extras?: ReactNode + pagination?: PaginationConfig + emptyMessage?: string + overlay?: ReactNode +} + +const EMPTY_CELL_PLACEHOLDER = '- - -' +const SKELETON_ROW_COUNT = 5 + +/** + * Shared page shell for resource list pages (tables, files, knowledge, schedules, logs). + * Renders the header, toolbar with search, and a data table from column/row definitions. + */ +export function Resource({ + icon, + title, + breadcrumbs, + create, + search, + defaultSort, + sort: sortOverride, + headerActions, + columns, + rows, + selectedRowId, + selectable, + onRowClick, + onRowHover, + onRowContextMenu, + isLoading, + onContextMenu, + filter, + filterTags, + extras, + pagination, + emptyMessage, + overlay, +}: ResourceProps) { + return ( +
+ + + +
+ ) +} + +export interface ResourceTableProps { + columns: ResourceColumn[] + rows: ResourceRow[] + defaultSort?: string + sort?: SortConfig + selectedRowId?: string | null + selectable?: SelectableConfig + onRowClick?: (rowId: string) => void + onRowHover?: (rowId: string) => void + onRowContextMenu?: (e: React.MouseEvent, rowId: string) => void + isLoading?: boolean + create?: CreateAction + onLoadMore?: () => void + hasMore?: boolean + isLoadingMore?: boolean + pagination?: PaginationConfig + emptyMessage?: string + overlay?: ReactNode +} + +/** + * Data table body extracted from Resource for independent composition. + * Use directly when rendering a table without the Resource header/toolbar. + */ +export const ResourceTable = memo(function ResourceTable({ + columns, + rows, + defaultSort, + sort: externalSort, + selectedRowId, + selectable, + onRowClick, + onRowHover, + onRowContextMenu, + isLoading, + create, + onLoadMore, + hasMore, + isLoadingMore, + pagination, + emptyMessage, + overlay, +}: ResourceTableProps) { + const headerRef = useRef(null) + const loadMoreRef = useRef(null) + const sortEnabled = defaultSort != null + const [internalSort, setInternalSort] = useState<{ column: string; direction: 'asc' | 'desc' }>({ + column: defaultSort ?? '', + direction: 'desc', + }) + + const handleBodyScroll = useCallback((e: React.UIEvent) => { + if (headerRef.current) { + headerRef.current.scrollLeft = e.currentTarget.scrollLeft + } + }, []) + + const handleSort = useCallback((column: string, direction: 'asc' | 'desc') => { + setInternalSort({ column, direction }) + }, []) + + const displayRows = useMemo(() => { + if (!sortEnabled || externalSort) return rows + return [...rows].sort((a, b) => { + const col = internalSort.column + const aVal = a.sortValues?.[col] ?? a.cells[col]?.label ?? '' + const bVal = b.sortValues?.[col] ?? b.cells[col]?.label ?? '' + const cmp = + typeof aVal === 'number' && typeof bVal === 'number' + ? 
aVal - bVal + : String(aVal).localeCompare(String(bVal)) + return internalSort.direction === 'asc' ? -cmp : cmp + }) + }, [rows, internalSort, sortEnabled, externalSort]) + + useEffect(() => { + if (!onLoadMore || !hasMore) return + const el = loadMoreRef.current + if (!el) return + const observer = new IntersectionObserver( + ([entry]) => { + if (entry.isIntersecting) onLoadMore() + }, + { rootMargin: '200px' } + ) + observer.observe(el) + return () => observer.disconnect() + }, [onLoadMore, hasMore]) + + const hasCheckbox = selectable != null + const totalColSpan = columns.length + (hasCheckbox ? 1 : 0) + + if (isLoading) { + return ( + + ) + } + + if (rows.length === 0 && emptyMessage) { + return ( +
+ {emptyMessage} +
+ ) + } + + return ( +
+
+ + + + + {hasCheckbox && ( + + )} + {columns.map((col) => { + if (!sortEnabled) { + return ( + + ) + } + const isActive = internalSort.column === col.id + const SortIcon = internalSort.direction === 'asc' ? ArrowUp : ArrowDown + return ( + + ) + })} + + +
+ selectable.onSelectAll(checked as boolean)} + disabled={selectable.disabled} + aria-label='Select all' + /> + + {col.header} + + +
+
+
+ + + + {displayRows.map((row) => { + const isSelected = selectable?.selectedIds.has(row.id) ?? false + return ( + onRowClick?.(row.id)} + onMouseEnter={onRowHover ? () => onRowHover(row.id) : undefined} + onContextMenu={(e) => onRowContextMenu?.(e, row.id)} + > + {hasCheckbox && ( + + )} + {columns.map((col, colIdx) => { + const cell = row.cells[col.id] + return ( + + ) + })} + + ) + })} + {create && ( + + + + )} + +
+ + selectable.onSelectRow(row.id, checked as boolean) + } + disabled={selectable.disabled} + aria-label='Select row' + onClick={(e) => e.stopPropagation()} + /> + + +
+ + + {create.label} + +
+ {hasMore && ( +
+ {isLoadingMore && ( + + )} +
+ )} +
+ {overlay} + {pagination && pagination.totalPages > 1 && ( + + )} +
+ ) +}) + +function Pagination({ + currentPage, + totalPages, + onPageChange, +}: { + currentPage: number + totalPages: number + onPageChange: (page: number) => void +}) { + return ( +
+
+ +
+ {Array.from({ length: Math.min(totalPages, 5) }, (_, i) => { + let page: number + if (totalPages <= 5) { + page = i + 1 + } else if (currentPage <= 3) { + page = i + 1 + } else if (currentPage >= totalPages - 2) { + page = totalPages - 4 + i + } else { + page = currentPage - 2 + i + } + if (page < 1 || page > totalPages) return null + return ( + + ) + })} +
+ +
+
+ ) +} + +function CellContent({ cell, primary }: { cell: ResourceCell; primary?: boolean }) { + if (cell.content) return <>{cell.content} + return ( + + {cell.icon && {cell.icon}} + {cell.label} + + ) +} + +function ResourceColGroup({ + columns, + hasCheckbox, +}: { + columns: ResourceColumn[] + hasCheckbox?: boolean +}) { + return ( + + {hasCheckbox && } + {columns.map((col, colIdx) => ( + + ))} + + ) +} + +function DataTableSkeleton({ + columns, + rowCount, + hasCheckbox, +}: { + columns: ResourceColumn[] + rowCount: number + hasCheckbox?: boolean +}) { + return ( + <> +
+ + + + + {hasCheckbox && ( + + )} + {columns.map((col) => ( + + ))} + + +
+ + +
+ +
+
+
+
+ + + + {Array.from({ length: rowCount }, (_, i) => ( + + {hasCheckbox && ( + + )} + {columns.map((col, colIdx) => ( + + ))} + + ))} + +
+ + + + {colIdx === 0 && } + + +
+
+ + ) +} diff --git a/apps/sim/app/workspace/[workspaceId]/error.tsx b/apps/sim/app/workspace/[workspaceId]/error.tsx index 2504830ecb..c681cf70ee 100644 --- a/apps/sim/app/workspace/[workspaceId]/error.tsx +++ b/apps/sim/app/workspace/[workspaceId]/error.tsx @@ -1,5 +1,20 @@ 'use client' -import { NextError } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/error' +import { ErrorState } from '@/app/workspace/[workspaceId]/components' -export default NextError +interface WorkspaceErrorProps { + error: Error & { digest?: string } + reset: () => void +} + +export default function WorkspaceError({ error, reset }: WorkspaceErrorProps) { + return ( + + ) +} diff --git a/apps/sim/app/workspace/[workspaceId]/files/[fileId]/view/file-viewer.tsx b/apps/sim/app/workspace/[workspaceId]/files/[fileId]/view/file-viewer.tsx index fb858aa0d4..b939d50898 100644 --- a/apps/sim/app/workspace/[workspaceId]/files/[fileId]/view/file-viewer.tsx +++ b/apps/sim/app/workspace/[workspaceId]/files/[fileId]/view/file-viewer.tsx @@ -13,7 +13,7 @@ export function FileViewer({ file }: FileViewerProps) { const serveUrl = `/api/files/serve/${encodeURIComponent(file.key)}?context=workspace` return ( -
+