diff --git a/apps/docs/content/docs/en/tools/stt.mdx b/apps/docs/content/docs/en/tools/stt.mdx index 6920544178b..eecd270fe1c 100644 --- a/apps/docs/content/docs/en/tools/stt.mdx +++ b/apps/docs/content/docs/en/tools/stt.mdx @@ -120,7 +120,7 @@ Transcribe audio and video files to text using leading AI providers. Supports mu | --------- | ---- | -------- | ----------- | | `provider` | string | Yes | STT provider \(elevenlabs\) | | `apiKey` | string | Yes | ElevenLabs API key | -| `model` | string | No | ElevenLabs model to use \(scribe_v1, scribe_v1_experimental\) | +| `model` | string | No | ElevenLabs model to use \(scribe_v2\) | | `audioFile` | file | No | Audio or video file to transcribe \(e.g., MP3, WAV, M4A, WEBM\) | | `audioFileReference` | file | No | Reference to audio/video file from previous blocks | | `audioUrl` | string | No | URL to audio or video file | diff --git a/apps/realtime/src/handlers/index.ts b/apps/realtime/src/handlers/index.ts index 6ded2e54741..8977eea550a 100644 --- a/apps/realtime/src/handlers/index.ts +++ b/apps/realtime/src/handlers/index.ts @@ -2,6 +2,7 @@ import { setupConnectionHandlers } from '@/handlers/connection' import { setupOperationsHandlers } from '@/handlers/operations' import { setupPresenceHandlers } from '@/handlers/presence' import { setupSubblocksHandlers } from '@/handlers/subblocks' +import { setupTableHandlers } from '@/handlers/tables' import { setupVariablesHandlers } from '@/handlers/variables' import { setupWorkflowHandlers } from '@/handlers/workflow' import type { AuthenticatedSocket } from '@/middleware/auth' @@ -13,5 +14,6 @@ export function setupAllHandlers(socket: AuthenticatedSocket, roomManager: IRoom setupSubblocksHandlers(socket, roomManager) setupVariablesHandlers(socket, roomManager) setupPresenceHandlers(socket, roomManager) + setupTableHandlers(socket, roomManager) setupConnectionHandlers(socket, roomManager) } diff --git a/apps/realtime/src/handlers/tables.ts b/apps/realtime/src/handlers/tables.ts new file mode 100644 index 00000000000..ae9a7c6f003 --- /dev/null +++ b/apps/realtime/src/handlers/tables.ts @@ -0,0 +1,73 @@ +import { createLogger } from '@sim/logger' +import type { AuthenticatedSocket } from '@/middleware/auth' +import { verifyTableAccess } from '@/middleware/permissions' +import { type IRoomManager, tableRoomName } from '@/rooms/types' + +const logger = createLogger('TableHandlers') + +/** + * Wires `join-table` / `leave-table` socket events. Tables don't track presence + * or last-modified state — joining is a thin wrapper around `socket.join` so the + * Sim API → Realtime HTTP bridge can broadcast row updates back to subscribed clients. + */ +export function setupTableHandlers(socket: AuthenticatedSocket, _roomManager: IRoomManager) { + socket.on('join-table', async ({ tableId }: { tableId?: string }) => { + try { + if (!tableId || typeof tableId !== 'string') { + socket.emit('join-table-error', { + tableId: tableId ?? 
null, + error: 'tableId required', + code: 'INVALID_TABLE_ID', + retryable: false, + }) + return + } + + const userId = socket.userId + if (!userId) { + socket.emit('join-table-error', { + tableId, + error: 'Authentication required', + code: 'AUTHENTICATION_REQUIRED', + retryable: false, + }) + return + } + + const { hasAccess } = await verifyTableAccess(userId, tableId) + if (!hasAccess) { + socket.emit('join-table-error', { + tableId, + error: 'Access denied to table', + code: 'ACCESS_DENIED', + retryable: false, + }) + return + } + + const room = tableRoomName(tableId) + socket.join(room) + socket.emit('join-table-success', { tableId, socketId: socket.id }) + logger.debug(`Socket ${socket.id} (user ${userId}) joined ${room}`) + } catch (error) { + logger.error(`Error joining table room:`, error) + socket.emit('join-table-error', { + tableId: null, + error: 'Failed to join table', + code: 'JOIN_TABLE_FAILED', + retryable: true, + }) + } + }) + + socket.on('leave-table', async ({ tableId }: { tableId?: string }) => { + try { + if (!tableId || typeof tableId !== 'string') return + const room = tableRoomName(tableId) + socket.leave(room) + logger.debug(`Socket ${socket.id} left ${room}`) + } catch (error) { + logger.error(`Error leaving table room:`, error) + } + }) +} diff --git a/apps/realtime/src/middleware/permissions.ts b/apps/realtime/src/middleware/permissions.ts index dcc893b1478..db97b16f8a2 100644 --- a/apps/realtime/src/middleware/permissions.ts +++ b/apps/realtime/src/middleware/permissions.ts @@ -131,3 +131,51 @@ export async function verifyWorkflowAccess( return { hasAccess: false } } } + +/** + * Verify a user has read access to a table by virtue of workspace permission. + * Mirrors `verifyWorkflowAccess` for the table-room socket join check. 
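+ * Fails closed: a missing table, a missing workspace permission, or any lookup error
+ * resolves to `{ hasAccess: false }`.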
+ */
+export async function verifyTableAccess(
+  userId: string,
+  tableId: string
+): Promise<{ hasAccess: boolean; workspaceId?: string }> {
+  try {
+    const { userTableDefinitions, permissions } = await import('@sim/db')
+    const tableData = await db
+      .select({ workspaceId: userTableDefinitions.workspaceId })
+      .from(userTableDefinitions)
+      .where(and(eq(userTableDefinitions.id, tableId), isNull(userTableDefinitions.archivedAt)))
+      .limit(1)
+
+    if (!tableData.length) {
+      logger.warn(`Table ${tableId} not found`)
+      return { hasAccess: false }
+    }
+    const { workspaceId } = tableData[0]
+    if (!workspaceId) return { hasAccess: false }
+
+    const [permissionRow] = await db
+      .select({ permissionType: permissions.permissionType })
+      .from(permissions)
+      .where(
+        and(
+          eq(permissions.userId, userId),
+          eq(permissions.entityType, 'workspace'),
+          eq(permissions.entityId, workspaceId)
+        )
+      )
+      .limit(1)
+
+    if (!permissionRow?.permissionType) {
+      logger.warn(
+        `User ${userId} has no permission for workspace ${workspaceId} (table ${tableId})`
+      )
+      return { hasAccess: false }
+    }
+    return { hasAccess: true, workspaceId }
+  } catch (error) {
+    logger.error(`Error verifying table access for user ${userId}, table ${tableId}:`, error)
+    return { hasAccess: false }
+  }
+}
diff --git a/apps/realtime/src/rooms/memory-manager.ts b/apps/realtime/src/rooms/memory-manager.ts
index a032e785bb5..0cd37daf493 100644
--- a/apps/realtime/src/rooms/memory-manager.ts
+++ b/apps/realtime/src/rooms/memory-manager.ts
@@ -1,6 +1,13 @@
 import { createLogger } from '@sim/logger'
 import type { Server } from 'socket.io'
-import type { IRoomManager, UserPresence, UserSession, WorkflowRoom } from '@/rooms/types'
+import {
+  type IRoomManager,
+  type TableRowUpdatedPayload,
+  tableRoomName,
+  type UserPresence,
+  type UserSession,
+  type WorkflowRoom,
+} from '@/rooms/types'
 
 const logger = createLogger('MemoryRoomManager')
 
@@ -255,4 +262,23 @@ export class MemoryRoomManager implements IRoomManager {
     logger.info(`Notified ${room.users.size} users about workflow deployment change: ${workflowId}`)
   }
+
+  emitToTable<T>(tableId: string, event: string, payload: T): void {
+    this._io.to(tableRoomName(tableId)).emit(event, payload)
+  }
+
+  async handleTableRowUpdated(tableId: string, payload: TableRowUpdatedPayload): Promise<void> {
+    this.emitToTable(tableId, 'table-row-updated', { tableId, ...payload })
+  }
+
+  async handleTableRowDeleted(tableId: string, rowId: string): Promise<void> {
+    this.emitToTable(tableId, 'table-row-deleted', { tableId, rowId })
+  }
+
+  async handleTableDeleted(tableId: string): Promise<void> {
+    logger.info(`Handling table deletion notification for ${tableId}`)
+    this.emitToTable(tableId, 'table-deleted', { tableId, timestamp: Date.now() })
+    // Eject sockets so they don't hold a stale room. Cross-pod safe via socket.io.
+    await this._io.in(tableRoomName(tableId)).socketsLeave(tableRoomName(tableId))
+  }
 }
diff --git a/apps/realtime/src/rooms/redis-manager.ts b/apps/realtime/src/rooms/redis-manager.ts
index 0e6b3eadf2b..0fb41417906 100644
--- a/apps/realtime/src/rooms/redis-manager.ts
+++ b/apps/realtime/src/rooms/redis-manager.ts
@@ -1,7 +1,13 @@
 import { createLogger } from '@sim/logger'
 import { createClient, type RedisClientType } from 'redis'
 import type { Server } from 'socket.io'
-import type { IRoomManager, UserPresence, UserSession } from '@/rooms/types'
+import {
+  type IRoomManager,
+  type TableRowUpdatedPayload,
+  tableRoomName,
+  type UserPresence,
+  type UserSession,
+} from '@/rooms/types'
 
 const logger = createLogger('RedisRoomManager')
 
@@ -457,4 +463,23 @@ export class RedisRoomManager implements IRoomManager {
     const userCount = await this.getUniqueUserCount(workflowId)
     logger.info(`Notified ${userCount} users about workflow deployment change: ${workflowId}`)
   }
+
+  emitToTable<T>(tableId: string, event: string, payload: T): void {
+    this._io.to(tableRoomName(tableId)).emit(event, payload)
+  }
+
+  async handleTableRowUpdated(tableId: string, payload: TableRowUpdatedPayload): Promise<void> {
+    this.emitToTable(tableId, 'table-row-updated', { tableId, ...payload })
+  }
+
+  async handleTableRowDeleted(tableId: string, rowId: string): Promise<void> {
+    this.emitToTable(tableId, 'table-row-deleted', { tableId, rowId })
+  }
+
+  async handleTableDeleted(tableId: string): Promise<void> {
+    logger.info(`Handling table deletion notification for ${tableId}`)
+    this.emitToTable(tableId, 'table-deleted', { tableId, timestamp: Date.now() })
+    // Eject sockets across all pods via socket.io's Redis adapter.
+    await this._io.in(tableRoomName(tableId)).socketsLeave(tableRoomName(tableId))
+  }
 }
diff --git a/apps/realtime/src/rooms/types.ts b/apps/realtime/src/rooms/types.ts
index 9553a427e1e..9c15c967d54 100644
--- a/apps/realtime/src/rooms/types.ts
+++ b/apps/realtime/src/rooms/types.ts
@@ -143,4 +143,45 @@ export interface IRoomManager {
    * Handle workflow deployment change - notify users to refresh deployment state
    */
   handleWorkflowDeployed(workflowId: string): Promise<void>
+
+  /**
+   * Emit an event to all clients in a table room (`table:${tableId}`).
+   * Tables don't track presence/last-modified state — just pub/sub.
+   */
+  emitToTable<T>(tableId: string, event: string, payload: T): void
+
+  /**
+   * Notify all clients in a table room of a row write (insert/update/cell-state-change).
+   * Sim API calls this via the `/api/table-row-updated` HTTP bridge after every successful
+   * row commit; the client merges the delta into its React Query cache.
+   */
+  handleTableRowUpdated(tableId: string, payload: TableRowUpdatedPayload): Promise<void>
+
+  /**
+   * Notify all clients in a table room that a row has been deleted.
+   */
+  handleTableRowDeleted(tableId: string, rowId: string): Promise<void>
+
+  /**
+   * Notify all clients in a table room that the table has been deleted; eject sockets.
+   */
+  handleTableDeleted(tableId: string): Promise<void>
+}
+
+/**
+ * Payload broadcast on `table-row-updated`. Mirrors the shape of `TableRow.data` so
+ * the client can merge directly into its React Query rows cache. `position` and
+ * `updatedAt` are included for cache reconciliation; `data` is the full row data
+ * (not a per-cell delta) — see plan Notes.
+ */
+export interface TableRowUpdatedPayload {
+  rowId: string
+  data: Record<string, unknown>
+  /** Per-workflow-group execution state. Keyed by `WorkflowGroup.id`. */
+  executions?: Record<string, unknown>
+  position: number
+  updatedAt: string | number
+}
+
+/** Socket.IO room name for a table. Namespaced from workflow rooms. */
+export const tableRoomName = (tableId: string): string => `table:${tableId}`
diff --git a/apps/realtime/src/routes/http.ts b/apps/realtime/src/routes/http.ts
index 0f8ed73cc52..78cd89e63d9 100644
--- a/apps/realtime/src/routes/http.ts
+++ b/apps/realtime/src/routes/http.ts
@@ -150,6 +150,52 @@ export function createHttpHandler(roomManager: IRoomManager, logger: Logger) {
       return
     }
 
+    // Handle table row write notifications from the Sim API
+    if (req.method === 'POST' && req.url === '/api/table-row-updated') {
+      try {
+        const body = await readRequestBody(req)
+        const { tableId, rowId, data, executions, position, updatedAt } = JSON.parse(body)
+        await roomManager.handleTableRowUpdated(tableId, {
+          rowId,
+          data,
+          executions,
+          position,
+          updatedAt,
+        })
+        sendSuccess(res)
+      } catch (error) {
+        logger.error('Error handling table row update notification:', error)
+        sendError(res, 'Failed to process table row update')
+      }
+      return
+    }
+
+    if (req.method === 'POST' && req.url === '/api/table-row-deleted') {
+      try {
+        const body = await readRequestBody(req)
+        const { tableId, rowId } = JSON.parse(body)
+        await roomManager.handleTableRowDeleted(tableId, rowId)
+        sendSuccess(res)
+      } catch (error) {
+        logger.error('Error handling table row deletion notification:', error)
+        sendError(res, 'Failed to process table row deletion')
+      }
+      return
+    }
+
+    if (req.method === 'POST' && req.url === '/api/table-deleted') {
+      try {
+        const body = await readRequestBody(req)
+        const { tableId } = JSON.parse(body)
+        await roomManager.handleTableDeleted(tableId)
+        sendSuccess(res)
+      } catch (error) {
+        logger.error('Error handling table deletion notification:', error)
+        sendError(res, 'Failed to process table deletion')
+      }
+      return
+    }
+
     res.writeHead(404, { 'Content-Type': 'application/json' })
     res.end(JSON.stringify({ error: 'Not found' }))
   }
diff --git a/apps/sim/app/api/auth/oauth/disconnect/route.ts b/apps/sim/app/api/auth/oauth/disconnect/route.ts
index 9bef61cf08e..a8fa4cfa2b6 100644
--- a/apps/sim/app/api/auth/oauth/disconnect/route.ts
+++ b/apps/sim/app/api/auth/oauth/disconnect/route.ts
@@ -1,14 +1,15 @@
 import { AuditAction, AuditResourceType, recordAudit } from '@sim/audit'
 import { db } from '@sim/db'
-import { account, credentialSet, credentialSetMember } from '@sim/db/schema'
+import { account, credential, credentialSet, credentialSetMember } from '@sim/db/schema'
 import { createLogger } from '@sim/logger'
-import { and, eq, like, or } from 'drizzle-orm'
+import { and, eq, inArray, like, or } from 'drizzle-orm'
 import { type NextRequest, NextResponse } from 'next/server'
 import { disconnectOAuthContract } from '@/lib/api/contracts/oauth-connections'
 import { getValidationErrorMessage, parseRequest } from '@/lib/api/server'
 import { getSession } from '@/lib/auth'
 import { generateRequestId } from '@/lib/core/utils/request'
 import { withRouteHandler } from '@/lib/core/utils/with-route-handler'
+import { deleteCredential } from '@/lib/credentials/deletion'
 import { syncAllWebhooksForCredentialSet } from '@/lib/webhooks/utils.server'
 
 export const dynamic = 'force-dynamic'
@@ -52,27 +53,39 @@ export const POST = withRouteHandler(async (request: NextRequest) => {
     hasProviderId: !!providerId,
   })
 
-  // If a specific account row ID is provided, delete that exact account
-  if (accountId) {
-    await db
-      .delete(account)
-      .where(and(eq(account.userId,
session.user.id), eq(account.id, accountId))) - } else if (providerId) { - // If a specific providerId is provided, delete accounts for that provider ID - await db - .delete(account) - .where(and(eq(account.userId, session.user.id), eq(account.providerId, providerId))) - } else { - // Otherwise, delete all accounts for this provider - // Handle both exact matches (e.g., 'confluence') and prefixed matches (e.g., 'google-email') - await db - .delete(account) - .where( - and( + // Delete credentials before their accounts so deleteCredential can clear + // stored references first. Otherwise FK CASCADE would orphan them silently. + const accountFilter = accountId + ? and(eq(account.userId, session.user.id), eq(account.id, accountId)) + : providerId + ? and(eq(account.userId, session.user.id), eq(account.providerId, providerId)) + : and( eq(account.userId, session.user.id), or(eq(account.providerId, provider), like(account.providerId, `${provider}-%`)) ) - ) + + const targetAccounts = await db.select({ id: account.id }).from(account).where(accountFilter) + + const targetAccountIds = targetAccounts.map((a) => a.id) + + if (targetAccountIds.length > 0) { + const credentialsToDelete = await db + .select({ id: credential.id }) + .from(credential) + .where(inArray(credential.accountId, targetAccountIds)) + + for (const cred of credentialsToDelete) { + await deleteCredential({ + credentialId: cred.id, + actorId: session.user.id, + actorName: session.user.name, + actorEmail: session.user.email, + reason: 'oauth_disconnect', + request, + }) + } + + await db.delete(account).where(inArray(account.id, targetAccountIds)) } // Sync webhooks for all credential sets the user is a member of diff --git a/apps/sim/app/api/chat/[identifier]/route.ts b/apps/sim/app/api/chat/[identifier]/route.ts index 24e6b709997..a6dff447355 100644 --- a/apps/sim/app/api/chat/[identifier]/route.ts +++ b/apps/sim/app/api/chat/[identifier]/route.ts @@ -149,7 +149,9 @@ export const POST = withRouteHandler( request ) - setChatAuthCookie(response, deployment.id, deployment.authType, deployment.password) + if (deployment.authType !== 'sso') { + setChatAuthCookie(response, deployment.id, deployment.authType, deployment.password) + } return response } @@ -358,6 +360,7 @@ export const GET = withRouteHandler( if ( deployment.authType !== 'public' && + deployment.authType !== 'sso' && authCookie && validateAuthToken(authCookie.value, deployment.id, deployment.password) ) { diff --git a/apps/sim/app/api/chat/[identifier]/sso/route.ts b/apps/sim/app/api/chat/[identifier]/sso/route.ts new file mode 100644 index 00000000000..812f27df5b3 --- /dev/null +++ b/apps/sim/app/api/chat/[identifier]/sso/route.ts @@ -0,0 +1,81 @@ +import { db } from '@sim/db' +import { chat } from '@sim/db/schema' +import { createLogger } from '@sim/logger' +import { and, eq, isNull } from 'drizzle-orm' +import type { NextRequest } from 'next/server' +import { chatSSOContract } from '@/lib/api/contracts/chats' +import { parseRequest } from '@/lib/api/server' +import type { TokenBucketConfig } from '@/lib/core/rate-limiter' +import { RateLimiter } from '@/lib/core/rate-limiter' +import { addCorsHeaders, isEmailAllowed } from '@/lib/core/security/deployment' +import { generateRequestId, getClientIp } from '@/lib/core/utils/request' +import { withRouteHandler } from '@/lib/core/utils/with-route-handler' +import { createErrorResponse, createSuccessResponse } from '@/app/api/workflows/utils' + +const logger = createLogger('ChatSSOAPI') + +export const dynamic = 'force-dynamic' 
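+// Per-request evaluation on the Node runtime: the rate-limit bucket and the allowlist
+// lookup both depend on live server state, so this route must not be statically cached.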
+export const runtime = 'nodejs' + +const rateLimiter = new RateLimiter() + +const SSO_IP_RATE_LIMIT: TokenBucketConfig = { + maxTokens: 20, + refillRate: 20, + refillIntervalMs: 15 * 60_000, +} + +export const POST = withRouteHandler( + async (request: NextRequest, context: { params: Promise<{ identifier: string }> }) => { + const requestId = generateRequestId() + + const ip = getClientIp(request) + if (ip !== 'unknown') { + const ipRateLimit = await rateLimiter.checkRateLimitDirect( + `chat-sso:ip:${ip}`, + SSO_IP_RATE_LIMIT + ) + if (!ipRateLimit.allowed) { + logger.warn(`[${requestId}] SSO eligibility rate limit exceeded from ${ip}`) + const retryAfter = Math.ceil( + (ipRateLimit.retryAfterMs ?? SSO_IP_RATE_LIMIT.refillIntervalMs) / 1000 + ) + const response = createErrorResponse('Too many requests. Please try again later.', 429) + response.headers.set('Retry-After', String(retryAfter)) + return addCorsHeaders(response, request) + } + } + + const parsed = await parseRequest(chatSSOContract, request, context) + if (!parsed.success) return parsed.response + + const { identifier } = parsed.data.params + const { email } = parsed.data.body + + const [deployment] = await db + .select({ + authType: chat.authType, + allowedEmails: chat.allowedEmails, + isActive: chat.isActive, + }) + .from(chat) + .where(and(eq(chat.identifier, identifier), isNull(chat.archivedAt))) + .limit(1) + + if (!deployment || !deployment.isActive) { + logger.warn(`[${requestId}] SSO check on missing/inactive chat: ${identifier}`) + return addCorsHeaders(createErrorResponse('Chat not found', 404), request) + } + + if (deployment.authType !== 'sso') { + return addCorsHeaders( + createErrorResponse('Chat is not configured for SSO authentication', 400), + request + ) + } + + const eligible = isEmailAllowed(email, (deployment.allowedEmails as string[]) || []) + + return addCorsHeaders(createSuccessResponse({ eligible }), request) + } +) diff --git a/apps/sim/app/api/chat/utils.test.ts b/apps/sim/app/api/chat/utils.test.ts index 31401d6b5ec..60395c0bbd1 100644 --- a/apps/sim/app/api/chat/utils.test.ts +++ b/apps/sim/app/api/chat/utils.test.ts @@ -19,6 +19,7 @@ const { mockSetDeploymentAuthCookie, mockAddCorsHeaders, mockIsEmailAllowed, + mockGetSession, } = vi.hoisted(() => ({ mockMergeSubblockStateWithValues: vi.fn().mockReturnValue({}), mockMergeSubBlockValues: vi.fn().mockReturnValue({}), @@ -26,6 +27,12 @@ const { mockSetDeploymentAuthCookie: vi.fn(), mockAddCorsHeaders: vi.fn((response: unknown) => response), mockIsEmailAllowed: vi.fn(), + mockGetSession: vi.fn(), +})) + +vi.mock('@/lib/auth', () => ({ + auth: { api: { getSession: vi.fn() } }, + getSession: mockGetSession, })) const mockDecryptSecret = encryptionMockFns.mockDecryptSecret @@ -285,6 +292,68 @@ describe('Chat API Utils', () => { expect(result3.authorized).toBe(false) expect(result3.error).toBe('Email not authorized') }) + + describe('SSO auth', () => { + const ssoDeployment = { + id: 'chat-id', + authType: 'sso', + allowedEmails: ['user@example.com', '@company.com'], + } + + const postRequest = { + method: 'POST', + cookies: { get: vi.fn().mockReturnValue(null) }, + } as any + + it('rejects when no session is present', async () => { + mockGetSession.mockResolvedValue(null) + + const result = await validateChatAuth('request-id', ssoDeployment, postRequest, { + input: 'hello', + }) + + expect(result.authorized).toBe(false) + expect(result.error).toBe('auth_required_sso') + }) + + it('ignores body-supplied email and uses the session email', async () => { + 
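+        // Guards against spoofing: the allowlist check must key off the session email,
+        // never the attacker-controlled request body.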
mockGetSession.mockResolvedValue({ user: { email: 'session@example.com' } }) + mockIsEmailAllowed.mockReturnValue(true) + + await validateChatAuth('request-id', ssoDeployment, postRequest, { + email: 'attacker@evil.com', + input: 'hello', + }) + + expect(mockIsEmailAllowed).toHaveBeenCalledWith( + 'session@example.com', + ssoDeployment.allowedEmails + ) + }) + + it('authorizes execution when session email is allowlisted', async () => { + mockGetSession.mockResolvedValue({ user: { email: 'user@example.com' } }) + mockIsEmailAllowed.mockReturnValue(true) + + const result = await validateChatAuth('request-id', ssoDeployment, postRequest, { + input: 'hello', + }) + + expect(result.authorized).toBe(true) + }) + + it('rejects execution when session email is not allowlisted', async () => { + mockGetSession.mockResolvedValue({ user: { email: 'stranger@other.com' } }) + mockIsEmailAllowed.mockReturnValue(false) + + const result = await validateChatAuth('request-id', ssoDeployment, postRequest, { + input: 'hello', + }) + + expect(result.authorized).toBe(false) + expect(result.error).toBe('Your email is not authorized to access this chat') + }) + }) }) describe('Execution Result Processing', () => { diff --git a/apps/sim/app/api/chat/utils.ts b/apps/sim/app/api/chat/utils.ts index 3909dd599fe..5a3d0750e8d 100644 --- a/apps/sim/app/api/chat/utils.ts +++ b/apps/sim/app/api/chat/utils.ts @@ -95,11 +95,13 @@ export async function validateChatAuth( return { authorized: true } } - const cookieName = `chat_auth_${deployment.id}` - const authCookie = request.cookies.get(cookieName) + if (authType !== 'sso') { + const cookieName = `chat_auth_${deployment.id}` + const authCookie = request.cookies.get(cookieName) - if (authCookie && validateAuthToken(authCookie.value, deployment.id, deployment.password)) { - return { authorized: true } + if (authCookie && validateAuthToken(authCookie.value, deployment.id, deployment.password)) { + return { authorized: true } + } } if (authType === 'password') { @@ -173,35 +175,11 @@ export async function validateChatAuth( } if (authType === 'sso') { - if (request.method === 'GET') { - return { authorized: false, error: 'auth_required_sso' } - } - try { - if (!parsedBody) { + if (request.method !== 'GET' && !parsedBody) { return { authorized: false, error: 'SSO authentication is required' } } - const { email, input, checkSSOAccess } = parsedBody - - if (input && !checkSSOAccess) { - return { authorized: false, error: 'auth_required_sso' } - } - - if (checkSSOAccess) { - if (!email) { - return { authorized: false, error: 'Email is required' } - } - - const allowedEmails = deployment.allowedEmails || [] - - if (isEmailAllowed(email, allowedEmails)) { - return { authorized: true } - } - - return { authorized: false, error: 'Email not authorized for SSO access' } - } - const { getSession } = await import('@/lib/auth') const session = await getSession() diff --git a/apps/sim/app/api/credentials/[id]/route.ts b/apps/sim/app/api/credentials/[id]/route.ts index 01502fa904c..0d3939f8d25 100644 --- a/apps/sim/app/api/credentials/[id]/route.ts +++ b/apps/sim/app/api/credentials/[id]/route.ts @@ -11,6 +11,7 @@ import { getSession } from '@/lib/auth' import { encryptSecret } from '@/lib/core/security/encryption' import { withRouteHandler } from '@/lib/core/utils/with-route-handler' import { getCredentialActorContext } from '@/lib/credentials/access' +import { deleteCredential } from '@/lib/credentials/deletion' import { deleteWorkspaceEnvCredentials, syncPersonalEnvCredentialsForUser, @@ 
-333,7 +334,14 @@ export const DELETE = withRouteHandler( return NextResponse.json({ success: true }, { status: 200 }) } - await db.delete(credential).where(eq(credential.id, id)) + await deleteCredential({ + credentialId: id, + actorId: session.user.id, + actorName: session.user.name, + actorEmail: session.user.email, + reason: 'user_delete', + request, + }) captureServerEvent( session.user.id, @@ -346,23 +354,6 @@ export const DELETE = withRouteHandler( { groups: { workspace: access.credential.workspaceId } } ) - recordAudit({ - workspaceId: access.credential.workspaceId, - actorId: session.user.id, - actorName: session.user.name, - actorEmail: session.user.email, - action: AuditAction.CREDENTIAL_DELETED, - resourceType: AuditResourceType.CREDENTIAL, - resourceId: id, - resourceName: access.credential.displayName, - description: `Deleted ${access.credential.type} credential "${access.credential.displayName}"`, - metadata: { - credentialType: access.credential.type, - providerId: access.credential.providerId, - }, - request, - }) - return NextResponse.json({ success: true }, { status: 200 }) } catch (error) { logger.error('Failed to delete credential', error) diff --git a/apps/sim/app/api/files/delete/route.test.ts b/apps/sim/app/api/files/delete/route.test.ts index 977902d0be0..df3bad6170f 100644 --- a/apps/sim/app/api/files/delete/route.test.ts +++ b/apps/sim/app/api/files/delete/route.test.ts @@ -1,28 +1,25 @@ /** * @vitest-environment node */ -import { authMockFns, hybridAuthMockFns } from '@sim/testing' +import { + authMockFns, + hybridAuthMockFns, + storageServiceMock, + storageServiceMockFns, +} from '@sim/testing' import { beforeEach, describe, expect, it, vi } from 'vitest' const mocks = vi.hoisted(() => { const mockVerifyFileAccess = vi.fn() const mockVerifyWorkspaceFileAccess = vi.fn() - const mockDeleteFile = vi.fn() - const mockHasCloudStorage = vi.fn() const mockGetStorageProvider = vi.fn() const mockIsUsingCloudStorage = vi.fn() - const mockUploadFile = vi.fn() - const mockDownloadFile = vi.fn() return { mockVerifyFileAccess, mockVerifyWorkspaceFileAccess, - mockDeleteFile, - mockHasCloudStorage, mockGetStorageProvider, mockIsUsingCloudStorage, - mockUploadFile, - mockDownloadFile, } }) @@ -68,23 +65,18 @@ vi.mock('@/lib/uploads', () => ({ getStorageProvider: mocks.mockGetStorageProvider, isUsingCloudStorage: mocks.mockIsUsingCloudStorage, StorageService: { - uploadFile: mocks.mockUploadFile, - downloadFile: mocks.mockDownloadFile, - deleteFile: mocks.mockDeleteFile, - hasCloudStorage: mocks.mockHasCloudStorage, + uploadFile: storageServiceMockFns.mockUploadFile, + downloadFile: storageServiceMockFns.mockDownloadFile, + deleteFile: storageServiceMockFns.mockDeleteFile, + hasCloudStorage: storageServiceMockFns.mockHasCloudStorage, }, - uploadFile: mocks.mockUploadFile, - downloadFile: mocks.mockDownloadFile, - deleteFile: mocks.mockDeleteFile, - hasCloudStorage: mocks.mockHasCloudStorage, + uploadFile: storageServiceMockFns.mockUploadFile, + downloadFile: storageServiceMockFns.mockDownloadFile, + deleteFile: storageServiceMockFns.mockDeleteFile, + hasCloudStorage: storageServiceMockFns.mockHasCloudStorage, })) -vi.mock('@/lib/uploads/core/storage-service', () => ({ - uploadFile: mocks.mockUploadFile, - downloadFile: mocks.mockDownloadFile, - deleteFile: mocks.mockDeleteFile, - hasCloudStorage: mocks.mockHasCloudStorage, -})) +vi.mock('@/lib/uploads/core/storage-service', () => storageServiceMock) vi.mock('@/lib/uploads/server/metadata', () => ({ deleteFileMetadata: 
vi.fn().mockResolvedValue(undefined), @@ -117,14 +109,14 @@ describe('File Delete API Route', () => { }) mocks.mockVerifyFileAccess.mockResolvedValue(true) mocks.mockVerifyWorkspaceFileAccess.mockResolvedValue(true) - mocks.mockDeleteFile.mockResolvedValue(undefined) - mocks.mockHasCloudStorage.mockReturnValue(true) + storageServiceMockFns.mockDeleteFile.mockResolvedValue(undefined) + storageServiceMockFns.mockHasCloudStorage.mockReturnValue(true) mocks.mockGetStorageProvider.mockReturnValue('s3') mocks.mockIsUsingCloudStorage.mockReturnValue(true) }) it('should handle local file deletion successfully', async () => { - mocks.mockHasCloudStorage.mockReturnValue(false) + storageServiceMockFns.mockHasCloudStorage.mockReturnValue(false) mocks.mockGetStorageProvider.mockReturnValue('local') mocks.mockIsUsingCloudStorage.mockReturnValue(false) @@ -142,7 +134,7 @@ describe('File Delete API Route', () => { }) it('should handle file not found gracefully', async () => { - mocks.mockHasCloudStorage.mockReturnValue(false) + storageServiceMockFns.mockHasCloudStorage.mockReturnValue(false) mocks.mockGetStorageProvider.mockReturnValue('local') mocks.mockIsUsingCloudStorage.mockReturnValue(false) @@ -170,7 +162,7 @@ describe('File Delete API Route', () => { expect(data).toHaveProperty('success', true) expect(data).toHaveProperty('message', 'File deleted successfully') - expect(mocks.mockDeleteFile).toHaveBeenCalledWith({ + expect(storageServiceMockFns.mockDeleteFile).toHaveBeenCalledWith({ key: 'workspace/test-workspace-id/1234567890-test-file.txt', context: 'workspace', }) @@ -190,7 +182,7 @@ describe('File Delete API Route', () => { expect(data).toHaveProperty('success', true) expect(data).toHaveProperty('message', 'File deleted successfully') - expect(mocks.mockDeleteFile).toHaveBeenCalledWith({ + expect(storageServiceMockFns.mockDeleteFile).toHaveBeenCalledWith({ key: 'workspace/test-workspace-id/1234567890-test-document.pdf', context: 'workspace', }) diff --git a/apps/sim/app/api/files/export/[id]/route.ts b/apps/sim/app/api/files/export/[id]/route.ts new file mode 100644 index 00000000000..ba5b4366803 --- /dev/null +++ b/apps/sim/app/api/files/export/[id]/route.ts @@ -0,0 +1,153 @@ +import path from 'node:path' +import { createLogger } from '@sim/logger' +import { toError } from '@sim/utils/errors' +import JSZip from 'jszip' +import type { NextRequest } from 'next/server' +import { NextResponse } from 'next/server' +import { fileExportContract } from '@/lib/api/contracts/storage-transfer' +import { parseRequest } from '@/lib/api/server' +import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid' +import { withRouteHandler } from '@/lib/core/utils/with-route-handler' +import type { StorageContext } from '@/lib/uploads/config' +import { USE_BLOB_STORAGE } from '@/lib/uploads/config' +import { downloadFile } from '@/lib/uploads/core/storage-service' +import { getFileMetadataById } from '@/lib/uploads/server/metadata' +import { verifyFileAccess } from '@/app/api/files/authorization' + +const logger = createLogger('FilesExportAPI') + +const MARKDOWN_MIME_TYPES = new Set(['text/markdown', 'text/x-markdown']) +const MARKDOWN_EXTENSIONS = new Set(['md', 'markdown']) +const VIEW_URL_RE = + /\/api\/files\/view\/([0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12})/gi +const MAX_EMBEDDED_IMAGES = 50 + +function isMarkdown(originalName: string, contentType: string): boolean { + if (MARKDOWN_MIME_TYPES.has(contentType)) return true + const ext = originalName.split('.').pop()?.toLowerCase() ?? 
''
+  return MARKDOWN_EXTENSIONS.has(ext)
+}
+
+function safeFilename(name: string): string {
+  return path
+    .basename(name)
+    .replace(/["\\]/g, '_')
+    .replace(/[\r\n\t]/g, '')
+}
+
+function deduplicatedFilename(preferred: string, existing: Set<string>, imageId: string): string {
+  if (!existing.has(preferred)) return preferred
+  const ext = path.extname(preferred)
+  const base = path.basename(preferred, ext)
+  const short = `${base}_${imageId.slice(0, 8)}${ext}`
+  if (!existing.has(short)) return short
+  return `${base}_${imageId}${ext}`
+}
+
+export const GET = withRouteHandler(
+  async (request: NextRequest, context: { params: Promise<{ id: string }> }) => {
+    const parsed = await parseRequest(fileExportContract, request, context)
+    if (!parsed.success) return parsed.response
+
+    const { id } = parsed.data.params
+
+    const authResult = await checkSessionOrInternalAuth(request, { requireWorkflowId: false })
+    if (!authResult.success || !authResult.userId) {
+      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
+    }
+    const userId = authResult.userId
+
+    const record = await getFileMetadataById(id)
+    if (!record) {
+      logger.warn('File not found by ID', { id })
+      return NextResponse.json({ error: 'Not found' }, { status: 404 })
+    }
+
+    const hasAccess = await verifyFileAccess(record.key, userId)
+    if (!hasAccess) {
+      logger.warn('Unauthorized file export attempt', { id, userId })
+      return NextResponse.json({ error: 'Forbidden' }, { status: 403 })
+    }
+
+    if (!isMarkdown(record.originalName, record.contentType)) {
+      const storagePrefix = USE_BLOB_STORAGE ? 'blob' : 's3'
+      const servePath = `/api/files/serve/${storagePrefix}/${encodeURIComponent(record.key)}`
+      return NextResponse.redirect(new URL(servePath, request.url), { status: 302 })
+    }
+
+    const mdBuffer = await downloadFile({
+      key: record.key,
+      context: record.context as StorageContext,
+    })
+    let mdContent = mdBuffer.toString('utf-8')
+
+    const imageIds = [...new Set([...mdContent.matchAll(VIEW_URL_RE)].map((m) => m[1]))].slice(
+      0,
+      MAX_EMBEDDED_IMAGES
+    )
+
+    logger.info('Exporting markdown with embedded images', { id, imageCount: imageIds.length })
+
+    const fetchResults = await Promise.allSettled(
+      imageIds.map(async (imageId) => {
+        const imgRecord = await getFileMetadataById(imageId)
+        if (!imgRecord) return null
+        const imgHasAccess = await verifyFileAccess(imgRecord.key, userId)
+        if (!imgHasAccess) return null
+        const imgBuffer = await downloadFile({
+          key: imgRecord.key,
+          context: imgRecord.context as StorageContext,
+        })
+        return { imageId, originalName: imgRecord.originalName, buffer: imgBuffer }
+      })
+    )
+
+    const assetMap = new Map<string, { filename: string; buffer: Buffer }>()
+    const usedFilenames = new Set<string>()
+
+    for (let i = 0; i < fetchResults.length; i++) {
+      const result = fetchResults[i]
+      if (result.status === 'rejected') {
+        logger.warn('Failed to fetch asset for export', {
+          imageId: imageIds[i],
+          error: toError(result.reason).message,
+        })
+        continue
+      }
+      if (!result.value) continue
+      const { imageId, originalName, buffer } = result.value
+      const preferred = safeFilename(originalName)
+      const filename = deduplicatedFilename(preferred, usedFilenames, imageId)
+      usedFilenames.add(filename)
+      assetMap.set(imageId, { filename, buffer })
+    }
+
+    for (const [imageId, asset] of assetMap) {
+      const escapedId = imageId.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')
+      const replacement = `./assets/${asset.filename}`
+      mdContent = mdContent.replace(
+        new RegExp(`/api/files/view/${escapedId}`, 'g'),
+        () => replacement
+      )
+    }
+
+    const zip = new
JSZip() + zip.file(safeFilename(record.originalName), mdContent) + const assetsFolder = zip.folder('assets')! + for (const { filename, buffer } of assetMap.values()) { + assetsFolder.file(filename, buffer) + } + + const zipBuffer = await zip.generateAsync({ type: 'nodebuffer', compression: 'DEFLATE' }) + const zipName = safeFilename(`${record.originalName.replace(/\.[^.]+$/, '')}.zip`) + + return new NextResponse(new Uint8Array(zipBuffer), { + status: 200, + headers: { + 'Content-Type': 'application/zip', + 'Content-Disposition': `attachment; filename="${zipName}"`, + 'Content-Length': String(zipBuffer.length), + }, + }) + } +) diff --git a/apps/sim/app/api/files/multipart/route.test.ts b/apps/sim/app/api/files/multipart/route.test.ts new file mode 100644 index 00000000000..b70fed81b82 --- /dev/null +++ b/apps/sim/app/api/files/multipart/route.test.ts @@ -0,0 +1,202 @@ +/** + * @vitest-environment node + */ +import { authMockFns, permissionsMock, permissionsMockFns } from '@sim/testing' +import { NextRequest } from 'next/server' +import { beforeEach, describe, expect, it, vi } from 'vitest' + +const { + mockIsUsingCloudStorage, + mockGetStorageProvider, + mockGetStorageConfig, + mockCompleteS3MultipartUpload, + mockCompleteBlobMultipartUpload, + mockDeriveBlobBlockId, + mockVerifyUploadToken, + mockSignUploadToken, +} = vi.hoisted(() => ({ + mockIsUsingCloudStorage: vi.fn(), + mockGetStorageProvider: vi.fn(), + mockGetStorageConfig: vi.fn(), + mockCompleteS3MultipartUpload: vi.fn(), + mockCompleteBlobMultipartUpload: vi.fn(), + mockDeriveBlobBlockId: vi.fn(), + mockVerifyUploadToken: vi.fn(), + mockSignUploadToken: vi.fn(), +})) + +vi.mock('@/lib/uploads', () => ({ + isUsingCloudStorage: mockIsUsingCloudStorage, + getStorageProvider: mockGetStorageProvider, + getStorageConfig: mockGetStorageConfig, +})) + +vi.mock('@/lib/uploads/core/upload-token', () => ({ + signUploadToken: mockSignUploadToken, + verifyUploadToken: mockVerifyUploadToken, +})) + +vi.mock('@/lib/uploads/providers/s3/client', () => ({ + completeS3MultipartUpload: mockCompleteS3MultipartUpload, + initiateS3MultipartUpload: vi.fn(), + getS3MultipartPartUrls: vi.fn(), + abortS3MultipartUpload: vi.fn(), +})) + +vi.mock('@/lib/uploads/providers/blob/client', () => ({ + completeMultipartUpload: mockCompleteBlobMultipartUpload, + deriveBlobBlockId: mockDeriveBlobBlockId, + initiateMultipartUpload: vi.fn(), + getMultipartPartUrls: vi.fn(), + abortMultipartUpload: vi.fn(), +})) + +vi.mock('@/lib/workspaces/permissions/utils', () => permissionsMock) + +import { POST } from '@/app/api/files/multipart/route' + +const tokenPayload = { + uploadId: 'upload-1', + key: 'workspace/ws-1/123-abc-file.bin', + userId: 'user-1', + workspaceId: 'ws-1', + context: 'workspace' as const, +} + +const makeRequest = (action: string, body: unknown) => + new NextRequest(`http://localhost/api/files/multipart?action=${action}`, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify(body), + }) + +describe('POST /api/files/multipart action=complete', () => { + beforeEach(() => { + vi.clearAllMocks() + authMockFns.mockGetSession.mockResolvedValue({ user: { id: 'user-1' } }) + permissionsMockFns.mockGetUserEntityPermissions.mockResolvedValue('write') + mockIsUsingCloudStorage.mockReturnValue(true) + mockGetStorageConfig.mockReturnValue({ bucket: 'b', region: 'r' }) + mockVerifyUploadToken.mockReturnValue({ valid: true, payload: tokenPayload }) + mockSignUploadToken.mockReturnValue('signed-token') + 
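+    // Both provider mocks resolve the same completion shape; each test selects the
+    // active provider via mockGetStorageProvider.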
mockCompleteS3MultipartUpload.mockResolvedValue({ + location: 'loc', + path: '/api/files/serve/...', + key: tokenPayload.key, + }) + mockCompleteBlobMultipartUpload.mockResolvedValue({ + location: 'loc', + path: '/api/files/serve/...', + key: tokenPayload.key, + }) + mockDeriveBlobBlockId.mockImplementation( + (n: number) => `block-${n.toString().padStart(6, '0')}` + ) + }) + + it('rejects parts without partNumber', async () => { + mockGetStorageProvider.mockReturnValue('s3') + const res = await POST( + makeRequest('complete', { + uploadToken: 'tok', + parts: [{ etag: 'abc' }], + }) + ) + expect(res.status).toBe(400) + expect(mockCompleteS3MultipartUpload).not.toHaveBeenCalled() + }) + + it('S3 path requires etag and forwards { ETag, PartNumber }', async () => { + mockGetStorageProvider.mockReturnValue('s3') + + const missingEtag = await POST( + makeRequest('complete', { + uploadToken: 'tok', + parts: [{ partNumber: 1 }], + }) + ) + expect(missingEtag.status).toBe(500) + + mockCompleteS3MultipartUpload.mockClear() + + const ok = await POST( + makeRequest('complete', { + uploadToken: 'tok', + parts: [ + { partNumber: 1, etag: 'aaa' }, + { partNumber: 2, etag: 'bbb' }, + ], + }) + ) + expect(ok.status).toBe(200) + expect(mockCompleteS3MultipartUpload).toHaveBeenCalledWith( + tokenPayload.key, + tokenPayload.uploadId, + [ + { ETag: 'aaa', PartNumber: 1 }, + { ETag: 'bbb', PartNumber: 2 }, + ], + expect.any(Object) + ) + }) + + it('Blob path derives blockId from partNumber and ignores etag', async () => { + mockGetStorageProvider.mockReturnValue('blob') + mockGetStorageConfig.mockReturnValue({ + containerName: 'c', + accountName: 'a', + accountKey: 'k', + }) + + const res = await POST( + makeRequest('complete', { + uploadToken: 'tok', + parts: [{ partNumber: 1, etag: 'irrelevant' }, { partNumber: 2 }], + }) + ) + + expect(res.status).toBe(200) + expect(mockDeriveBlobBlockId).toHaveBeenCalledWith(1) + expect(mockDeriveBlobBlockId).toHaveBeenCalledWith(2) + expect(mockCompleteBlobMultipartUpload).toHaveBeenCalledWith( + tokenPayload.key, + [ + { partNumber: 1, blockId: 'block-000001' }, + { partNumber: 2, blockId: 'block-000002' }, + ], + expect.objectContaining({ containerName: 'c' }) + ) + }) + + it('returns 403 when token is invalid', async () => { + mockGetStorageProvider.mockReturnValue('s3') + mockVerifyUploadToken.mockReturnValueOnce({ valid: false }) + const res = await POST( + makeRequest('complete', { + uploadToken: 'bad', + parts: [{ partNumber: 1, etag: 'a' }], + }) + ) + expect(res.status).toBe(403) + }) + + it('batch complete normalizes per upload', async () => { + mockGetStorageProvider.mockReturnValue('s3') + const res = await POST( + makeRequest('complete', { + uploads: [ + { + uploadToken: 'tok-a', + parts: [{ partNumber: 1, etag: 'aaa' }], + }, + { + uploadToken: 'tok-b', + parts: [{ partNumber: 1, etag: 'bbb' }], + }, + ], + }) + ) + expect(res.status).toBe(200) + expect(mockCompleteS3MultipartUpload).toHaveBeenCalledTimes(2) + }) +}) diff --git a/apps/sim/app/api/files/multipart/route.ts b/apps/sim/app/api/files/multipart/route.ts index 0b9ddf07c3d..80213d54cbb 100644 --- a/apps/sim/app/api/files/multipart/route.ts +++ b/apps/sim/app/api/files/multipart/route.ts @@ -22,6 +22,7 @@ import { type UploadTokenPayload, verifyUploadToken, } from '@/lib/uploads/core/upload-token' +import type { StorageConfig } from '@/lib/uploads/shared/types' import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils' const logger = createLogger('MultipartUploadAPI') @@ -39,6 
+40,37 @@ const ALLOWED_UPLOAD_CONTEXTS = new Set([ 'workspace-logos', ]) +/** + * Unified part identity sent by the client when completing a multipart upload. + * `etag` is required for S3 (CompleteMultipartUpload). For Azure the server + * derives the block id from `partNumber` via {@link deriveBlobBlockId}. + */ +interface ClientCompletedPart { + partNumber: number + etag?: string +} + +const isClientCompletedParts = (value: unknown): value is ClientCompletedPart[] => + Array.isArray(value) && + value.every( + (p) => + p !== null && + typeof p === 'object' && + typeof (p as ClientCompletedPart).partNumber === 'number' && + ((p as ClientCompletedPart).etag === undefined || + typeof (p as ClientCompletedPart).etag === 'string') + ) + +const buildS3CustomConfig = (config: StorageConfig) => + config.bucket && config.region ? { bucket: config.bucket, region: config.region } : undefined + +const buildBlobCustomConfig = (config: StorageConfig) => ({ + containerName: config.containerName!, + accountName: config.accountName!, + accountKey: config.accountKey, + connectionString: config.connectionString, +}) + const verifyTokenForUser = (token: string | undefined, userId: string) => { if (!token || typeof token !== 'string') { return null @@ -103,12 +135,44 @@ export const POST = withRouteHandler(async (request: NextRequest) => { const config = getStorageConfig(storageContext) + let customKey: string | undefined + if (context === 'workspace') { + const { MAX_WORKSPACE_FILE_SIZE } = await import('@/lib/uploads/shared/types') + if (typeof fileSize === 'number' && fileSize > MAX_WORKSPACE_FILE_SIZE) { + return NextResponse.json( + { error: `File size exceeds maximum of ${MAX_WORKSPACE_FILE_SIZE} bytes` }, + { status: 413 } + ) + } + + const { generateWorkspaceFileKey } = await import( + '@/lib/uploads/contexts/workspace/workspace-file-manager' + ) + customKey = generateWorkspaceFileKey(workspaceId, fileName) + + const { checkStorageQuota } = await import('@/lib/billing/storage') + const quotaCheck = await checkStorageQuota(userId, fileSize) + if (!quotaCheck.allowed) { + return NextResponse.json( + { error: quotaCheck.error || 'Storage limit exceeded' }, + { status: 413 } + ) + } + } + let uploadId: string let key: string if (storageProvider === 's3') { const { initiateS3MultipartUpload } = await import('@/lib/uploads/providers/s3/client') - const result = await initiateS3MultipartUpload({ fileName, contentType, fileSize }) + const result = await initiateS3MultipartUpload({ + fileName, + contentType, + fileSize, + customConfig: buildS3CustomConfig(config), + customKey, + purpose: context, + }) uploadId = result.uploadId key = result.key } else if (storageProvider === 'blob') { @@ -117,12 +181,8 @@ export const POST = withRouteHandler(async (request: NextRequest) => { fileName, contentType, fileSize, - customConfig: { - containerName: config.containerName!, - accountName: config.accountName!, - accountKey: config.accountKey, - connectionString: config.connectionString, - }, + customConfig: buildBlobCustomConfig(config), + customKey, }) uploadId = result.uploadId key = result.key @@ -173,17 +233,21 @@ export const POST = withRouteHandler(async (request: NextRequest) => { if (storageProvider === 's3') { const { getS3MultipartPartUrls } = await import('@/lib/uploads/providers/s3/client') - const presignedUrls = await getS3MultipartPartUrls(key, uploadId, partNumbers) + const presignedUrls = await getS3MultipartPartUrls( + key, + uploadId, + partNumbers, + buildS3CustomConfig(config) + ) return 
NextResponse.json({ presignedUrls }) } if (storageProvider === 'blob') { const { getMultipartPartUrls } = await import('@/lib/uploads/providers/blob/client') - const presignedUrls = await getMultipartPartUrls(key, partNumbers, { - containerName: config.containerName!, - accountName: config.accountName!, - accountKey: config.accountKey, - connectionString: config.connectionString, - }) + const presignedUrls = await getMultipartPartUrls( + key, + partNumbers, + buildBlobCustomConfig(config) + ) return NextResponse.json({ presignedUrls }) } @@ -207,60 +271,83 @@ export const POST = withRouteHandler(async (request: NextRequest) => { const data: CompleteMultipartBody = parsed.data.body - if ('uploads' in data && Array.isArray(data.uploads)) { - const verified = data.uploads.map((upload) => { - const payload = verifyTokenForUser(upload.uploadToken, userId) - return payload ? { payload, parts: upload.parts } : null - }) + const s3Module = + storageProvider === 's3' ? await import('@/lib/uploads/providers/s3/client') : null + const blobModule = + storageProvider === 'blob' ? await import('@/lib/uploads/providers/blob/client') : null + + const completeOne = async (payload: UploadTokenPayload, parts: ClientCompletedPart[]) => { + const { uploadId, key, context } = payload + const config = getStorageConfig(context) - if (verified.some((entry) => entry === null)) { - return NextResponse.json({ error: 'Invalid or expired upload token' }, { status: 403 }) + if (storageProvider === 's3' && s3Module) { + const { completeS3MultipartUpload } = s3Module + const s3Parts = parts.map((p) => { + if (!p.etag) { + throw new Error(`Missing etag for S3 part ${p.partNumber}`) + } + return { ETag: p.etag, PartNumber: p.partNumber } + }) + const result = await completeS3MultipartUpload( + key, + uploadId, + s3Parts, + buildS3CustomConfig(config) + ) + return { + success: true as const, + location: result.location, + path: result.path, + key: result.key, + } } - const verifiedEntries = verified.filter( - (entry): entry is { payload: UploadTokenPayload; parts: unknown } => entry !== null - ) + if (storageProvider === 'blob' && blobModule) { + const { completeMultipartUpload, deriveBlobBlockId } = blobModule + const blobParts = parts.map((p) => ({ + partNumber: p.partNumber, + blockId: deriveBlobBlockId(p.partNumber), + })) + const result = await completeMultipartUpload( + key, + blobParts, + buildBlobCustomConfig(config) + ) + return { + success: true as const, + location: result.location, + path: result.path, + key: result.key, + } + } - const results = await Promise.all( - verifiedEntries.map(async ({ payload, parts }) => { - const { uploadId, key, context } = payload - const config = getStorageConfig(context) - - if (storageProvider === 's3') { - const { completeS3MultipartUpload } = await import( - '@/lib/uploads/providers/s3/client' - ) - const result = await completeS3MultipartUpload(key, uploadId, parts as any) - return { - success: true, - location: result.location, - path: result.path, - key: result.key, - } - } - if (storageProvider === 'blob') { - const { completeMultipartUpload } = await import( - '@/lib/uploads/providers/blob/client' - ) - const result = await completeMultipartUpload(key, parts as any, { - containerName: config.containerName!, - accountName: config.accountName!, - accountKey: config.accountKey, - connectionString: config.connectionString, - }) - return { - success: true, - location: result.location, - path: result.path, - key: result.key, - } - } + throw new Error(`Unsupported storage 
provider: ${storageProvider}`) + } - throw new Error(`Unsupported storage provider: ${storageProvider}`) - }) + if ('uploads' in data && Array.isArray(data.uploads)) { + const verified: Array<{ payload: UploadTokenPayload; parts: ClientCompletedPart[] }> = [] + for (const upload of data.uploads) { + const payload = verifyTokenForUser(upload.uploadToken, userId) + if (!payload) { + return NextResponse.json( + { error: 'Invalid or expired upload token' }, + { status: 403 } + ) + } + if (!isClientCompletedParts(upload.parts)) { + return NextResponse.json( + { error: 'Invalid parts payload: expected [{ partNumber, etag? }]' }, + { status: 400 } + ) + } + verified.push({ payload, parts: upload.parts }) + } + + const results = await Promise.all( + verified.map(({ payload, parts }) => completeOne(payload, parts)) ) - logger.info(`Completed ${verifiedEntries.length} multipart uploads`) + logger.info(`Completed ${verified.length} multipart uploads`) return NextResponse.json({ results }) } @@ -269,42 +356,18 @@ export const POST = withRouteHandler(async (request: NextRequest) => { if (!tokenPayload) { return NextResponse.json({ error: 'Invalid or expired upload token' }, { status: 403 }) } - - const { uploadId, key, context } = tokenPayload - const config = getStorageConfig(context) - - if (storageProvider === 's3') { - const { completeS3MultipartUpload } = await import('@/lib/uploads/providers/s3/client') - const result = await completeS3MultipartUpload(key, uploadId, single.parts as any) - logger.info(`Completed S3 multipart upload for key ${key} (context: ${context})`) - return NextResponse.json({ - success: true, - location: result.location, - path: result.path, - key: result.key, - }) - } - if (storageProvider === 'blob') { - const { completeMultipartUpload } = await import('@/lib/uploads/providers/blob/client') - const result = await completeMultipartUpload(key, single.parts as any, { - containerName: config.containerName!, - accountName: config.accountName!, - accountKey: config.accountKey, - connectionString: config.connectionString, - }) - logger.info(`Completed Azure multipart upload for key ${key} (context: ${context})`) - return NextResponse.json({ - success: true, - location: result.location, - path: result.path, - key: result.key, - }) + if (!isClientCompletedParts(single.parts)) { + return NextResponse.json( + { error: 'Invalid parts payload: expected [{ partNumber, etag? 
}]' }, + { status: 400 } + ) } - return NextResponse.json( - { error: `Unsupported storage provider: ${storageProvider}` }, - { status: 400 } + const result = await completeOne(tokenPayload, single.parts) + logger.info( + `Completed ${storageProvider} multipart upload for key ${tokenPayload.key} (context: ${tokenPayload.context})` ) + return NextResponse.json(result) } case 'abort': { @@ -330,16 +393,11 @@ export const POST = withRouteHandler(async (request: NextRequest) => { if (storageProvider === 's3') { const { abortS3MultipartUpload } = await import('@/lib/uploads/providers/s3/client') - await abortS3MultipartUpload(key, uploadId) + await abortS3MultipartUpload(key, uploadId, buildS3CustomConfig(config)) logger.info(`Aborted S3 multipart upload for key ${key} (context: ${context})`) } else if (storageProvider === 'blob') { const { abortMultipartUpload } = await import('@/lib/uploads/providers/blob/client') - await abortMultipartUpload(key, { - containerName: config.containerName!, - accountName: config.accountName!, - accountKey: config.accountKey, - connectionString: config.connectionString, - }) + await abortMultipartUpload(key, buildBlobCustomConfig(config)) logger.info(`Aborted Azure multipart upload for key ${key} (context: ${context})`) } else { return NextResponse.json( diff --git a/apps/sim/app/api/files/parse/route.test.ts b/apps/sim/app/api/files/parse/route.test.ts index 8a2c06f19ff..e2c032b4718 100644 --- a/apps/sim/app/api/files/parse/route.test.ts +++ b/apps/sim/app/api/files/parse/route.test.ts @@ -10,6 +10,8 @@ import { inputValidationMock, permissionsMock, permissionsMockFns, + storageServiceMock, + storageServiceMockFns, } from '@sim/testing' import { NextRequest } from 'next/server' import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest' @@ -22,8 +24,6 @@ const { mockIsSupportedFileType, mockParseFile, mockParseBuffer, - mockDownloadFile, - mockHasCloudStorage, mockFsAccess, mockFsStat, mockFsReadFile, @@ -47,8 +47,6 @@ const { content: 'parsed buffer content', metadata: { pageCount: 1 }, }), - mockDownloadFile: vi.fn(), - mockHasCloudStorage: vi.fn().mockReturnValue(true), mockFsAccess: vi.fn().mockResolvedValue(undefined), mockFsStat: vi.fn().mockImplementation(() => ({ isFile: () => true })), mockFsReadFile: vi.fn().mockResolvedValue(Buffer.from('test file content')), @@ -79,10 +77,7 @@ vi.mock('@/lib/file-parsers', () => ({ parseBuffer: mockParseBuffer, })) -vi.mock('@/lib/uploads/core/storage-service', () => ({ - downloadFile: mockDownloadFile, - hasCloudStorage: mockHasCloudStorage, -})) +vi.mock('@/lib/uploads/core/storage-service', () => storageServiceMock) vi.mock('path', () => ({ default: actualPath, @@ -176,6 +171,7 @@ describe('File Parse API Route', () => { }) permissionsMockFns.mockGetUserEntityPermissions.mockResolvedValue({ canView: true }) + storageServiceMockFns.mockHasCloudStorage.mockReturnValue(true) mockIsSupportedFileType.mockReturnValue(true) mockParseFile.mockResolvedValue({ content: 'parsed content', @@ -325,8 +321,8 @@ describe('File Parse API Route', () => { authenticated: true, }) - mockDownloadFile.mockRejectedValue(new Error('Access denied')) - mockHasCloudStorage.mockReturnValue(true) + storageServiceMockFns.mockDownloadFile.mockRejectedValue(new Error('Access denied')) + storageServiceMockFns.mockHasCloudStorage.mockReturnValue(true) const req = new NextRequest('http://localhost:3000/api/files/parse', { method: 'POST', diff --git a/apps/sim/app/api/files/presigned/route.test.ts 
b/apps/sim/app/api/files/presigned/route.test.ts index f6641c07d9f..6ae6a10ed5e 100644 --- a/apps/sim/app/api/files/presigned/route.test.ts +++ b/apps/sim/app/api/files/presigned/route.test.ts @@ -4,7 +4,7 @@ * @vitest-environment node */ -import { authMockFns } from '@sim/testing' +import { authMockFns, storageServiceMock, storageServiceMockFns } from '@sim/testing' import { NextRequest } from 'next/server' import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest' @@ -16,9 +16,6 @@ const { mockGetStorageConfig, mockIsUsingCloudStorage, mockGetStorageProvider, - mockHasCloudStorage, - mockGeneratePresignedUploadUrl, - mockGeneratePresignedDownloadUrl, mockValidateFileType, mockGenerateCopilotUploadUrl, mockIsImageFileType, @@ -32,9 +29,6 @@ const { mockGetStorageConfig: vi.fn(), mockIsUsingCloudStorage: vi.fn(), mockGetStorageProvider: vi.fn(), - mockHasCloudStorage: vi.fn(), - mockGeneratePresignedUploadUrl: vi.fn(), - mockGeneratePresignedDownloadUrl: vi.fn().mockResolvedValue('https://example.com/presigned-url'), mockValidateFileType: vi.fn().mockReturnValue(null), mockGenerateCopilotUploadUrl: vi.fn().mockResolvedValue({ url: 'https://example.com/presigned-url', @@ -63,11 +57,7 @@ vi.mock('@/lib/uploads/config', () => ({ getStorageProvider: mockGetStorageProvider, })) -vi.mock('@/lib/uploads/core/storage-service', () => ({ - hasCloudStorage: mockHasCloudStorage, - generatePresignedUploadUrl: mockGeneratePresignedUploadUrl, - generatePresignedDownloadUrl: mockGeneratePresignedDownloadUrl, -})) +vi.mock('@/lib/uploads/core/storage-service', () => storageServiceMock) vi.mock('@/lib/uploads/utils/validation', () => ({ validateFileType: mockValidateFileType, @@ -132,8 +122,8 @@ function setupFileApiMocks( storageProvider === 'blob' ? 'Azure Blob' : storageProvider === 's3' ? 
'S3' : 'Local' ) - mockHasCloudStorage.mockReturnValue(cloudEnabled) - mockGeneratePresignedUploadUrl.mockImplementation( + storageServiceMockFns.mockHasCloudStorage.mockReturnValue(cloudEnabled) + storageServiceMockFns.mockGeneratePresignedUploadUrl.mockImplementation( async (opts: { fileName: string; context: string }) => { const timestamp = Date.now() const safeFileName = opts.fileName.replace(/[^a-zA-Z0-9.-]/g, '_') @@ -144,7 +134,9 @@ function setupFileApiMocks( } } ) - mockGeneratePresignedDownloadUrl.mockResolvedValue('https://example.com/presigned-url') + storageServiceMockFns.mockGeneratePresignedDownloadUrl.mockResolvedValue( + 'https://example.com/presigned-url' + ) mockValidateFileType.mockReturnValue(null) @@ -431,7 +423,7 @@ describe('/api/files/presigned', () => { storageProvider: 's3', }) - mockGeneratePresignedUploadUrl.mockRejectedValue( + storageServiceMockFns.mockGeneratePresignedUploadUrl.mockRejectedValue( new Error('Unknown storage provider: unknown') ) @@ -458,7 +450,9 @@ describe('/api/files/presigned', () => { storageProvider: 's3', }) - mockGeneratePresignedUploadUrl.mockRejectedValue(new Error('S3 service unavailable')) + storageServiceMockFns.mockGeneratePresignedUploadUrl.mockRejectedValue( + new Error('S3 service unavailable') + ) const request = new NextRequest('http://localhost:3000/api/files/presigned?type=chat', { method: 'POST', @@ -483,7 +477,9 @@ describe('/api/files/presigned', () => { storageProvider: 'blob', }) - mockGeneratePresignedUploadUrl.mockRejectedValue(new Error('Azure service unavailable')) + storageServiceMockFns.mockGeneratePresignedUploadUrl.mockRejectedValue( + new Error('Azure service unavailable') + ) const request = new NextRequest('http://localhost:3000/api/files/presigned?type=chat', { method: 'POST', diff --git a/apps/sim/app/api/files/serve/[...path]/route.test.ts b/apps/sim/app/api/files/serve/[...path]/route.test.ts index 17b7a8d2fda..f0e7738c8d0 100644 --- a/apps/sim/app/api/files/serve/[...path]/route.test.ts +++ b/apps/sim/app/api/files/serve/[...path]/route.test.ts @@ -3,7 +3,7 @@ * * @vitest-environment node */ -import { hybridAuthMockFns } from '@sim/testing' +import { hybridAuthMockFns, storageServiceMock, storageServiceMockFns } from '@sim/testing' import { NextRequest } from 'next/server' import { beforeEach, describe, expect, it, vi } from 'vitest' @@ -11,7 +11,6 @@ const { mockVerifyFileAccess, mockReadFile, mockIsUsingCloudStorage, - mockDownloadFile, mockDownloadCopilotFile, mockInferContextFromKey, mockGetContentType, @@ -30,7 +29,6 @@ const { mockVerifyFileAccess: vi.fn(), mockReadFile: vi.fn(), mockIsUsingCloudStorage: vi.fn(), - mockDownloadFile: vi.fn(), mockDownloadCopilotFile: vi.fn(), mockInferContextFromKey: vi.fn(), mockGetContentType: vi.fn(), @@ -58,10 +56,7 @@ vi.mock('@/lib/uploads', () => ({ isUsingCloudStorage: mockIsUsingCloudStorage, })) -vi.mock('@/lib/uploads/core/storage-service', () => ({ - downloadFile: mockDownloadFile, - hasCloudStorage: vi.fn().mockReturnValue(true), -})) +vi.mock('@/lib/uploads/core/storage-service', () => storageServiceMock) vi.mock('@/lib/uploads/utils/file-utils', () => ({ inferContextFromKey: mockInferContextFromKey, @@ -104,6 +99,7 @@ describe('File Serve API Route', () => { mockVerifyFileAccess.mockResolvedValue(true) mockReadFile.mockResolvedValue(Buffer.from('test content')) mockIsUsingCloudStorage.mockReturnValue(false) + storageServiceMockFns.mockHasCloudStorage.mockReturnValue(true) mockInferContextFromKey.mockReturnValue('workspace') 
mockGetContentType.mockReturnValue('text/plain') mockFindLocalFile.mockReturnValue('/test/uploads/test-file.txt') @@ -161,7 +157,7 @@ describe('File Serve API Route', () => { it('should serve cloud file by downloading and proxying', async () => { mockIsUsingCloudStorage.mockReturnValue(true) - mockDownloadFile.mockResolvedValue(Buffer.from('test cloud file content')) + storageServiceMockFns.mockDownloadFile.mockResolvedValue(Buffer.from('test cloud file content')) mockGetContentType.mockReturnValue('image/png') const req = new NextRequest( @@ -174,7 +170,7 @@ describe('File Serve API Route', () => { expect(response.status).toBe(200) expect(response.headers.get('Content-Type')).toBe('image/png') - expect(mockDownloadFile).toHaveBeenCalledWith({ + expect(storageServiceMockFns.mockDownloadFile).toHaveBeenCalledWith({ key: 'workspace/test-workspace-id/1234567890-image.png', context: 'workspace', }) diff --git a/apps/sim/app/api/files/upload/route.test.ts b/apps/sim/app/api/files/upload/route.test.ts index 8e9ff1dbe8b..f0ef4ede98b 100644 --- a/apps/sim/app/api/files/upload/route.test.ts +++ b/apps/sim/app/api/files/upload/route.test.ts @@ -3,7 +3,14 @@ * * @vitest-environment node */ -import { authMockFns, hybridAuthMockFns, permissionsMock, permissionsMockFns } from '@sim/testing' +import { + authMockFns, + hybridAuthMockFns, + permissionsMock, + permissionsMockFns, + storageServiceMock, + storageServiceMockFns, +} from '@sim/testing' import { NextRequest } from 'next/server' import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest' @@ -16,8 +23,6 @@ const mocks = vi.hoisted(() => { const mockGetStorageProvider = vi.fn() const mockIsUsingCloudStorage = vi.fn() const mockUploadFile = vi.fn() - const mockHasCloudStorage = vi.fn() - const mockStorageUploadFile = vi.fn() return { mockVerifyFileAccess, @@ -28,8 +33,6 @@ const mocks = vi.hoisted(() => { mockGetStorageProvider, mockIsUsingCloudStorage, mockUploadFile, - mockHasCloudStorage, - mockStorageUploadFile, } }) @@ -85,10 +88,7 @@ vi.mock('@/lib/uploads', () => ({ uploadFile: mocks.mockUploadFile, })) -vi.mock('@/lib/uploads/core/storage-service', () => ({ - uploadFile: mocks.mockStorageUploadFile, - hasCloudStorage: mocks.mockHasCloudStorage, -})) +vi.mock('@/lib/uploads/core/storage-service', () => storageServiceMock) vi.mock('@/lib/uploads/setup.server', () => ({ UPLOAD_DIR_SERVER: '/tmp/test-uploads', @@ -153,8 +153,8 @@ function setupFileApiMocks( type: 'text/plain', }) - mocks.mockHasCloudStorage.mockReturnValue(cloudEnabled) - mocks.mockStorageUploadFile.mockResolvedValue({ + storageServiceMockFns.mockHasCloudStorage.mockReturnValue(cloudEnabled) + storageServiceMockFns.mockUploadFile.mockResolvedValue({ key: 'test-key', path: '/test/path', }) @@ -325,8 +325,8 @@ describe('File Upload Security Tests', () => { user: { id: 'test-user-id' }, }) - mocks.mockHasCloudStorage.mockReturnValue(false) - mocks.mockStorageUploadFile.mockResolvedValue({ + storageServiceMockFns.mockHasCloudStorage.mockReturnValue(false) + storageServiceMockFns.mockUploadFile.mockResolvedValue({ key: 'test-key', path: '/test/path', }) diff --git a/apps/sim/app/api/memory/[id]/route.ts b/apps/sim/app/api/memory/[id]/route.ts index 20d0a71cd0a..2928fd8b42c 100644 --- a/apps/sim/app/api/memory/[id]/route.ts +++ b/apps/sim/app/api/memory/[id]/route.ts @@ -1,7 +1,7 @@ import { db } from '@sim/db' import { memory } from '@sim/db/schema' import { createLogger } from '@sim/logger' -import { and, eq } from 'drizzle-orm' +import { and, eq, isNull } from 
'drizzle-orm' import { type NextRequest, NextResponse } from 'next/server' import { agentMemoryDataSchemaContract, @@ -75,7 +75,13 @@ export const GET = withRouteHandler(async (request: NextRequest, context: Memory const memories = await db .select() .from(memory) - .where(and(eq(memory.key, id), eq(memory.workspaceId, validatedWorkspaceId))) + .where( + and( + eq(memory.key, id), + eq(memory.workspaceId, validatedWorkspaceId), + isNull(memory.deletedAt) + ) + ) .orderBy(memory.createdAt) .limit(1) @@ -125,7 +131,13 @@ export const DELETE = withRouteHandler( const existingMemory = await db .select({ id: memory.id }) .from(memory) - .where(and(eq(memory.key, id), eq(memory.workspaceId, validatedWorkspaceId))) + .where( + and( + eq(memory.key, id), + eq(memory.workspaceId, validatedWorkspaceId), + isNull(memory.deletedAt) + ) + ) .limit(1) if (existingMemory.length === 0) { @@ -134,7 +146,13 @@ export const DELETE = withRouteHandler( await db .delete(memory) - .where(and(eq(memory.key, id), eq(memory.workspaceId, validatedWorkspaceId))) + .where( + and( + eq(memory.key, id), + eq(memory.workspaceId, validatedWorkspaceId), + isNull(memory.deletedAt) + ) + ) logger.info(`[${requestId}] Memory deleted: ${id} for workspace: ${validatedWorkspaceId}`) return NextResponse.json( @@ -177,7 +195,13 @@ export const PUT = withRouteHandler(async (request: NextRequest, context: Memory const existingMemories = await db .select() .from(memory) - .where(and(eq(memory.key, id), eq(memory.workspaceId, validatedWorkspaceId))) + .where( + and( + eq(memory.key, id), + eq(memory.workspaceId, validatedWorkspaceId), + isNull(memory.deletedAt) + ) + ) .limit(1) if (existingMemories.length === 0) { @@ -196,12 +220,24 @@ export const PUT = withRouteHandler(async (request: NextRequest, context: Memory await db .update(memory) .set({ data: validatedData, updatedAt: now }) - .where(and(eq(memory.key, id), eq(memory.workspaceId, validatedWorkspaceId))) + .where( + and( + eq(memory.key, id), + eq(memory.workspaceId, validatedWorkspaceId), + isNull(memory.deletedAt) + ) + ) const updatedMemories = await db .select() .from(memory) - .where(and(eq(memory.key, id), eq(memory.workspaceId, validatedWorkspaceId))) + .where( + and( + eq(memory.key, id), + eq(memory.workspaceId, validatedWorkspaceId), + isNull(memory.deletedAt) + ) + ) .limit(1) const mem = updatedMemories[0] diff --git a/apps/sim/app/api/memory/route.ts b/apps/sim/app/api/memory/route.ts index 4ad47e108b2..53b9340f3c6 100644 --- a/apps/sim/app/api/memory/route.ts +++ b/apps/sim/app/api/memory/route.ts @@ -293,7 +293,13 @@ export const DELETE = withRouteHandler(async (request: NextRequest) => { const result = await db .delete(memory) - .where(and(eq(memory.key, conversationId), eq(memory.workspaceId, workspaceId))) + .where( + and( + eq(memory.key, conversationId), + eq(memory.workspaceId, workspaceId), + isNull(memory.deletedAt) + ) + ) .returning({ id: memory.id }) const deletedCount = result.length diff --git a/apps/sim/app/api/table/[tableId]/cancel-runs/route.ts b/apps/sim/app/api/table/[tableId]/cancel-runs/route.ts new file mode 100644 index 00000000000..be89633d7e9 --- /dev/null +++ b/apps/sim/app/api/table/[tableId]/cancel-runs/route.ts @@ -0,0 +1,57 @@ +import { createLogger } from '@sim/logger' +import { type NextRequest, NextResponse } from 'next/server' +import { cancelTableRunsContract } from '@/lib/api/contracts/tables' +import { parseRequest } from '@/lib/api/server' +import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid' +import { 
generateRequestId } from '@/lib/core/utils/request' +import { withRouteHandler } from '@/lib/core/utils/with-route-handler' +import { cancelWorkflowGroupRuns } from '@/lib/table/workflow-columns' +import { accessError, checkAccess } from '@/app/api/table/utils' + +const logger = createLogger('TableCancelRunsAPI') + +interface RouteParams { + params: Promise<{ tableId: string }> +} + +/** + * POST /api/table/[tableId]/cancel-runs + * + * Cancels in-flight and pending workflow-column runs for this table. Scopes: + * `all` (every cell) or `row` (every cell for `rowId`). + */ +export const POST = withRouteHandler(async (request: NextRequest, { params }: RouteParams) => { + const requestId = generateRequestId() + + try { + const authResult = await checkSessionOrInternalAuth(request, { requireWorkflowId: false }) + if (!authResult.success || !authResult.userId) { + return NextResponse.json({ error: 'Authentication required' }, { status: 401 }) + } + + const parsed = await parseRequest(cancelTableRunsContract, request, { params }) + if (!parsed.success) return parsed.response + const { tableId } = parsed.data.params + const { workspaceId, scope, rowId } = parsed.data.body + + const result = await checkAccess(tableId, authResult.userId, 'write') + if (!result.ok) return accessError(result, requestId, tableId) + const { table } = result + + if (table.workspaceId !== workspaceId) { + return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 }) + } + + const cancelled = await cancelWorkflowGroupRuns(tableId, scope === 'row' ? rowId : undefined) + logger.info( + `[${requestId}] cancel-runs: tableId=${tableId} scope=${scope}${ + rowId ? ` rowId=${rowId}` : '' + } cancelled=${cancelled}` + ) + + return NextResponse.json({ success: true, data: { cancelled } }) + } catch (error) { + logger.error(`[${requestId}] cancel-runs failed:`, error) + return NextResponse.json({ error: 'Failed to cancel runs' }, { status: 500 }) + } +}) diff --git a/apps/sim/app/api/table/[tableId]/groups/[groupId]/run/route.ts b/apps/sim/app/api/table/[tableId]/groups/[groupId]/run/route.ts new file mode 100644 index 00000000000..80f80bb7945 --- /dev/null +++ b/apps/sim/app/api/table/[tableId]/groups/[groupId]/run/route.ts @@ -0,0 +1,67 @@ +import { createLogger } from '@sim/logger' +import { type NextRequest, NextResponse } from 'next/server' +import { runWorkflowGroupContract } from '@/lib/api/contracts/tables' +import { parseRequest } from '@/lib/api/server' +import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid' +import { generateRequestId } from '@/lib/core/utils/request' +import { withRouteHandler } from '@/lib/core/utils/with-route-handler' +import { triggerWorkflowGroupRun } from '@/lib/table/workflow-columns' +import { accessError, checkAccess } from '@/app/api/table/utils' + +const logger = createLogger('TableRunGroupAPI') + +interface RouteParams { + params: Promise<{ tableId: string; groupId: string }> +} + +/** + * POST /api/table/[tableId]/groups/[groupId]/run + * + * Manually triggers the workflow group for every eligible row in the table. + * Each eligible row's `executions[groupId]` is reset to `pending` so the + * scheduler picks it up and enqueues a per-cell trigger.dev job. Rows whose + * deps aren't satisfied or whose group is already running are skipped. 
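+ *
+ * @example
+ * // Sketch of a manual trigger from a client. IDs are invented and the
+ * // `runMode` value is assumed (see `runWorkflowGroupContract` for the real
+ * // enum); the response shape matches this handler's success path.
+ * // POST /api/table/tbl_123/groups/grp_abc/run
+ * // body: { "workspaceId": "ws_1", "runMode": "all" }
+ * // -> 200 { "success": true, "data": { "triggered": 12 } }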
+ */ +export const POST = withRouteHandler(async (request: NextRequest, { params }: RouteParams) => { + const requestId = generateRequestId() + + try { + const authResult = await checkSessionOrInternalAuth(request, { requireWorkflowId: false }) + if (!authResult.success || !authResult.userId) { + return NextResponse.json({ error: 'Authentication required' }, { status: 401 }) + } + + const parsed = await parseRequest(runWorkflowGroupContract, request, { params }) + if (!parsed.success) return parsed.response + const { tableId, groupId } = parsed.data.params + const { workspaceId, runMode, rowIds } = parsed.data.body + + const result = await checkAccess(tableId, authResult.userId, 'write') + if (!result.ok) return accessError(result, requestId, tableId) + const { table } = result + + if (table.workspaceId !== workspaceId) { + return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 }) + } + + const { triggered } = await triggerWorkflowGroupRun({ + tableId, + groupId, + workspaceId, + mode: runMode, + requestId, + rowIds, + }) + + return NextResponse.json({ success: true, data: { triggered } }) + } catch (error) { + if (error instanceof Error && error.message === 'Workflow group not found') { + return NextResponse.json({ error: 'Workflow group not found' }, { status: 404 }) + } + if (error instanceof Error && error.message === 'Invalid workspace ID') { + return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 }) + } + logger.error(`run-group failed:`, error) + return NextResponse.json({ error: 'Failed to run group' }, { status: 500 }) + } +}) diff --git a/apps/sim/app/api/table/[tableId]/groups/route.ts b/apps/sim/app/api/table/[tableId]/groups/route.ts new file mode 100644 index 00000000000..847647fc397 --- /dev/null +++ b/apps/sim/app/api/table/[tableId]/groups/route.ts @@ -0,0 +1,159 @@ +import { createLogger } from '@sim/logger' +import { type NextRequest, NextResponse } from 'next/server' +import { + addWorkflowGroupContract, + deleteWorkflowGroupContract, + updateWorkflowGroupContract, +} from '@/lib/api/contracts/tables' +import { parseRequest } from '@/lib/api/server' +import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid' +import { generateRequestId } from '@/lib/core/utils/request' +import { withRouteHandler } from '@/lib/core/utils/with-route-handler' +import { addWorkflowGroup, deleteWorkflowGroup, updateWorkflowGroup } from '@/lib/table/service' +import { accessError, checkAccess, normalizeColumn } from '@/app/api/table/utils' + +const logger = createLogger('TableWorkflowGroupsAPI') + +interface RouteParams { + params: Promise<{ tableId: string }> +} + +/** + * Maps known service-layer error messages onto HTTP responses; falls through + * to a 500 with a generic message for anything unrecognized. The three + * group-route handlers all surface the same error shapes from + * `addWorkflowGroup` / `updateWorkflowGroup` / `deleteWorkflowGroup`, so they + * share this mapper instead of repeating the if-chain three times. 
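+ *
+ * @example
+ * // Illustrative mappings (inputs invented; statuses follow the checks below):
+ * // mapWorkflowGroupError(new Error('Table not found'), fallback)        -> 404
+ * // mapWorkflowGroupError(new Error('Column already exists'), fallback)  -> 400
+ * // mapWorkflowGroupError(new TypeError('boom'), fallback)               -> 500, logged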
+ */ +function mapWorkflowGroupError(error: unknown, fallbackMessage: string): NextResponse { + if (error instanceof Error) { + const msg = error.message + if (msg === 'Table not found' || msg.includes('not found')) { + return NextResponse.json({ error: msg }, { status: 404 }) + } + if ( + msg.includes('Schema validation') || + msg.includes('Missing column definition') || + msg.includes('already exists') || + msg.includes('exceed') + ) { + return NextResponse.json({ error: msg }, { status: 400 }) + } + } + logger.error(fallbackMessage, error) + return NextResponse.json({ error: fallbackMessage }, { status: 500 }) +} + +/** POST /api/table/[tableId]/groups — create a workflow group + its output columns. */ +export const POST = withRouteHandler(async (request: NextRequest, { params }: RouteParams) => { + const requestId = generateRequestId() + try { + const authResult = await checkSessionOrInternalAuth(request, { requireWorkflowId: false }) + if (!authResult.success || !authResult.userId) { + return NextResponse.json({ error: 'Authentication required' }, { status: 401 }) + } + const parsed = await parseRequest(addWorkflowGroupContract, request, { params }) + if (!parsed.success) return parsed.response + const { tableId } = parsed.data.params + const validated = parsed.data.body + const result = await checkAccess(tableId, authResult.userId, 'write') + if (!result.ok) return accessError(result, requestId, tableId) + if (result.table.workspaceId !== validated.workspaceId) { + return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 }) + } + const updatedTable = await addWorkflowGroup( + { + tableId, + group: validated.group, + outputColumns: validated.outputColumns, + autoRun: validated.autoRun, + }, + requestId + ) + return NextResponse.json({ + success: true, + data: { + columns: updatedTable.schema.columns.map(normalizeColumn), + workflowGroups: updatedTable.schema.workflowGroups ?? [], + }, + }) + } catch (error) { + return mapWorkflowGroupError(error, 'Failed to add workflow group') + } +}) + +/** PATCH /api/table/[tableId]/groups — update a workflow group (deps / outputs). */ +export const PATCH = withRouteHandler(async (request: NextRequest, { params }: RouteParams) => { + const requestId = generateRequestId() + try { + const authResult = await checkSessionOrInternalAuth(request, { requireWorkflowId: false }) + if (!authResult.success || !authResult.userId) { + return NextResponse.json({ error: 'Authentication required' }, { status: 401 }) + } + const parsed = await parseRequest(updateWorkflowGroupContract, request, { params }) + if (!parsed.success) return parsed.response + const { tableId } = parsed.data.params + const validated = parsed.data.body + const result = await checkAccess(tableId, authResult.userId, 'write') + if (!result.ok) return accessError(result, requestId, tableId) + if (result.table.workspaceId !== validated.workspaceId) { + return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 }) + } + const updatedTable = await updateWorkflowGroup( + { + tableId, + groupId: validated.groupId, + ...(validated.workflowId !== undefined ? { workflowId: validated.workflowId } : {}), + ...(validated.name !== undefined ? { name: validated.name } : {}), + ...(validated.dependencies !== undefined ? { dependencies: validated.dependencies } : {}), + ...(validated.outputs !== undefined ? { outputs: validated.outputs } : {}), + ...(validated.newOutputColumns !== undefined + ? 
{ newOutputColumns: validated.newOutputColumns } + : {}), + }, + requestId + ) + return NextResponse.json({ + success: true, + data: { + columns: updatedTable.schema.columns.map(normalizeColumn), + workflowGroups: updatedTable.schema.workflowGroups ?? [], + }, + }) + } catch (error) { + return mapWorkflowGroupError(error, 'Failed to update workflow group') + } +}) + +/** DELETE /api/table/[tableId]/groups — remove a workflow group + its columns. */ +export const DELETE = withRouteHandler(async (request: NextRequest, { params }: RouteParams) => { + const requestId = generateRequestId() + try { + const authResult = await checkSessionOrInternalAuth(request, { requireWorkflowId: false }) + if (!authResult.success || !authResult.userId) { + return NextResponse.json({ error: 'Authentication required' }, { status: 401 }) + } + const parsed = await parseRequest(deleteWorkflowGroupContract, request, { params }) + if (!parsed.success) return parsed.response + const { tableId } = parsed.data.params + const validated = parsed.data.body + const result = await checkAccess(tableId, authResult.userId, 'write') + if (!result.ok) return accessError(result, requestId, tableId) + if (result.table.workspaceId !== validated.workspaceId) { + return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 }) + } + const updatedTable = await deleteWorkflowGroup( + { tableId, groupId: validated.groupId }, + requestId + ) + return NextResponse.json({ + success: true, + data: { + columns: updatedTable.schema.columns.map(normalizeColumn), + workflowGroups: updatedTable.schema.workflowGroups ?? [], + }, + }) + } catch (error) { + return mapWorkflowGroupError(error, 'Failed to delete workflow group') + } +}) diff --git a/apps/sim/app/api/table/[tableId]/route.ts b/apps/sim/app/api/table/[tableId]/route.ts index 2d87ed39553..64e2600bb94 100644 --- a/apps/sim/app/api/table/[tableId]/route.ts +++ b/apps/sim/app/api/table/[tableId]/route.ts @@ -54,6 +54,7 @@ export const GET = withRouteHandler(async (request: NextRequest, { params }: Tab description: table.description, schema: { columns: schemaData.columns.map(normalizeColumn), + ...(schemaData.workflowGroups ? { workflowGroups: schemaData.workflowGroups } : {}), }, metadata: table.metadata ?? null, rowCount: table.rowCount, diff --git a/apps/sim/app/api/table/[tableId]/rows/[rowId]/route.ts b/apps/sim/app/api/table/[tableId]/rows/[rowId]/route.ts index 6d9e319f868..9a4a988bc25 100644 --- a/apps/sim/app/api/table/[tableId]/rows/[rowId]/route.ts +++ b/apps/sim/app/api/table/[tableId]/rows/[rowId]/route.ts @@ -133,6 +133,9 @@ export const PATCH = withRouteHandler(async (request: NextRequest, context: RowR table, requestId ) + // Only `null` when a `cancellationGuard` is supplied and the SQL guard + // rejects the write — this route doesn't pass one, so reaching null is a bug. 
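+ // For contrast, a sketch of a guarded caller (option shape assumed from the
+ // comment above; this route never passes a guard):
+ //   const row = await updateRow({ ...args, cancellationGuard }, table, requestId)
+ //   if (row === null) { /* guard rejected the write: treat the run as cancelled */ }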
+ if (!updatedRow) throw new Error('updateRow returned null without a cancellationGuard') return NextResponse.json({ success: true, diff --git a/apps/sim/app/api/table/[tableId]/rows/[rowId]/run-workflow-group/route.ts b/apps/sim/app/api/table/[tableId]/rows/[rowId]/run-workflow-group/route.ts new file mode 100644 index 00000000000..aee786d226d --- /dev/null +++ b/apps/sim/app/api/table/[tableId]/rows/[rowId]/run-workflow-group/route.ts @@ -0,0 +1,97 @@ +import { createLogger } from '@sim/logger' +import { generateId } from '@sim/utils/id' +import { type NextRequest, NextResponse } from 'next/server' +import { runRowWorkflowGroupContract } from '@/lib/api/contracts/tables' +import { parseRequest } from '@/lib/api/server' +import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid' +import { generateRequestId } from '@/lib/core/utils/request' +import { withRouteHandler } from '@/lib/core/utils/with-route-handler' +import type { RowExecutionMetadata } from '@/lib/table' +import { updateRow } from '@/lib/table' +import { accessError, checkAccess } from '@/app/api/table/utils' + +const logger = createLogger('TableRunWorkflowGroupAPI') + +interface RouteParams { + params: Promise<{ tableId: string; rowId: string }> +} + +/** + * POST /api/table/[tableId]/rows/[rowId]/run-workflow-group + * + * Manually (re-)runs a workflow group for a single row by force-resetting + * `executions[groupId]` to `pending`. The `updateRow` call fires the + * scheduler which enqueues the cell job. + */ +export const POST = withRouteHandler(async (request: NextRequest, { params }: RouteParams) => { + const requestId = generateRequestId() + + try { + const authResult = await checkSessionOrInternalAuth(request, { requireWorkflowId: false }) + if (!authResult.success || !authResult.userId) { + return NextResponse.json({ error: 'Authentication required' }, { status: 401 }) + } + + const parsed = await parseRequest(runRowWorkflowGroupContract, request, { params }) + if (!parsed.success) return parsed.response + const { tableId, rowId } = parsed.data.params + const { workspaceId, groupId } = parsed.data.body + + const result = await checkAccess(tableId, authResult.userId, 'write') + if (!result.ok) return accessError(result, requestId, tableId) + const { table } = result + + if (table.workspaceId !== workspaceId) { + return NextResponse.json({ error: 'Invalid workspace ID' }, { status: 400 }) + } + + const group = (table.schema.workflowGroups ?? []).find((g) => g.id === groupId) + if (!group) { + return NextResponse.json({ error: 'Workflow group not found' }, { status: 404 }) + } + + const executionId = generateId() + const pendingExec: RowExecutionMetadata = { + status: 'pending', + executionId, + jobId: null, + workflowId: group.workflowId, + error: null, + } + /** + * Clear the group's output cells so the rerun starts visually fresh — + * otherwise stale values from the previous run linger in the UI until the + * new run writes new ones (or doesn't, on error/router-skip). + */ + const clearedData = Object.fromEntries(group.outputs.map((o) => [o.columnName, null])) + const updated = await updateRow( + { + tableId, + rowId, + data: clearedData, + workspaceId, + executionsPatch: { [groupId]: pendingExec }, + }, + table, + requestId + ) + if (updated === null) { + // The cell-task cancellation guard rejected the write — typically a + // racing stop click that already wrote `cancelled` for this run. + // Surface 409 so the caller doesn't poll indefinitely for a run that + // was never enqueued. 
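+ // Client-side sketch (hypothetical handler names): treat the 409 as terminal,
+ //   if (res.status === 409) { clearPendingState(rowId, groupId); return }
+ // instead of polling for an executionId that will never be enqueued.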
+ return NextResponse.json( + { error: 'Run was cancelled before it could be scheduled' }, + { status: 409 } + ) + } + + return NextResponse.json({ success: true, data: { executionId } }) + } catch (error) { + if (error instanceof Error && error.message === 'Row not found') { + return NextResponse.json({ error: 'Row not found' }, { status: 404 }) + } + logger.error(`run-workflow-group failed:`, error) + return NextResponse.json({ error: 'Failed to run workflow group' }, { status: 500 }) + } +}) diff --git a/apps/sim/app/api/table/[tableId]/rows/route.ts b/apps/sim/app/api/table/[tableId]/rows/route.ts index 4e107e82ea6..8c69ef55a38 100644 --- a/apps/sim/app/api/table/[tableId]/rows/route.ts +++ b/apps/sim/app/api/table/[tableId]/rows/route.ts @@ -277,6 +277,7 @@ export const GET = withRouteHandler( .select({ id: userTableRows.id, data: userTableRows.data, + executions: userTableRows.executions, position: userTableRows.position, createdAt: userTableRows.createdAt, updatedAt: userTableRows.updatedAt, @@ -317,6 +318,7 @@ export const GET = withRouteHandler( rows: rows.map((r) => ({ id: r.id, data: r.data, + executions: r.executions ?? {}, position: r.position, createdAt: r.createdAt instanceof Date ? r.createdAt.toISOString() : String(r.createdAt), diff --git a/apps/sim/app/api/table/utils.ts b/apps/sim/app/api/table/utils.ts index e80c9dbf0be..7db4ec31732 100644 --- a/apps/sim/app/api/table/utils.ts +++ b/apps/sim/app/api/table/utils.ts @@ -173,5 +173,6 @@ export function normalizeColumn(col: ColumnDefinition): ColumnDefinition { type: col.type, required: col.required ?? false, unique: col.unique ?? false, + ...(col.workflowGroupId ? { workflowGroupId: col.workflowGroupId } : {}), } } diff --git a/apps/sim/app/api/tools/file/manage/route.ts b/apps/sim/app/api/tools/file/manage/route.ts index 01048b92950..3880e587bcc 100644 --- a/apps/sim/app/api/tools/file/manage/route.ts +++ b/apps/sim/app/api/tools/file/manage/route.ts @@ -7,7 +7,7 @@ import { acquireLock, releaseLock } from '@/lib/core/config/redis' import { ensureAbsoluteUrl } from '@/lib/core/utils/urls' import { withRouteHandler } from '@/lib/core/utils/with-route-handler' import { - downloadWorkspaceFile, + fetchWorkspaceFileBuffer, getWorkspaceFileByName, updateWorkspaceFileContent, uploadWorkspaceFile, @@ -91,7 +91,7 @@ export const POST = withRouteHandler(async (request: NextRequest) => { } try { - const existingBuffer = await downloadWorkspaceFile(existing) + const existingBuffer = await fetchWorkspaceFileBuffer(existing) const finalContent = existingBuffer.toString('utf-8') + content const fileBuffer = Buffer.from(finalContent, 'utf-8') await updateWorkspaceFileContent(workspaceId, existing.id, userId, fileBuffer) diff --git a/apps/sim/app/api/tools/stt/route.ts b/apps/sim/app/api/tools/stt/route.ts index 4ff71c05cc0..3779a6b2982 100644 --- a/apps/sim/app/api/tools/stt/route.ts +++ b/apps/sim/app/api/tools/stt/route.ts @@ -20,6 +20,7 @@ import { import type { TranscriptSegment } from '@/tools/stt/types' const logger = createLogger('SttProxyAPI') +const ELEVENLABS_STT_MODEL = 'scribe_v2' export const dynamic = 'force-dynamic' export const maxDuration = 300 // 5 minutes for large files @@ -222,13 +223,7 @@ export const POST = withRouteHandler(async (request: NextRequest) => { duration = result.duration confidence = result.confidence } else if (provider === 'elevenlabs') { - const result = await transcribeWithElevenLabs( - audioBuffer, - apiKey, - language, - timestamps, - model - ) + const result = await 
transcribeWithElevenLabs(audioBuffer, apiKey, language, timestamps) transcript = result.transcript segments = result.segments detectedLanguage = result.language @@ -470,8 +465,7 @@ async function transcribeWithElevenLabs( audioBuffer: Buffer, apiKey: string, language?: string, - timestamps?: 'none' | 'sentence' | 'word', - model?: string + timestamps?: 'none' | 'sentence' | 'word' ): Promise<{ transcript: string segments?: TranscriptSegment[] @@ -481,7 +475,7 @@ async function transcribeWithElevenLabs( const formData = new FormData() const blob = new Blob([new Uint8Array(audioBuffer)], { type: 'audio/mpeg' }) formData.append('file', blob, 'audio.mp3') - formData.append('model_id', model || 'scribe_v1') + formData.append('model_id', ELEVENLABS_STT_MODEL) if (language && language !== 'auto') { formData.append('language_code', language) diff --git a/apps/sim/app/api/v1/files/[fileId]/route.ts b/apps/sim/app/api/v1/files/[fileId]/route.ts index 0a7e4b6020c..a2e4c029d1d 100644 --- a/apps/sim/app/api/v1/files/[fileId]/route.ts +++ b/apps/sim/app/api/v1/files/[fileId]/route.ts @@ -7,7 +7,7 @@ import { generateRequestId } from '@/lib/core/utils/request' import { withRouteHandler } from '@/lib/core/utils/with-route-handler' import { deleteWorkspaceFile, - downloadWorkspaceFile, + fetchWorkspaceFileBuffer, getWorkspaceFile, } from '@/lib/uploads/contexts/workspace' import { @@ -50,7 +50,7 @@ export const GET = withRouteHandler(async (request: NextRequest, context: FileRo return NextResponse.json({ error: 'File not found' }, { status: 404 }) } - const buffer = await downloadWorkspaceFile(fileRecord) + const buffer = await fetchWorkspaceFileBuffer(fileRecord) return new Response(new Uint8Array(buffer), { status: 200, diff --git a/apps/sim/app/api/v1/tables/[tableId]/rows/[rowId]/route.ts b/apps/sim/app/api/v1/tables/[tableId]/rows/[rowId]/route.ts index 65625720c31..810bb0dfc65 100644 --- a/apps/sim/app/api/v1/tables/[tableId]/rows/[rowId]/route.ts +++ b/apps/sim/app/api/v1/tables/[tableId]/rows/[rowId]/route.ts @@ -139,6 +139,11 @@ export const PATCH = withRouteHandler(async (request: NextRequest, context: RowR table, requestId ) + // No `cancellationGuard` is passed here, so `updateRow` can't return null + // from this caller. Defensive narrowing for TypeScript. 
+ if (!updatedRow) { + return NextResponse.json({ error: 'Row not found' }, { status: 404 }) + } return NextResponse.json({ success: true, diff --git a/apps/sim/app/api/workflows/[id]/executions/[executionId]/cancel/route.test.ts b/apps/sim/app/api/workflows/[id]/executions/[executionId]/cancel/route.test.ts index 6a9808f1b0e..c7b86847f0b 100644 --- a/apps/sim/app/api/workflows/[id]/executions/[executionId]/cancel/route.test.ts +++ b/apps/sim/app/api/workflows/[id]/executions/[executionId]/cancel/route.test.ts @@ -5,6 +5,7 @@ import { databaseMock, hybridAuthMockFns, + posthogServerMock, workflowAuthzMockFns, workflowsUtilsMock, } from '@sim/testing' @@ -43,9 +44,7 @@ vi.mock('@/lib/workflows/executor/human-in-the-loop-manager', () => ({ vi.mock('@/lib/workflows/utils', () => workflowsUtilsMock) -vi.mock('@/lib/posthog/server', () => ({ - captureServerEvent: vi.fn(), -})) +vi.mock('@/lib/posthog/server', () => posthogServerMock) vi.mock('@/lib/execution/event-buffer', () => ({ setExecutionMeta: (...args: unknown[]) => mockSetExecutionMeta(...args), diff --git a/apps/sim/app/api/workspaces/[id]/files/[fileId]/compiled-check/route.ts b/apps/sim/app/api/workspaces/[id]/files/[fileId]/compiled-check/route.ts index a6d54e8983f..7324c915c20 100644 --- a/apps/sim/app/api/workspaces/[id]/files/[fileId]/compiled-check/route.ts +++ b/apps/sim/app/api/workspaces/[id]/files/[fileId]/compiled-check/route.ts @@ -8,7 +8,7 @@ import { withRouteHandler } from '@/lib/core/utils/with-route-handler' import { BINARY_DOC_TASKS, MAX_DOCUMENT_PREVIEW_CODE_BYTES } from '@/lib/execution/constants' import { runSandboxTask, SandboxUserCodeError } from '@/lib/execution/sandbox/run-task' import { validateMermaidSource } from '@/lib/mermaid/validate' -import { downloadWorkspaceFile, getWorkspaceFile } from '@/lib/uploads/contexts/workspace' +import { fetchWorkspaceFileBuffer, getWorkspaceFile } from '@/lib/uploads/contexts/workspace' import { verifyWorkspaceMembership } from '@/app/api/workflows/utils' export const dynamic = 'force-dynamic' @@ -62,7 +62,7 @@ export const GET = withRouteHandler( let buffer: Buffer try { - buffer = await downloadWorkspaceFile(fileRecord) + buffer = await fetchWorkspaceFileBuffer(fileRecord) } catch (err) { logger.error('Failed to download file for compiled check', { fileId, diff --git a/apps/sim/app/api/workspaces/[id]/files/[fileId]/style/route.ts b/apps/sim/app/api/workspaces/[id]/files/[fileId]/style/route.ts index 815d8eb4f6f..c30d0e9723f 100644 --- a/apps/sim/app/api/workspaces/[id]/files/[fileId]/style/route.ts +++ b/apps/sim/app/api/workspaces/[id]/files/[fileId]/style/route.ts @@ -6,7 +6,7 @@ import { parseRequest } from '@/lib/api/server' import { getSession } from '@/lib/auth' import { extractDocumentStyle } from '@/lib/copilot/vfs/document-style' import { withRouteHandler } from '@/lib/core/utils/with-route-handler' -import { downloadWorkspaceFile, getWorkspaceFile } from '@/lib/uploads/contexts/workspace' +import { fetchWorkspaceFileBuffer, getWorkspaceFile } from '@/lib/uploads/contexts/workspace' import { verifyWorkspaceMembership } from '@/app/api/workflows/utils' export const dynamic = 'force-dynamic' @@ -52,7 +52,7 @@ export const GET = withRouteHandler( let buffer: Buffer try { - buffer = await downloadWorkspaceFile(fileRecord) + buffer = await fetchWorkspaceFileBuffer(fileRecord) } catch (err) { logger.error('Failed to download file for style extraction', { fileId, diff --git a/apps/sim/app/api/workspaces/[id]/files/presigned/route.test.ts 
b/apps/sim/app/api/workspaces/[id]/files/presigned/route.test.ts new file mode 100644 index 00000000000..8fee00a3f25 --- /dev/null +++ b/apps/sim/app/api/workspaces/[id]/files/presigned/route.test.ts @@ -0,0 +1,161 @@ +/** + * @vitest-environment node + */ +import { + authMockFns, + permissionsMock, + permissionsMockFns, + storageServiceMock, + storageServiceMockFns, +} from '@sim/testing' +import { NextRequest } from 'next/server' +import { beforeEach, describe, expect, it, vi } from 'vitest' + +const { mockCheckStorageQuota, mockGenerateWorkspaceFileKey, mockUseBlobStorage } = vi.hoisted( + () => ({ + mockCheckStorageQuota: vi.fn(), + mockGenerateWorkspaceFileKey: vi.fn(), + mockUseBlobStorage: { value: false }, + }) +) + +vi.mock('@/lib/billing/storage', () => ({ + checkStorageQuota: mockCheckStorageQuota, +})) + +vi.mock('@/lib/uploads/core/storage-service', () => storageServiceMock) + +vi.mock('@/lib/uploads/contexts/workspace/workspace-file-manager', () => ({ + generateWorkspaceFileKey: mockGenerateWorkspaceFileKey, +})) + +vi.mock('@/lib/uploads/config', () => ({ + get USE_BLOB_STORAGE() { + return mockUseBlobStorage.value + }, +})) + +vi.mock('@/lib/workspaces/permissions/utils', () => permissionsMock) + +const WS = '7727ef3f-8cf6-4686-b063-2bb006a10785' + +import { POST } from '@/app/api/workspaces/[id]/files/presigned/route' + +const params = (id = WS) => ({ params: Promise.resolve({ id }) }) + +const makeRequest = (body: unknown) => + new NextRequest(`http://localhost/api/workspaces/${WS}/files/presigned`, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify(body), + }) + +const validBody = { + fileName: 'video.mp4', + contentType: 'video/mp4', + fileSize: 10 * 1024 * 1024, +} + +describe('POST /api/workspaces/[id]/files/presigned', () => { + beforeEach(() => { + vi.clearAllMocks() + authMockFns.mockGetSession.mockResolvedValue({ user: { id: 'user-1' } }) + permissionsMockFns.mockGetUserEntityPermissions.mockResolvedValue('write') + mockCheckStorageQuota.mockResolvedValue({ allowed: true }) + storageServiceMockFns.mockHasCloudStorage.mockReturnValue(true) + mockGenerateWorkspaceFileKey.mockReturnValue(`workspace/${WS}/123-abc-video.mp4`) + storageServiceMockFns.mockGeneratePresignedUploadUrl.mockResolvedValue({ + url: 'https://s3/presigned', + key: `workspace/${WS}/123-abc-video.mp4`, + uploadHeaders: { 'Content-Type': 'video/mp4' }, + }) + }) + + it('returns 401 when unauthenticated', async () => { + authMockFns.mockGetSession.mockResolvedValueOnce(null) + const res = await POST(makeRequest(validBody), params()) + expect(res.status).toBe(401) + }) + + it('returns 403 when user has read-only permission', async () => { + permissionsMockFns.mockGetUserEntityPermissions.mockResolvedValueOnce('read') + const res = await POST(makeRequest(validBody), params()) + expect(res.status).toBe(403) + }) + + it('returns 400 for missing fileName', async () => { + const res = await POST(makeRequest({ ...validBody, fileName: '' }), params()) + expect(res.status).toBe(400) + }) + + it('returns 400 for negative fileSize', async () => { + const res = await POST(makeRequest({ ...validBody, fileSize: -1 }), params()) + expect(res.status).toBe(400) + }) + + it('accepts fileSize === 0 (empty new files)', async () => { + const res = await POST(makeRequest({ ...validBody, fileSize: 0 }), params()) + expect(res.status).toBe(200) + }) + + it('returns 413 when fileSize exceeds 5 GiB ceiling', async () => { + const res = await POST( + makeRequest({ ...validBody, 
fileSize: 6 * 1024 * 1024 * 1024 }), + params() + ) + expect(res.status).toBe(413) + }) + + it('returns 413 when storage quota would be exceeded', async () => { + mockCheckStorageQuota.mockResolvedValueOnce({ allowed: false, error: 'Over quota' }) + const res = await POST(makeRequest(validBody), params()) + const body = await res.json() + expect(res.status).toBe(413) + expect(body.error).toBe('Over quota') + }) + + it('returns local fallback signal when cloud storage is not configured', async () => { + storageServiceMockFns.mockHasCloudStorage.mockReturnValueOnce(false) + const res = await POST(makeRequest(validBody), params()) + const body = await res.json() + expect(res.status).toBe(200) + expect(body.directUploadSupported).toBe(false) + expect(body.presignedUrl).toBe('') + expect(body.fileInfo.name).toBe('video.mp4') + expect(storageServiceMockFns.mockGeneratePresignedUploadUrl).not.toHaveBeenCalled() + }) + + it('issues a presigned URL bound to the workspace', async () => { + const res = await POST(makeRequest(validBody), params()) + const body = await res.json() + + expect(res.status).toBe(200) + expect(body.directUploadSupported).toBe(true) + expect(body.presignedUrl).toBe('https://s3/presigned') + expect(body.fileInfo.key).toBe(`workspace/${WS}/123-abc-video.mp4`) + expect(body.fileInfo.path).toContain('?context=workspace') + expect(body.fileInfo.path).toContain('s3') + expect(body.uploadHeaders).toEqual({ 'Content-Type': 'video/mp4' }) + + expect(mockGenerateWorkspaceFileKey).toHaveBeenCalledWith(WS, 'video.mp4') + expect(storageServiceMockFns.mockGeneratePresignedUploadUrl).toHaveBeenCalledWith( + expect.objectContaining({ + context: 'workspace', + userId: 'user-1', + customKey: `workspace/${WS}/123-abc-video.mp4`, + metadata: { workspaceId: WS }, + }) + ) + }) + + it('serves blob path when blob storage is configured', async () => { + mockUseBlobStorage.value = true + try { + const res = await POST(makeRequest(validBody), params()) + const body = await res.json() + expect(body.fileInfo.path).toContain('/blob/') + } finally { + mockUseBlobStorage.value = false + } + }) +}) diff --git a/apps/sim/app/api/workspaces/[id]/files/presigned/route.ts b/apps/sim/app/api/workspaces/[id]/files/presigned/route.ts new file mode 100644 index 00000000000..332a9386ca7 --- /dev/null +++ b/apps/sim/app/api/workspaces/[id]/files/presigned/route.ts @@ -0,0 +1,97 @@ +import { createLogger } from '@sim/logger' +import { type NextRequest, NextResponse } from 'next/server' +import { workspacePresignedUploadContract } from '@/lib/api/contracts/workspace-files' +import { parseRequest } from '@/lib/api/server' +import { getSession } from '@/lib/auth' +import { checkStorageQuota } from '@/lib/billing/storage' +import { withRouteHandler } from '@/lib/core/utils/with-route-handler' +import { USE_BLOB_STORAGE } from '@/lib/uploads/config' +import { generateWorkspaceFileKey } from '@/lib/uploads/contexts/workspace/workspace-file-manager' +import { generatePresignedUploadUrl, hasCloudStorage } from '@/lib/uploads/core/storage-service' +import { MAX_WORKSPACE_FILE_SIZE } from '@/lib/uploads/shared/types' +import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils' + +const logger = createLogger('WorkspacePresignedAPI') + +/** + * POST /api/workspaces/[id]/files/presigned + * Returns a presigned PUT URL for a workspace-scoped object key. The client + * uploads the bytes directly to S3/Blob, then calls /files/register to + * insert metadata. 
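+ *
+ * @example
+ * // Sketch of the client flow. Field names come from this route's response,
+ * // and the register body mirrors `registerWorkspaceFileContract`.
+ * // const presigned = await fetch(`/api/workspaces/${id}/files/presigned`, {
+ * //   method: 'POST',
+ * //   body: JSON.stringify({ fileName: file.name, contentType: file.type, fileSize: file.size }),
+ * // }).then((r) => r.json())
+ * // if (presigned.directUploadSupported) {
+ * //   await fetch(presigned.presignedUrl, { method: 'PUT', headers: presigned.uploadHeaders, body: file })
+ * //   await fetch(`/api/workspaces/${id}/files/register`, {
+ * //     method: 'POST',
+ * //     body: JSON.stringify({ key: presigned.fileInfo.key, name: file.name, contentType: file.type }),
+ * //   })
+ * // }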
+ */ +export const POST = withRouteHandler( + async (request: NextRequest, context: { params: Promise<{ id: string }> }) => { + const session = await getSession() + if (!session?.user?.id) { + return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) + } + const userId = session.user.id + + const parsed = await parseRequest(workspacePresignedUploadContract, request, context) + if (!parsed.success) return parsed.response + const { params, body } = parsed.data + const workspaceId = params.id + const { fileName, contentType, fileSize } = body + + const permission = await getUserEntityPermissions(userId, 'workspace', workspaceId) + if (permission !== 'admin' && permission !== 'write') { + logger.warn(`User ${userId} lacks write permission for ${workspaceId}`) + return NextResponse.json({ error: 'Forbidden' }, { status: 403 }) + } + + if (fileSize > MAX_WORKSPACE_FILE_SIZE) { + return NextResponse.json( + { error: `File size exceeds maximum of ${MAX_WORKSPACE_FILE_SIZE} bytes` }, + { status: 413 } + ) + } + + if (!hasCloudStorage()) { + logger.info(`Local storage detected, signaling API fallback for ${fileName}`) + return NextResponse.json({ + fileName, + presignedUrl: '', + fileInfo: { path: '', key: '', name: fileName, size: fileSize, type: contentType }, + directUploadSupported: false, + }) + } + + const quotaCheck = await checkStorageQuota(userId, fileSize) + if (!quotaCheck.allowed) { + return NextResponse.json( + { error: quotaCheck.error || 'Storage limit exceeded' }, + { status: 413 } + ) + } + + const key = generateWorkspaceFileKey(workspaceId, fileName) + const presigned = await generatePresignedUploadUrl({ + fileName, + contentType, + fileSize, + context: 'workspace', + userId, + customKey: key, + expirationSeconds: 3600, + metadata: { workspaceId }, + }) + + const finalPath = `/api/files/serve/${USE_BLOB_STORAGE ? 
'blob' : 's3'}/${encodeURIComponent(key)}?context=workspace` + + logger.info(`Issued workspace presigned URL for ${fileName} -> ${key}`) + + return NextResponse.json({ + fileName, + presignedUrl: presigned.url, + fileInfo: { + path: finalPath, + key: presigned.key, + name: fileName, + size: fileSize, + type: contentType, + }, + uploadHeaders: presigned.uploadHeaders, + directUploadSupported: true, + }) + } +) diff --git a/apps/sim/app/api/workspaces/[id]/files/register/route.test.ts b/apps/sim/app/api/workspaces/[id]/files/register/route.test.ts new file mode 100644 index 00000000000..3d9fd1465e3 --- /dev/null +++ b/apps/sim/app/api/workspaces/[id]/files/register/route.test.ts @@ -0,0 +1,171 @@ +/** + * @vitest-environment node + */ +import { + auditMock, + auditMockFns, + authMockFns, + permissionsMock, + permissionsMockFns, + posthogServerMock, + posthogServerMockFns, +} from '@sim/testing' +import { NextRequest } from 'next/server' +import { beforeEach, describe, expect, it, vi } from 'vitest' + +const { mockRegisterUploadedWorkspaceFile, mockParseWorkspaceFileKey, FileConflictErrorImpl } = + vi.hoisted(() => { + class FileConflictErrorImpl extends Error { + constructor(message: string) { + super(message) + this.name = 'FileConflictError' + } + } + return { + mockRegisterUploadedWorkspaceFile: vi.fn(), + mockParseWorkspaceFileKey: vi.fn(), + FileConflictErrorImpl, + } + }) + +vi.mock('@/lib/uploads/contexts/workspace', () => ({ + registerUploadedWorkspaceFile: mockRegisterUploadedWorkspaceFile, + parseWorkspaceFileKey: mockParseWorkspaceFileKey, + FileConflictError: FileConflictErrorImpl, +})) + +vi.mock('@/lib/posthog/server', () => posthogServerMock) +vi.mock('@/lib/workspaces/permissions/utils', () => permissionsMock) +vi.mock('@sim/audit', () => auditMock) + +const WS = '7727ef3f-8cf6-4686-b063-2bb006a10785' +const VALID_KEY = `workspace/${WS}/123-abc-video.mp4` + +import { POST } from '@/app/api/workspaces/[id]/files/register/route' + +const params = (id = WS) => ({ params: Promise.resolve({ id }) }) + +const makeRequest = (body: unknown) => + new NextRequest(`http://localhost/api/workspaces/${WS}/files/register`, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify(body), + }) + +const validBody = { + key: VALID_KEY, + name: 'video.mp4', + contentType: 'video/mp4', +} + +describe('POST /api/workspaces/[id]/files/register', () => { + beforeEach(() => { + vi.clearAllMocks() + authMockFns.mockGetSession.mockResolvedValue({ + user: { id: 'user-1', name: 'User One', email: 'u@example.com' }, + }) + permissionsMockFns.mockGetUserEntityPermissions.mockResolvedValue('write') + mockParseWorkspaceFileKey.mockImplementation((key: string) => { + const match = key.match(/^workspace\/([^/]+)\//) + return match ? 
match[1] : null + }) + mockRegisterUploadedWorkspaceFile.mockResolvedValue({ + file: { + id: 'wf_123', + name: 'video.mp4', + size: 10 * 1024 * 1024, + type: 'video/mp4', + url: '/api/files/serve/...', + key: VALID_KEY, + context: 'workspace', + }, + created: true, + }) + }) + + it('returns 401 when unauthenticated', async () => { + authMockFns.mockGetSession.mockResolvedValueOnce(null) + const res = await POST(makeRequest(validBody), params()) + expect(res.status).toBe(401) + }) + + it('returns 403 when user lacks write permission', async () => { + permissionsMockFns.mockGetUserEntityPermissions.mockResolvedValueOnce('read') + const res = await POST(makeRequest(validBody), params()) + expect(res.status).toBe(403) + }) + + it('rejects keys belonging to a different workspace', async () => { + const otherWsKey = `workspace/00000000-0000-0000-0000-000000000000/123-abc-video.mp4` + const res = await POST(makeRequest({ ...validBody, key: otherWsKey }), params()) + const body = await res.json() + expect(res.status).toBe(400) + expect(body.error).toContain('does not belong') + expect(mockRegisterUploadedWorkspaceFile).not.toHaveBeenCalled() + }) + + it('returns 400 for empty key/name', async () => { + const res = await POST(makeRequest({ ...validBody, key: '' }), params()) + expect(res.status).toBe(400) + }) + + it('returns 404 when storage object is missing', async () => { + mockRegisterUploadedWorkspaceFile.mockRejectedValueOnce( + new Error('Uploaded object not found in storage') + ) + const res = await POST(makeRequest(validBody), params()) + expect(res.status).toBe(404) + }) + + it('returns 409 on duplicate file conflict', async () => { + mockRegisterUploadedWorkspaceFile.mockRejectedValueOnce(new FileConflictErrorImpl('video.mp4')) + const res = await POST(makeRequest(validBody), params()) + const body = await res.json() + expect(res.status).toBe(409) + expect(body.isDuplicate).toBe(true) + }) + + it('skips audit + analytics on idempotent re-register (created=false)', async () => { + mockRegisterUploadedWorkspaceFile.mockResolvedValueOnce({ + file: { + id: 'wf_123', + name: 'video.mp4', + size: 10 * 1024 * 1024, + type: 'video/mp4', + url: '/api/files/serve/...', + key: VALID_KEY, + context: 'workspace', + }, + created: false, + }) + + const res = await POST(makeRequest(validBody), params()) + expect(res.status).toBe(200) + expect(posthogServerMockFns.mockCaptureServerEvent).not.toHaveBeenCalled() + expect(auditMockFns.mockRecordAudit).not.toHaveBeenCalled() + }) + + it('finalizes upload, records audit and analytics', async () => { + const res = await POST(makeRequest(validBody), params()) + const body = await res.json() + + expect(res.status).toBe(200) + expect(body.success).toBe(true) + expect(body.file).toMatchObject({ id: 'wf_123', key: VALID_KEY }) + + expect(mockRegisterUploadedWorkspaceFile).toHaveBeenCalledWith({ + workspaceId: WS, + userId: 'user-1', + key: VALID_KEY, + originalName: 'video.mp4', + contentType: 'video/mp4', + }) + + expect(posthogServerMockFns.mockCaptureServerEvent).toHaveBeenCalledWith( + 'user-1', + 'file_uploaded', + expect.objectContaining({ workspace_id: WS, file_type: 'video/mp4' }), + expect.any(Object) + ) + }) +}) diff --git a/apps/sim/app/api/workspaces/[id]/files/register/route.ts b/apps/sim/app/api/workspaces/[id]/files/register/route.ts new file mode 100644 index 00000000000..dfcaa537b5e --- /dev/null +++ b/apps/sim/app/api/workspaces/[id]/files/register/route.ts @@ -0,0 +1,101 @@ +import { AuditAction, AuditResourceType, recordAudit } from 
'@sim/audit' +import { createLogger } from '@sim/logger' +import { type NextRequest, NextResponse } from 'next/server' +import { registerWorkspaceFileContract } from '@/lib/api/contracts/workspace-files' +import { parseRequest } from '@/lib/api/server' +import { getSession } from '@/lib/auth' +import { withRouteHandler } from '@/lib/core/utils/with-route-handler' +import { captureServerEvent } from '@/lib/posthog/server' +import { + FileConflictError, + parseWorkspaceFileKey, + registerUploadedWorkspaceFile, +} from '@/lib/uploads/contexts/workspace' +import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils' + +const logger = createLogger('WorkspaceRegisterAPI') + +/** + * POST /api/workspaces/[id]/files/register + * Finalize a direct-to-storage upload by inserting metadata, updating quota, + * and recording an audit log. Validates the storage key belongs to the + * caller's workspace to prevent cross-tenant key smuggling. + */ +export const POST = withRouteHandler( + async (request: NextRequest, context: { params: Promise<{ id: string }> }) => { + const session = await getSession() + if (!session?.user?.id) { + return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) + } + const userId = session.user.id + + const parsed = await parseRequest(registerWorkspaceFileContract, request, context) + if (!parsed.success) return parsed.response + const { params, body } = parsed.data + const workspaceId = params.id + const { key, name, contentType } = body + + const permission = await getUserEntityPermissions(userId, 'workspace', workspaceId) + if (permission !== 'admin' && permission !== 'write') { + logger.warn(`User ${userId} lacks write permission for ${workspaceId}`) + return NextResponse.json({ error: 'Forbidden' }, { status: 403 }) + } + + if (parseWorkspaceFileKey(key) !== workspaceId) { + logger.warn(`Key ${key} does not belong to workspace ${workspaceId}`) + return NextResponse.json( + { error: 'Storage key does not belong to this workspace' }, + { status: 400 } + ) + } + + try { + const { file: userFile, created } = await registerUploadedWorkspaceFile({ + workspaceId, + userId, + key, + originalName: name, + contentType, + }) + + if (created) { + logger.info(`Registered direct upload ${name} -> ${key}`) + + captureServerEvent( + userId, + 'file_uploaded', + { workspace_id: workspaceId, file_type: contentType }, + { groups: { workspace: workspaceId } } + ) + + recordAudit({ + workspaceId, + actorId: userId, + actorName: session.user.name, + actorEmail: session.user.email, + action: AuditAction.FILE_UPLOADED, + resourceType: AuditResourceType.FILE, + resourceId: userFile.id, + resourceName: name, + description: `Uploaded file "${name}"`, + metadata: { fileSize: userFile.size, fileType: contentType }, + request, + }) + } else { + logger.info(`Idempotent re-register for existing upload ${name} -> ${key}`) + } + + return NextResponse.json({ success: true, file: userFile }) + } catch (error) { + logger.error('Failed to register workspace file:', error) + + const errorMessage = error instanceof Error ? error.message : 'Failed to register file' + const isDuplicate = + error instanceof FileConflictError || errorMessage.includes('already exists') + const isMissing = errorMessage.includes('not found in storage') + + const status = isDuplicate ? 409 : isMissing ? 
404 : 500 + return NextResponse.json({ success: false, error: errorMessage, isDuplicate }, { status }) + } + } +) diff --git a/apps/sim/app/api/workspaces/[id]/files/route.ts b/apps/sim/app/api/workspaces/[id]/files/route.ts index 090e2aa1ee2..d89b12118e8 100644 --- a/apps/sim/app/api/workspaces/[id]/files/route.ts +++ b/apps/sim/app/api/workspaces/[id]/files/route.ts @@ -15,6 +15,7 @@ import { listWorkspaceFiles, uploadWorkspaceFile, } from '@/lib/uploads/contexts/workspace' +import { MAX_WORKSPACE_FORMDATA_FILE_SIZE } from '@/lib/uploads/shared/types' import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils' import { verifyWorkspaceMembership } from '@/app/api/workflows/utils' @@ -129,13 +130,12 @@ export const POST = withRouteHandler( const fileName = rawFile.name || 'untitled.md' - const maxSize = 100 * 1024 * 1024 - if (rawFile.size > maxSize) { + if (rawFile.size > MAX_WORKSPACE_FORMDATA_FILE_SIZE) { return NextResponse.json( { - error: `File size exceeds 100MB limit (${(rawFile.size / (1024 * 1024)).toFixed(2)}MB)`, + error: `File size exceeds maximum of ${MAX_WORKSPACE_FORMDATA_FILE_SIZE} bytes (${(rawFile.size / (1024 * 1024)).toFixed(2)}MB)`, }, - { status: 400 } + { status: 413 } ) } diff --git a/apps/sim/app/api/workspaces/invitations/route.test.ts b/apps/sim/app/api/workspaces/invitations/route.test.ts index 979fe7523bc..c364d8228e4 100644 --- a/apps/sim/app/api/workspaces/invitations/route.test.ts +++ b/apps/sim/app/api/workspaces/invitations/route.test.ts @@ -8,6 +8,7 @@ import { createMockRequest, permissionsMock, permissionsMockFns, + posthogServerMock, schemaMock, } from '@sim/testing' import { beforeEach, describe, expect, it, vi } from 'vitest' @@ -94,9 +95,7 @@ vi.mock('@/ee/access-control/utils/permission-check', () => ({ vi.mock('@sim/audit', () => auditMock) -vi.mock('@/lib/posthog/server', () => ({ - captureServerEvent: vi.fn(), -})) +vi.mock('@/lib/posthog/server', () => posthogServerMock) vi.mock('@/lib/core/telemetry', () => ({ PlatformEvents: { diff --git a/apps/sim/app/workspace/[workspaceId]/components/resource/components/resource-header/resource-header.tsx b/apps/sim/app/workspace/[workspaceId]/components/resource/components/resource-header/resource-header.tsx index 68baec3f2b6..22686115782 100644 --- a/apps/sim/app/workspace/[workspaceId]/components/resource/components/resource-header/resource-header.tsx +++ b/apps/sim/app/workspace/[workspaceId]/components/resource/components/resource-header/resource-header.tsx @@ -55,6 +55,14 @@ interface ResourceHeaderProps { breadcrumbs?: BreadcrumbItem[] create?: CreateAction actions?: HeaderAction[] + /** Arbitrary content rendered in the right-aligned actions row, before the Create button. */ + trailingActions?: React.ReactNode + /** + * Replaces the default Create button entirely — supply your own trigger (for + * example a dropdown) when the create action needs richer UI. When provided, + * `create` is ignored. + */ + createTrigger?: React.ReactNode } export const ResourceHeader = memo(function ResourceHeader({ @@ -63,6 +71,8 @@ export const ResourceHeader = memo(function ResourceHeader({ breadcrumbs, create, actions, + trailingActions, + createTrigger, }: ResourceHeaderProps) { const hasBreadcrumbs = breadcrumbs && breadcrumbs.length > 0 @@ -124,17 +134,19 @@ export const ResourceHeader = memo(function ResourceHeader({ ) })} - {create && ( - - )} + {trailingActions} + {createTrigger ?? 
+ (create && ( + + ))} diff --git a/apps/sim/app/workspace/[workspaceId]/files/files.tsx b/apps/sim/app/workspace/[workspaceId]/files/files.tsx index cd1cbdf499e..b172d8bc5d9 100644 --- a/apps/sim/app/workspace/[workspaceId]/files/files.tsx +++ b/apps/sim/app/workspace/[workspaceId]/files/files.tsx @@ -23,13 +23,15 @@ import { ModalHeader, Pencil, Trash, + toast, Upload, } from '@/components/emcn' import { File as FilesIcon } from '@/components/emcn/icons' import { getDocumentIcon } from '@/components/icons/document-icons' +import { triggerFileDownload } from '@/lib/uploads/client/download' import type { WorkspaceFileRecord } from '@/lib/uploads/contexts/workspace' +import { MAX_WORKSPACE_FILE_SIZE } from '@/lib/uploads/shared/types' import { - downloadWorkspaceFile, formatFileSize, getFileExtension, getMimeTypeFromExtension, @@ -180,7 +182,11 @@ export function Files() { filesRef.current = files const [uploading, setUploading] = useState(false) - const [uploadProgress, setUploadProgress] = useState({ completed: 0, total: 0 }) + const [uploadProgress, setUploadProgress] = useState({ + completed: 0, + total: 0, + currentPercent: 0, + }) const [isDraggingOver, setIsDraggingOver] = useState(false) const dragCounterRef = useRef(0) const [inputValue, setInputValue] = useState('') @@ -376,8 +382,24 @@ export function Files() { const uploadFiles = async (filesToUpload: File[]) => { if (!workspaceId || filesToUpload.length === 0) return + const oversized: string[] = [] + const sizeFiltered = filesToUpload.filter((f) => { + if (f.size > MAX_WORKSPACE_FILE_SIZE) { + oversized.push(f.name) + return false + } + return true + }) + if (oversized.length > 0) { + toast.error( + oversized.length === 1 + ? `${oversized[0]} exceeds the 5 GiB upload limit` + : `${oversized.length} files exceed the 5 GiB upload limit` + ) + } + const unsupported: string[] = [] - const allowedFiles = filesToUpload.filter((f) => { + const allowedFiles = sizeFiltered.filter((f) => { const ext = getFileExtension(f.name) const ok = SUPPORTED_EXTENSIONS.includes(ext as (typeof SUPPORTED_EXTENSIONS)[number]) if (!ok) unsupported.push(f.name) @@ -392,12 +414,22 @@ export function Files() { try { setUploading(true) - setUploadProgress({ completed: 0, total: allowedFiles.length }) + setUploadProgress({ completed: 0, total: allowedFiles.length, currentPercent: 0 }) for (let i = 0; i < allowedFiles.length; i++) { try { - await uploadFile.mutateAsync({ workspaceId, file: allowedFiles[i] }) - setUploadProgress({ completed: i + 1, total: allowedFiles.length }) + await uploadFile.mutateAsync({ + workspaceId, + file: allowedFiles[i], + onProgress: ({ percent }) => { + setUploadProgress((prev) => ({ ...prev, currentPercent: percent })) + }, + }) + setUploadProgress({ + completed: i + 1, + total: allowedFiles.length, + currentPercent: 0, + }) } catch (err) { logger.error('Error uploading file:', err) } @@ -406,7 +438,7 @@ export function Files() { logger.error('Error uploading file:', err) } finally { setUploading(false) - setUploadProgress({ completed: 0, total: 0 }) + setUploadProgress({ completed: 0, total: 0, currentPercent: 0 }) } } @@ -443,7 +475,7 @@ export function Files() { const handleDownload = useCallback(async (file: WorkspaceFileRecord) => { try { - await downloadWorkspaceFile(file) + await triggerFileDownload(file) } catch (err) { logger.error('Failed to download file:', err) } @@ -824,7 +856,9 @@ export function Files() { const uploadButtonLabel = uploading && uploadProgress.total > 0 - ? 
`${uploadProgress.completed}/${uploadProgress.total}` + ? uploadProgress.currentPercent > 0 && uploadProgress.currentPercent < 100 + ? `${uploadProgress.completed}/${uploadProgress.total} · ${uploadProgress.currentPercent}%` + : `${uploadProgress.completed}/${uploadProgress.total}` : uploading ? 'Uploading...' : 'Upload' diff --git a/apps/sim/app/workspace/[workspaceId]/home/components/mothership-view/components/resource-content/resource-content.tsx b/apps/sim/app/workspace/[workspaceId]/home/components/mothership-view/components/resource-content/resource-content.tsx index e60f2bebd21..46f78e1f89e 100644 --- a/apps/sim/app/workspace/[workspaceId]/home/components/mothership-view/components/resource-content/resource-content.tsx +++ b/apps/sim/app/workspace/[workspaceId]/home/components/mothership-view/components/resource-content/resource-content.tsx @@ -19,11 +19,8 @@ import { markRunToolManuallyStopped, reportManualRunToolStop, } from '@/lib/copilot/tools/client/run-tool-execution' -import { - downloadWorkspaceFile, - getFileExtension, - getMimeTypeFromExtension, -} from '@/lib/uploads/utils/file-utils' +import { triggerFileDownload } from '@/lib/uploads/client/download' +import { getFileExtension, getMimeTypeFromExtension } from '@/lib/uploads/utils/file-utils' import { workflowBorderColor } from '@/lib/workspaces/colors' import { FileViewer, @@ -422,7 +419,7 @@ function EmbeddedFileActions({ workspaceId, fileId }: EmbeddedFileActionsProps) const handleDownload = async () => { if (!file) return try { - await downloadWorkspaceFile(file) + await triggerFileDownload(file) } catch (err) { fileLogger.error('Failed to download file:', err) } diff --git a/apps/sim/app/workspace/[workspaceId]/knowledge/hooks/use-knowledge-upload.ts b/apps/sim/app/workspace/[workspaceId]/knowledge/hooks/use-knowledge-upload.ts index bb7cab5a7d1..87ebc397b72 100644 --- a/apps/sim/app/workspace/[workspaceId]/knowledge/hooks/use-knowledge-upload.ts +++ b/apps/sim/app/workspace/[workspaceId]/knowledge/hooks/use-knowledge-upload.ts @@ -2,20 +2,36 @@ import { useCallback, useState } from 'react' import { createLogger } from '@sim/logger' import { sleep } from '@sim/utils/helpers' import { useQueryClient } from '@tanstack/react-query' -import { isApiClientError } from '@/lib/api/client/errors' -import { requestJson } from '@/lib/api/client/request' -import { createKnowledgeDocumentsContract } from '@/lib/api/contracts/knowledge/documents' -import { getFileExtension, getMimeTypeFromExtension } from '@/lib/uploads/utils/file-utils' +import { + calculateUploadTimeoutMs, + DirectUploadError, + isTransientUploadError, + LARGE_FILE_THRESHOLD, + MULTIPART_MAX_RETRIES, + MULTIPART_RETRY_BACKOFF, + MULTIPART_RETRY_DELAY_MS, + normalizePresignedData, + type PresignedUploadInfo, + runUploadStrategy, + runWithConcurrency, + type UploadProgressEvent, + WHOLE_FILE_PARALLEL_UPLOADS, +} from '@/lib/uploads/client/direct-upload' +import { getFileContentType, isAbortError, isNetworkError } from '@/lib/uploads/utils/file-utils' import { knowledgeKeys } from '@/hooks/queries/kb/knowledge' const logger = createLogger('KnowledgeUpload') +const KB_BATCH_PRESIGNED_ENDPOINT = '/api/files/presigned/batch?type=knowledge-base' +const KB_API_UPLOAD_ENDPOINT = '/api/files/upload' + +const BATCH_REQUEST_SIZE = 50 + export interface UploadedFile { filename: string fileUrl: string fileSize: number mimeType: string - // Document tags tag1?: string tag2?: string tag3?: string @@ -29,7 +45,7 @@ export interface FileUploadStatus { fileName: string fileSize: 
number status: 'pending' | 'uploading' | 'completed' | 'failed' - progress?: number // 0-100 percentage + progress?: number error?: string } @@ -38,15 +54,15 @@ export interface UploadProgress { filesCompleted: number totalFiles: number currentFile?: string - currentFileProgress?: number // 0-100 percentage for current file - fileStatuses?: FileUploadStatus[] // Track each file's status + currentFileProgress?: number + fileStatuses?: FileUploadStatus[] } export interface UploadError { message: string timestamp: number code?: string - details?: any + details?: unknown } export interface ProcessingOptions { @@ -69,323 +85,123 @@ class KnowledgeUploadError extends Error { } } -class PresignedUrlError extends KnowledgeUploadError { - constructor(message: string, details?: unknown) { - super(message, 'PRESIGNED_URL_ERROR', details) - } -} - -class DirectUploadError extends KnowledgeUploadError { - constructor(message: string, details?: unknown) { - super(message, 'DIRECT_UPLOAD_ERROR', details) - } -} - class ProcessingError extends KnowledgeUploadError { constructor(message: string, details?: unknown) { super(message, 'PROCESSING_ERROR', details) } } -/** - * Loosely-typed shape of a failed `Response` JSON body returned by routes - * in this codebase. Routes generally surface `{ error?, message?, details? }` - * where `details` is a Zod issue array (`{ message: string }[]`). Treated - * as a structural read-only view; missing/undefined fields are tolerated. - */ -interface ApiErrorBodyShape { - error?: unknown - message?: unknown - details?: unknown -} - -/** - * Reads a failed `Response`'s JSON body into a typed shape and returns the - * parsed body plus a human-readable error string that combines the - * top-level `error`/`message` with any Zod `details[].message` entries. - * Falls back to `statusText` then status code when the body is unreadable. - */ -async function readApiResponseError( - response: Response, - fallback = 'Unknown error' -): Promise<{ message: string; body: ApiErrorBodyShape | null }> { - let body: ApiErrorBodyShape | null = null - try { - const parsed: unknown = await response.json() - if (parsed && typeof parsed === 'object') { - body = parsed as ApiErrorBodyShape - } - } catch { - body = null - } - - const baseError = - (typeof body?.error === 'string' && body.error) || - (typeof body?.message === 'string' && body.message) || - response.statusText || - `HTTP ${response.status}` || - fallback - - const detailMessages = Array.isArray(body?.details) - ? body.details - .map((d) => - d && typeof d === 'object' && 'message' in d && typeof d.message === 'string' - ? d.message - : null - ) - .filter((m): m is string => Boolean(m)) - .join(', ') - : '' - - return { - message: detailMessages ? 
`${baseError}: ${detailMessages}` : baseError, - body, - } -} - -/** - * Configuration constants for file upload operations - */ -const UPLOAD_CONFIG = { - MAX_PARALLEL_UPLOADS: 3, - MAX_RETRIES: 3, - RETRY_DELAY_MS: 2000, - RETRY_BACKOFF: 2, - CHUNK_SIZE: 8 * 1024 * 1024, - DIRECT_UPLOAD_THRESHOLD: 4 * 1024 * 1024, - LARGE_FILE_THRESHOLD: 50 * 1024 * 1024, - BASE_TIMEOUT_MS: 2 * 60 * 1000, - TIMEOUT_PER_MB_MS: 1500, - MAX_TIMEOUT_MS: 10 * 60 * 1000, - MULTIPART_PART_CONCURRENCY: 3, - MULTIPART_MAX_RETRIES: 3, - BATCH_REQUEST_SIZE: 50, -} as const - -/** - * Calculates the upload timeout based on file size - */ -const calculateUploadTimeoutMs = (fileSize: number) => { - const sizeInMb = fileSize / (1024 * 1024) - const dynamicBudget = UPLOAD_CONFIG.BASE_TIMEOUT_MS + sizeInMb * UPLOAD_CONFIG.TIMEOUT_PER_MB_MS - return Math.min(dynamicBudget, UPLOAD_CONFIG.MAX_TIMEOUT_MS) -} - -/** - * Gets high resolution timestamp for performance measurements - */ -const getHighResTime = () => - typeof performance !== 'undefined' && typeof performance.now === 'function' - ? performance.now() - : Date.now() - -/** - * Formats bytes to megabytes with 2 decimal places - */ -const formatMegabytes = (bytes: number) => Number((bytes / (1024 * 1024)).toFixed(2)) - -/** - * Calculates throughput in Mbps - */ -const calculateThroughputMbps = (bytes: number, durationMs: number) => { - if (!bytes || !durationMs) return 0 - return Number((((bytes * 8) / durationMs) * 0.001).toFixed(2)) -} - -/** - * Formats duration from milliseconds to seconds - */ -const formatDurationSeconds = (durationMs: number) => Number((durationMs / 1000).toFixed(2)) +const getErrorMessage = (error: unknown): string => + error instanceof Error ? error.message : typeof error === 'string' ? error : 'Unknown error' -/** - * Gets the content type for a file, falling back to extension-based lookup if browser doesn't provide one - */ -const getFileContentType = (file: File): string => { - if (file.type?.trim()) { - return file.type - } - const extension = getFileExtension(file.name) - return getMimeTypeFromExtension(extension) +interface BatchPresignedFile { + fileName: string + contentType: string + fileSize: number } /** - * Runs async operations with concurrency limit + * Fetch presigned upload data for the small files in `files`. Returns a sparse + * array aligned with the input: entries for files >= LARGE_FILE_THRESHOLD are + * `undefined` because those uploads use multipart and never consume a presigned + * single-PUT URL. 
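+ * A sketch of the alignment, assuming a hypothetical 50 MB threshold (the size
+ * check below is inclusive, so a file sized exactly at LARGE_FILE_THRESHOLD
+ * still receives a single-PUT URL):
+ *   files  = [a.pdf (10 MB), b.mp4 (200 MB), c.txt (1 MB)]
+ *   result = [<info for a>,  undefined,      <info for c>]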
*/ -const runWithConcurrency = async ( - items: T[], - limit: number, - worker: (item: T, index: number) => Promise -): Promise>> => { - const results: Array> = Array(items.length) - - if (items.length === 0) { - return results +const fetchBatchPresignedData = async ( + files: File[] +): Promise<(PresignedUploadInfo | undefined)[]> => { + const result: (PresignedUploadInfo | undefined)[] = new Array(files.length).fill(undefined) + const smallFileIndices: number[] = [] + for (let i = 0; i < files.length; i++) { + if (files[i].size <= LARGE_FILE_THRESHOLD) smallFileIndices.push(i) } + if (smallFileIndices.length === 0) return result - const concurrency = Math.max(1, Math.min(limit, items.length)) - let nextIndex = 0 - - const runners = Array.from({ length: concurrency }, async () => { - while (true) { - const currentIndex = nextIndex++ - if (currentIndex >= items.length) { - break - } - - try { - const value = await worker(items[currentIndex], currentIndex) - results[currentIndex] = { status: 'fulfilled', value } - } catch (error) { - results[currentIndex] = { status: 'rejected', reason: error } - } + for (let start = 0; start < smallFileIndices.length; start += BATCH_REQUEST_SIZE) { + const batchIndices = smallFileIndices.slice(start, start + BATCH_REQUEST_SIZE) + const batchFiles = batchIndices.map((i) => files[i]) + const body: { files: BatchPresignedFile[] } = { + files: batchFiles.map((file) => ({ + fileName: file.name, + contentType: getFileContentType(file), + fileSize: file.size, + })), } - }) - - await Promise.all(runners) - return results -} - -/** - * Extracts the error name from an unknown error object - */ -const getErrorName = (error: unknown) => - typeof error === 'object' && error !== null && 'name' in error ? String((error as any).name) : '' - -/** - * Extracts a human-readable message from an unknown error - */ -const getErrorMessage = (error: unknown) => - error instanceof Error ? error.message : typeof error === 'string' ? 
error : 'Unknown error' -/** - * Checks if an error is an abort error - */ -const isAbortError = (error: unknown) => getErrorName(error) === 'AbortError' - -/** - * Checks if an error is a network-related error - */ -const isNetworkError = (error: unknown) => { - if (!(error instanceof Error)) { - return false - } - - const message = error.message.toLowerCase() - return ( - message.includes('network') || - message.includes('fetch') || - message.includes('connection') || - message.includes('timeout') || - message.includes('timed out') || - message.includes('ecconnreset') - ) -} - -interface PresignedFileInfo { - path: string - key: string - name: string - size: number - type: string -} - -interface PresignedUploadInfo { - fileName: string - presignedUrl: string - fileInfo: PresignedFileInfo - uploadHeaders?: Record - directUploadSupported: boolean - presignedUrls?: any -} + const response = await fetch(KB_BATCH_PRESIGNED_ENDPOINT, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify(body), + }) -/** - * Normalizes presigned URL response data into a consistent format - */ -const normalizePresignedData = (data: any, context: string): PresignedUploadInfo => { - const presignedUrl = data?.presignedUrl || data?.uploadUrl - const fileInfo = data?.fileInfo + if (!response.ok) { + throw new Error(`Batch presigned URL generation failed: ${response.statusText}`) + } - if (!presignedUrl || !fileInfo?.path) { - throw new PresignedUrlError(`Invalid presigned response for ${context}`, data) + const { files: presignedItems } = (await response.json()) as { files: unknown[] } + batchIndices.forEach((fileIdx, batchPos) => { + result[fileIdx] = normalizePresignedData(presignedItems[batchPos], batchFiles[batchPos].name) + }) } - return { - fileName: data.fileName || fileInfo.name || context, - presignedUrl, - fileInfo: { - path: fileInfo.path, - key: fileInfo.key, - name: fileInfo.name || context, - size: fileInfo.size || data.fileSize || 0, - type: fileInfo.type || data.contentType || '', - }, - uploadHeaders: data.uploadHeaders || undefined, - directUploadSupported: data.directUploadSupported !== false, - presignedUrls: data.presignedUrls, - } + return result } /** - * Fetches presigned URL data for file upload + * Server-proxied fallback used when cloud storage isn't configured. */ -const getPresignedData = async ( +const uploadFileThroughAPI = async ( file: File, - timeoutMs: number, - controller?: AbortController -): Promise => { - const localController = controller ?? 
new AbortController() - const timeoutId = setTimeout(() => localController.abort(), timeoutMs) - const startTime = getHighResTime() + workspaceId: string | undefined +): Promise<{ filePath: string }> => { + const formData = new FormData() + formData.append('file', file) + formData.append('context', 'knowledge-base') + if (workspaceId) formData.append('workspaceId', workspaceId) + + const controller = new AbortController() + const timeoutId = setTimeout(() => controller.abort(), calculateUploadTimeoutMs(file.size)) try { - // boundary-raw-fetch: presigned URL coordination is part of the multipart-signed-url upload flow tracked together with XHR PUT progress; keeping this on raw fetch preserves a single retry/timeout AbortController shared with the direct upload XHR - const presignedResponse = await fetch('/api/files/presigned?type=knowledge-base', { + const response = await fetch(KB_API_UPLOAD_ENDPOINT, { method: 'POST', - headers: { - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - fileName: file.name, - contentType: getFileContentType(file), - fileSize: file.size, - }), - signal: localController.signal, + body: formData, + signal: controller.signal, }) - if (!presignedResponse.ok) { - const { message: fullError, body: errorDetails } = - await readApiResponseError(presignedResponse) - - logger.error('Presigned URL request failed', { - status: presignedResponse.status, - fileSize: file.size, - }) + if (!response.ok) { + let errorData: { message?: string; error?: string } | null = null + try { + errorData = (await response.json()) as { message?: string; error?: string } + } catch {} + throw new KnowledgeUploadError( + `Failed to upload ${file.name}: ${errorData?.message || errorData?.error || response.statusText}`, + 'API_UPLOAD_ERROR', + errorData + ) + } - throw new PresignedUrlError( - `Failed to get presigned URL for ${file.name}: ${fullError}`, - errorDetails + const result = (await response.json()) as { + fileInfo?: { path?: string } + path?: string + } + const filePath = result.fileInfo?.path ?? result.path + if (!filePath) { + throw new KnowledgeUploadError( + `Invalid upload response for ${file.name}: missing file path`, + 'API_UPLOAD_ERROR', + result ) } - const presignedData = await presignedResponse.json() - const durationMs = getHighResTime() - startTime - logger.info('Fetched presigned URL', { - fileName: file.name, - sizeMB: formatMegabytes(file.size), - durationMs: formatDurationSeconds(durationMs), - }) - return normalizePresignedData(presignedData, file.name) + return { filePath } } finally { clearTimeout(timeoutId) - if (!controller) { - localController.abort() - } } } -/** - * Hook for managing file uploads to knowledge bases - */ +const toAbsoluteUrl = (path: string): string => + path.startsWith('http') ? 
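+  // e.g. '/api/files/serve/abc.pdf' -> 'https://app.example.com/api/files/serve/abc.pdf' (hypothetical path and origin)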
path : `${window.location.origin}${path}` + export function useKnowledgeUpload(options: UseKnowledgeUploadOptions = {}) { const queryClient = useQueryClient() const [isUploading, setIsUploading] = useState(false) @@ -396,642 +212,145 @@ export function useKnowledgeUpload(options: UseKnowledgeUploadOptions = {}) { }) const [uploadError, setUploadError] = useState(null) - /** - * Creates an UploadedFile object from file metadata - */ - const createUploadedFile = ( - filename: string, - fileUrl: string, - fileSize: number, - mimeType: string, - originalFile?: File - ): UploadedFile => ({ - filename, - fileUrl, - fileSize, - mimeType, - tag1: (originalFile as any)?.tag1, - tag2: (originalFile as any)?.tag2, - tag3: (originalFile as any)?.tag3, - tag4: (originalFile as any)?.tag4, - tag5: (originalFile as any)?.tag5, - tag6: (originalFile as any)?.tag6, - tag7: (originalFile as any)?.tag7, - }) - - /** - * Creates an UploadError from an exception - */ - const createErrorFromException = (error: unknown, defaultMessage: string): UploadError => { - if (error instanceof KnowledgeUploadError) { - return { - message: error.message, - code: error.code, - details: error.details, - timestamp: Date.now(), - } + const buildUploadedFile = (file: File, fileUrl: string): UploadedFile => { + const f = file as File & { + tag1?: string + tag2?: string + tag3?: string + tag4?: string + tag5?: string + tag6?: string + tag7?: string } - - if (error instanceof Error) { - return { - message: error.message, - timestamp: Date.now(), - } - } - return { - message: defaultMessage, - timestamp: Date.now(), - } - } - - /** - * Upload a single file with retry logic - */ - const uploadSingleFileWithRetry = async ( - file: File, - retryCount = 0, - fileIndex?: number, - presignedOverride?: PresignedUploadInfo - ): Promise => { - const timeoutMs = calculateUploadTimeoutMs(file.size) - let presignedData: PresignedUploadInfo | undefined - const attempt = retryCount + 1 - logger.info('Upload attempt started', { - fileName: file.name, - attempt, - sizeMB: formatMegabytes(file.size), - timeoutMs: formatDurationSeconds(timeoutMs), - }) - - try { - const controller = new AbortController() - const timeoutId = setTimeout(() => controller.abort(), timeoutMs) - - try { - if (file.size > UPLOAD_CONFIG.LARGE_FILE_THRESHOLD) { - presignedData = presignedOverride ?? (await getPresignedData(file, timeoutMs, controller)) - return await uploadFileInChunks(file, presignedData, timeoutMs, fileIndex) - } - - if (presignedOverride?.directUploadSupported && presignedOverride.presignedUrl) { - return await uploadFileDirectly(file, presignedOverride, timeoutMs, controller, fileIndex) - } - - return await uploadFileThroughAPI(file, timeoutMs) - } finally { - clearTimeout(timeoutId) - } - } catch (error) { - const isTimeout = isAbortError(error) - const isNetwork = isNetworkError(error) - - if (retryCount < UPLOAD_CONFIG.MAX_RETRIES) { - const delay = UPLOAD_CONFIG.RETRY_DELAY_MS * UPLOAD_CONFIG.RETRY_BACKOFF ** retryCount - if (isTimeout || isNetwork) { - logger.warn( - `Upload failed (${isTimeout ? 'timeout' : 'network'}), retrying in ${delay / 1000}s...`, - { - attempt: retryCount + 1, - fileSize: file.size, - delay: delay, - } - ) - } - - if (fileIndex !== undefined) { - setUploadProgress((prev) => ({ - ...prev, - fileStatuses: prev.fileStatuses?.map((fs, idx) => - idx === fileIndex ? 
{ ...fs, progress: 0, status: 'uploading' as const } : fs - ), - })) - } - - await sleep(delay) - const shouldReusePresigned = (isTimeout || isNetwork) && presignedData - return uploadSingleFileWithRetry( - file, - retryCount + 1, - fileIndex, - shouldReusePresigned ? presignedData : undefined - ) - } - - logger.error('Upload failed after retries', { - fileSize: file.size, - errorType: isTimeout ? 'timeout' : isNetwork ? 'network' : 'unknown', - attempts: UPLOAD_CONFIG.MAX_RETRIES + 1, - }) - throw error + filename: file.name, + fileUrl, + fileSize: file.size, + mimeType: getFileContentType(file), + tag1: f.tag1, + tag2: f.tag2, + tag3: f.tag3, + tag4: f.tag4, + tag5: f.tag5, + tag6: f.tag6, + tag7: f.tag7, } } - /** - * Upload file directly with timeout and progress tracking - */ - const uploadFileDirectly = async ( - file: File, - presignedData: PresignedUploadInfo, - timeoutMs: number, - outerController: AbortController, - fileIndex?: number - ): Promise => { - return new Promise((resolve, reject) => { - const xhr = new XMLHttpRequest() - let isCompleted = false - const startTime = getHighResTime() - - const timeoutId = setTimeout(() => { - if (!isCompleted) { - isCompleted = true - xhr.abort() - reject(new Error('Upload timeout')) - } - }, timeoutMs) - - const abortHandler = () => { - if (!isCompleted) { - isCompleted = true - clearTimeout(timeoutId) - xhr.abort() - reject(new DirectUploadError(`Upload aborted for ${file.name}`, {})) - } - } - - outerController.signal.addEventListener('abort', abortHandler) - - xhr.upload.addEventListener('progress', (event) => { - if (event.lengthComputable && fileIndex !== undefined && !isCompleted) { - const percentComplete = Math.round((event.loaded / event.total) * 100) - setUploadProgress((prev) => { - if (prev.fileStatuses?.[fileIndex]?.status === 'uploading') { - return { - ...prev, - fileStatuses: prev.fileStatuses?.map((fs, idx) => - idx === fileIndex ? { ...fs, progress: percentComplete } : fs - ), - } - } - return prev - }) - } - }) - - xhr.addEventListener('load', () => { - if (!isCompleted) { - isCompleted = true - clearTimeout(timeoutId) - outerController.signal.removeEventListener('abort', abortHandler) - const durationMs = getHighResTime() - startTime - if (xhr.status >= 200 && xhr.status < 300) { - const fullFileUrl = presignedData.fileInfo.path.startsWith('http') - ? 
presignedData.fileInfo.path - : `${window.location.origin}${presignedData.fileInfo.path}` - logger.info('Direct upload completed', { - fileName: file.name, - sizeMB: formatMegabytes(file.size), - durationMs: formatDurationSeconds(durationMs), - throughputMbps: calculateThroughputMbps(file.size, durationMs), - status: xhr.status, - }) - resolve( - createUploadedFile(file.name, fullFileUrl, file.size, getFileContentType(file), file) - ) - } else { - logger.error('S3 PUT request failed', { - status: xhr.status, - fileSize: file.size, - }) - reject( - new DirectUploadError( - `Direct upload failed for ${file.name}: ${xhr.status} ${xhr.statusText}`, - { - uploadResponse: xhr.statusText, - } - ) - ) - } - } - }) - - xhr.addEventListener('error', () => { - if (!isCompleted) { - isCompleted = true - clearTimeout(timeoutId) - outerController.signal.removeEventListener('abort', abortHandler) - const durationMs = getHighResTime() - startTime - logger.error('Direct upload network error', { - fileName: file.name, - sizeMB: formatMegabytes(file.size), - durationMs: formatDurationSeconds(durationMs), - }) - reject(new DirectUploadError(`Network error uploading ${file.name}`, {})) - } - }) - - xhr.addEventListener('abort', abortHandler) - - xhr.open('PUT', presignedData.presignedUrl) - - xhr.setRequestHeader('Content-Type', file.type) - if (presignedData.uploadHeaders) { - Object.entries(presignedData.uploadHeaders).forEach(([key, value]) => { - xhr.setRequestHeader(key, value as string) - }) - } - - xhr.send(file) - }) + const updateFileStatus = (fileIndex: number, patch: Partial) => { + setUploadProgress((prev) => ({ + ...prev, + fileStatuses: prev.fileStatuses?.map((fs, idx) => + idx === fileIndex ? { ...fs, ...patch } : fs + ), + })) } - /** - * Upload large file in chunks (multipart upload) - */ - const uploadFileInChunks = async ( + const uploadOneFile = async ( file: File, - presignedData: PresignedUploadInfo, - timeoutMs: number, - fileIndex?: number + fileIndex: number, + presigned: PresignedUploadInfo | undefined ): Promise => { - logger.info( - `Uploading large file ${file.name} (${(file.size / 1024 / 1024).toFixed(2)}MB) using multipart upload` - ) - const startTime = getHighResTime() + if (!options.workspaceId) { + throw new KnowledgeUploadError('workspaceId is required for upload', 'MISSING_WORKSPACE_ID') + } - try { - if (!options.workspaceId) { - throw new Error('workspaceId is required for multipart upload') - } + const onProgress = (event: UploadProgressEvent) => { + updateFileStatus(fileIndex, { progress: event.percent, status: 'uploading' }) + } - // boundary-raw-fetch: multipart-signed-url initiate step coordinates the cloud multipart upload token consumed by raw-fetch PUTs to provider-issued part URLs below - const initiateResponse = await fetch('/api/files/multipart?action=initiate', { - method: 'POST', - headers: { 'Content-Type': 'application/json' }, - body: JSON.stringify({ - fileName: file.name, - contentType: getFileContentType(file), - fileSize: file.size, + let attempt = 0 + while (true) { + try { + const result = await runUploadStrategy({ + file, workspaceId: options.workspaceId, - }), - }) - - if (!initiateResponse.ok) { - throw new Error(`Failed to initiate multipart upload: ${initiateResponse.statusText}`) - } - - const { uploadId, key, uploadToken } = await initiateResponse.json() - logger.info(`Initiated multipart upload with ID: ${uploadId}`) - - const chunkSize = UPLOAD_CONFIG.CHUNK_SIZE - const numParts = Math.ceil(file.size / chunkSize) - const partNumbers = 
Array.from({ length: numParts }, (_, i) => i + 1) - - // boundary-raw-fetch: multipart-signed-url get-part-urls step issues provider-signed PUT URLs consumed by the raw-fetch PUT loop below; kept on raw fetch to preserve retry/abort coordination with the part PUTs - const partUrlsResponse = await fetch('/api/files/multipart?action=get-part-urls', { - method: 'POST', - headers: { 'Content-Type': 'application/json' }, - body: JSON.stringify({ - uploadToken, - partNumbers, - }), - }) - - if (!partUrlsResponse.ok) { - // boundary-raw-fetch: multipart-signed-url abort step issued from inside the multipart upload error path; keeps cleanup on the same raw-fetch path as the rest of the multipart flow - await fetch('/api/files/multipart?action=abort', { - method: 'POST', - headers: { 'Content-Type': 'application/json' }, - body: JSON.stringify({ uploadToken }), + context: 'knowledge-base', + presignedEndpoint: '/api/files/presigned?type=knowledge-base', + presignedOverride: presigned, + onProgress, }) - throw new Error(`Failed to get part URLs: ${partUrlsResponse.statusText}`) - } - - const { presignedUrls } = await partUrlsResponse.json() - - const uploadedParts: Array<{ ETag: string; PartNumber: number }> = [] - - const controller = new AbortController() - const multipartTimeoutId = setTimeout(() => controller.abort(), timeoutMs) - - try { - const uploadPart = async ({ partNumber, url }: any) => { - const start = (partNumber - 1) * chunkSize - const end = Math.min(start + chunkSize, file.size) - const chunk = file.slice(start, end) - - for (let attempt = 0; attempt <= UPLOAD_CONFIG.MULTIPART_MAX_RETRIES; attempt++) { - try { - const partResponse = await fetch(url, { - method: 'PUT', - body: chunk, - signal: controller.signal, - headers: { - 'Content-Type': file.type, - }, - }) - - if (!partResponse.ok) { - throw new Error(`Failed to upload part ${partNumber}: ${partResponse.statusText}`) - } - - const etag = partResponse.headers.get('ETag') || '' - logger.info(`Uploaded part ${partNumber}/${numParts}`) - - if (fileIndex !== undefined) { - const partProgress = Math.min(100, Math.round((partNumber / numParts) * 100)) - setUploadProgress((prev) => ({ - ...prev, - fileStatuses: prev.fileStatuses?.map((fs, idx) => - idx === fileIndex ? 
{ ...fs, progress: partProgress } : fs - ), - })) - } - - return { ETag: etag.replace(/"/g, ''), PartNumber: partNumber } - } catch (partError) { - if (attempt >= UPLOAD_CONFIG.MULTIPART_MAX_RETRIES) { - throw partError - } - - const delay = UPLOAD_CONFIG.RETRY_DELAY_MS * UPLOAD_CONFIG.RETRY_BACKOFF ** attempt - logger.warn( - `Part ${partNumber} failed (attempt ${attempt + 1}), retrying in ${Math.round(delay / 1000)}s` - ) - await sleep(delay) - } - } - - throw new Error(`Retries exhausted for part ${partNumber}`) + return buildUploadedFile(file, toAbsoluteUrl(result.path)) + } catch (error) { + if (error instanceof DirectUploadError && error.code === 'FALLBACK_REQUIRED') { + const { filePath } = await uploadFileThroughAPI(file, options.workspaceId) + return buildUploadedFile(file, toAbsoluteUrl(filePath)) } - const partResults = await runWithConcurrency( - presignedUrls, - UPLOAD_CONFIG.MULTIPART_PART_CONCURRENCY, - uploadPart - ) - - partResults.forEach((result) => { - if (result?.status === 'fulfilled') { - uploadedParts.push(result.value) - } else if (result?.status === 'rejected') { - throw result.reason - } - }) - } finally { - clearTimeout(multipartTimeoutId) - } - - // boundary-raw-fetch: multipart-signed-url complete step finalizes the multipart upload coordinated with raw-fetch part PUTs above - const completeResponse = await fetch('/api/files/multipart?action=complete', { - method: 'POST', - headers: { 'Content-Type': 'application/json' }, - body: JSON.stringify({ - uploadToken, - parts: uploadedParts, - }), - }) - - if (!completeResponse.ok) { - throw new Error(`Failed to complete multipart upload: ${completeResponse.statusText}`) - } - - const { path } = await completeResponse.json() - logger.info(`Completed multipart upload for ${file.name}`) - - const durationMs = getHighResTime() - startTime - logger.info('Multipart upload metrics', { - fileName: file.name, - sizeMB: formatMegabytes(file.size), - parts: uploadedParts.length, - durationMs: formatDurationSeconds(durationMs), - throughputMbps: calculateThroughputMbps(file.size, durationMs), - }) - - const fullFileUrl = path.startsWith('http') ? 
path : `${window.location.origin}${path}` - - return createUploadedFile(file.name, fullFileUrl, file.size, getFileContentType(file), file) - } catch (error) { - logger.error(`Multipart upload failed for ${file.name}:`, error) - const durationMs = getHighResTime() - startTime - logger.warn('Falling back to direct upload after multipart failure', { - fileName: file.name, - sizeMB: formatMegabytes(file.size), - durationMs: formatDurationSeconds(durationMs), - }) - return uploadFileDirectly(file, presignedData, timeoutMs, new AbortController(), fileIndex) - } - } - - /** - * Fallback upload through API - */ - const uploadFileThroughAPI = async (file: File, timeoutMs: number): Promise => { - const controller = new AbortController() - const timeoutId = setTimeout(() => controller.abort(), timeoutMs) - - try { - const formData = new FormData() - formData.append('file', file) - formData.append('context', 'knowledge-base') - - if (options.workspaceId) { - formData.append('workspaceId', options.workspaceId) - } - - // boundary-raw-fetch: multipart/form-data upload (FileUpload boundary), incompatible with requestJson which JSON-stringifies bodies - const uploadResponse = await fetch('/api/files/upload', { - method: 'POST', - body: formData, - signal: controller.signal, - }) - - if (!uploadResponse.ok) { - const { message: fullError, body: errorData } = await readApiResponseError(uploadResponse) - throw new DirectUploadError(`Failed to upload ${file.name}: ${fullError}`, errorData) - } - - const uploadResult = await uploadResponse.json() - - const filePath = uploadResult.fileInfo?.path || uploadResult.path + const retryable = isNetworkError(error) || isTransientUploadError(error) + if (isAbortError(error) || !retryable || attempt >= MULTIPART_MAX_RETRIES) { + throw error + } - if (!filePath) { - throw new DirectUploadError( - `Invalid upload response for ${file.name}: missing file path`, - uploadResult + const delay = MULTIPART_RETRY_DELAY_MS * MULTIPART_RETRY_BACKOFF ** attempt + attempt++ + logger.warn( + `Upload retry ${attempt}/${MULTIPART_MAX_RETRIES} for ${file.name} in ${Math.round(delay / 1000)}s` ) + updateFileStatus(fileIndex, { progress: 0, status: 'uploading' }) + await sleep(delay) } - - return createUploadedFile( - file.name, - filePath.startsWith('http') ? 
filePath : `${window.location.origin}${filePath}`, - file.size, - getFileContentType(file), - file - ) - } finally { - clearTimeout(timeoutId) } } - /** - * Uploads files in batches using presigned URLs - */ const uploadFilesInBatches = async (files: File[]): Promise => { - const results: UploadedFile[] = [] - const failedFiles: Array<{ file: File; error: Error }> = [] - const fileStatuses: FileUploadStatus[] = files.map((file) => ({ fileName: file.name, fileSize: file.size, - status: 'pending' as const, + status: 'pending', progress: 0, })) - setUploadProgress((prev) => ({ - ...prev, - fileStatuses, - })) + setUploadProgress((prev) => ({ ...prev, fileStatuses })) logger.info(`Starting batch upload of ${files.length} files`) - try { - const batches = [] - - for ( - let batchStart = 0; - batchStart < files.length; - batchStart += UPLOAD_CONFIG.BATCH_REQUEST_SIZE - ) { - const batchFiles = files.slice(batchStart, batchStart + UPLOAD_CONFIG.BATCH_REQUEST_SIZE) - const batchIndexOffset = batchStart - batches.push({ batchFiles, batchIndexOffset }) - } - - logger.info(`Starting parallel processing of ${batches.length} batches`) - - const presignedPromises = batches.map(async ({ batchFiles }, batchIndex) => { - logger.info( - `Getting presigned URLs for batch ${batchIndex + 1}/${batches.length} (${batchFiles.length} files)` - ) - - const batchRequest = { - files: batchFiles.map((file) => ({ - fileName: file.name, - contentType: getFileContentType(file), - fileSize: file.size, - })), - } - - // boundary-raw-fetch: signed-URL batch coordination for direct uploads handed to raw-fetch PUTs against provider URLs; kept on raw fetch to preserve the same controller/timeout used by the multipart flow - const batchResponse = await fetch('/api/files/presigned/batch?type=knowledge-base', { - method: 'POST', - headers: { 'Content-Type': 'application/json' }, - body: JSON.stringify(batchRequest), - }) - - if (!batchResponse.ok) { - const { message: fullError } = await readApiResponseError(batchResponse) - throw new Error(`Batch ${batchIndex + 1} presigned URL generation failed: ${fullError}`) - } - - const { files: presignedData } = await batchResponse.json() - return { batchFiles, presignedData, batchIndex } - }) - - const allPresignedData = await Promise.all(presignedPromises) - logger.info(`Got all presigned URLs, starting uploads`) - - const allUploads = allPresignedData.flatMap(({ batchFiles, presignedData, batchIndex }) => { - const batchIndexOffset = batchIndex * UPLOAD_CONFIG.BATCH_REQUEST_SIZE - - return batchFiles.map((file, batchFileIndex) => { - const fileIndex = batchIndexOffset + batchFileIndex - const presigned = presignedData[batchFileIndex] - - return { file, presigned, fileIndex } - }) - }) - - const uploadResults = await runWithConcurrency( - allUploads, - UPLOAD_CONFIG.MAX_PARALLEL_UPLOADS, - async ({ file, presigned, fileIndex }) => { - if (!presigned) { - throw new Error(`No presigned data for file ${file.name}`) - } + const presignedData = await fetchBatchPresignedData(files) + const settled = await runWithConcurrency( + files, + WHOLE_FILE_PARALLEL_UPLOADS, + async (file, index) => { + updateFileStatus(index, { status: 'uploading' }) + try { + const uploaded = await uploadOneFile(file, index, presignedData[index]) setUploadProgress((prev) => ({ ...prev, - fileStatuses: prev.fileStatuses?.map((fs, idx) => - idx === fileIndex ? 
{ ...fs, status: 'uploading' as const } : fs - ), + filesCompleted: prev.filesCompleted + 1, })) - - try { - const result = await uploadSingleFileWithRetry(file, 0, fileIndex, presigned) - - setUploadProgress((prev) => ({ - ...prev, - filesCompleted: prev.filesCompleted + 1, - fileStatuses: prev.fileStatuses?.map((fs, idx) => - idx === fileIndex ? { ...fs, status: 'completed' as const, progress: 100 } : fs - ), - })) - - return result - } catch (error) { - setUploadProgress((prev) => ({ - ...prev, - fileStatuses: prev.fileStatuses?.map((fs, idx) => - idx === fileIndex - ? { - ...fs, - status: 'failed' as const, - error: getErrorMessage(error), - } - : fs - ), - })) - throw error - } + updateFileStatus(index, { status: 'completed', progress: 100 }) + return uploaded + } catch (error) { + updateFileStatus(index, { status: 'failed', error: getErrorMessage(error) }) + throw error } - ) - - uploadResults.forEach((result, idx) => { - if (result?.status === 'fulfilled') { - results.push(result.value) - } else if (result?.status === 'rejected') { - failedFiles.push({ - file: allUploads[idx].file, - error: - result.reason instanceof Error ? result.reason : new Error(String(result.reason)), - }) - } - }) + } + ) - if (failedFiles.length > 0) { - logger.error(`Failed to upload ${failedFiles.length} files`) - throw new KnowledgeUploadError( - `Failed to upload ${failedFiles.length} file(s)`, - 'PARTIAL_UPLOAD_FAILURE', - { - failedFiles, - uploadedFiles: results, - } - ) + const succeeded: UploadedFile[] = [] + const failed: Array<{ file: File; error: Error }> = [] + settled.forEach((result, idx) => { + if (result?.status === 'fulfilled') { + succeeded.push(result.value) + } else if (result?.status === 'rejected') { + failed.push({ + file: files[idx], + error: result.reason instanceof Error ? result.reason : new Error(String(result.reason)), + }) } + }) - return results - } catch (error) { - logger.error('Batch upload failed:', error) - throw error + if (failed.length > 0) { + throw new KnowledgeUploadError( + `Failed to upload ${failed.length} file(s)`, + 'PARTIAL_UPLOAD_FAILURE', + { failedFiles: failed, uploadedFiles: succeeded } + ) } + + return succeeded } - /** - * Main upload function that handles file uploads and document processing - */ const uploadFiles = async ( files: File[], knowledgeBaseId: string, @@ -1040,7 +359,6 @@ export function useKnowledgeUpload(options: UseKnowledgeUploadOptions = {}) { if (files.length === 0) { throw new KnowledgeUploadError('No files provided for upload', 'NO_FILES') } - if (!knowledgeBaseId?.trim()) { throw new KnowledgeUploadError('Knowledge base ID is required', 'INVALID_KB_ID') } @@ -1054,43 +372,49 @@ export function useKnowledgeUpload(options: UseKnowledgeUploadOptions = {}) { setUploadProgress((prev) => ({ ...prev, stage: 'processing' })) - const processPayload = { - documents: uploadedFiles.map((file) => ({ - ...file, - })), - processingOptions: { - recipe: processingOptions.recipe ?? 'default', - lang: 'en', - }, - bulk: true, - } + // boundary-raw-fetch: bulk document-processing kickoff with dynamic recipe payload; response is consumed alongside the upload progress lifecycle and not modeled by a single contract + const processResponse = await fetch(`/api/knowledge/${knowledgeBaseId}/documents`, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ + documents: uploadedFiles.map((f) => ({ ...f })), + processingOptions: { + recipe: processingOptions.recipe ?? 
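+            // falls back to 'default' when the caller doesn't set processingOptions.recipe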
'default', + lang: 'en', + }, + bulk: true, + }), + }) - let processResult: Awaited< - ReturnType> - > - try { - processResult = await requestJson(createKnowledgeDocumentsContract, { - params: { id: knowledgeBaseId }, - body: processPayload, + if (!processResponse.ok) { + let errorData: { error?: string; message?: string } | null = null + try { + errorData = (await processResponse.json()) as { error?: string; message?: string } + } catch {} + logger.error('Document processing failed:', { + status: processResponse.status, + error: errorData, }) - } catch (err) { - if (isApiClientError(err)) { - logger.error('Document processing failed:', { - status: err.status, - error: err.body, - uploadedFiles: uploadedFiles.map((f) => ({ - filename: f.filename, - fileUrl: f.fileUrl, - fileSize: f.fileSize, - mimeType: f.mimeType, - })), - }) - throw new ProcessingError(`Failed to start document processing: ${err.message}`, err.body) - } - throw err + throw new ProcessingError( + `Failed to start document processing: ${errorData?.error || errorData?.message || 'Unknown error'}`, + errorData + ) } - if (!('documentsCreated' in processResult.data)) { + const processResult = (await processResponse.json()) as { + success?: boolean + error?: string + data?: { documentsCreated?: unknown } + } + + if (!processResult.success) { + throw new ProcessingError( + `Document processing failed: ${processResult.error || 'Unknown error'}`, + processResult + ) + } + + if (!processResult.data?.documentsCreated) { throw new ProcessingError( 'Invalid processing response: missing document data', processResult @@ -1098,23 +422,25 @@ export function useKnowledgeUpload(options: UseKnowledgeUploadOptions = {}) { } setUploadProgress((prev) => ({ ...prev, stage: 'completing' })) - logger.info(`Successfully started processing ${uploadedFiles.length} documents`) - await queryClient.invalidateQueries({ - queryKey: knowledgeKeys.detail(knowledgeBaseId), - }) + await queryClient.invalidateQueries({ queryKey: knowledgeKeys.detail(knowledgeBaseId) }) return uploadedFiles } catch (err) { logger.error('Error uploading documents:', err) - const error = createErrorFromException(err, 'Unknown error occurred during upload') + const error: UploadError = + err instanceof KnowledgeUploadError + ? { message: err.message, code: err.code, details: err.details, timestamp: Date.now() } + : err instanceof DirectUploadError + ? { message: err.message, code: err.code, details: err.details, timestamp: Date.now() } + : err instanceof Error + ? 
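+              // e.g. a KnowledgeUploadError('No files provided for upload', 'NO_FILES') keeps its code and details; a bare Error surfaces as message + timestamp only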
{ message: err.message, timestamp: Date.now() } + : { message: 'Unknown error occurred during upload', timestamp: Date.now() } + setUploadError(error) options.onError?.(error) - - logger.error('Document upload failed:', error.message) - throw err } finally { setIsUploading(false) @@ -1122,9 +448,6 @@ export function useKnowledgeUpload(options: UseKnowledgeUploadOptions = {}) { } } - /** - * Clears the current upload error - */ const clearError = useCallback(() => { setUploadError(null) }, []) diff --git a/apps/sim/app/workspace/[workspaceId]/logs/components/log-details/log-details.tsx b/apps/sim/app/workspace/[workspaceId]/logs/components/log-details/log-details.tsx index ee33779f6dd..c1cd8c78b91 100644 --- a/apps/sim/app/workspace/[workspaceId]/logs/components/log-details/log-details.tsx +++ b/apps/sim/app/workspace/[workspaceId]/logs/components/log-details/log-details.tsx @@ -664,7 +664,10 @@ export const LogDetails = memo(function LogDetails({ const { handleMouseDown } = useLogDetailsResize() const maxVw = `${MAX_LOG_DETAILS_WIDTH_RATIO * 100}vw` - const effectiveWidth = `clamp(${MIN_LOG_DETAILS_WIDTH}px, ${panelWidth}px, ${maxVw})` + // CSS-side clamp matching `clampPanelWidth` in stores/logs/utils.ts: the + // floor is itself capped at the max-vw ratio so a narrow viewport doesn't + // let the min outpace the cap and cover the table behind the panel. + const effectiveWidth = `clamp(min(${MIN_LOG_DETAILS_WIDTH}px, ${maxVw}), ${panelWidth}px, ${maxVw})` useEffect(() => { const handleKeyDown = (e: KeyboardEvent) => { diff --git a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/column-sidebar/column-sidebar.tsx b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/column-sidebar/column-sidebar.tsx new file mode 100644 index 00000000000..69fc07bcf02 --- /dev/null +++ b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/column-sidebar/column-sidebar.tsx @@ -0,0 +1,1312 @@ +'use client' + +import type React from 'react' +import { useEffect, useMemo, useRef, useState } from 'react' +import { toError } from '@sim/utils/errors' +import { generateId } from '@sim/utils/id' +import { useMutation, useQueryClient } from '@tanstack/react-query' +import { + ChevronDown, + ChevronRight, + ExternalLink, + Loader2, + Plus, + RepeatIcon, + SplitIcon, + X, +} from 'lucide-react' +import { + Button, + Checkbox, + Combobox, + Expandable, + ExpandableContent, + Input, + Label, + Switch, + Tooltip, + toast, +} from '@/components/emcn' +import { requestJson } from '@/lib/api/client/request' +import type { + AddWorkflowGroupBodyInput, + UpdateWorkflowGroupBodyInput, +} from '@/lib/api/contracts/tables' +import { + putWorkflowNormalizedStateContract, + type WorkflowStateContractInput, +} from '@/lib/api/contracts/workflows' +import { cn } from '@/lib/core/utils/cn' +import type { + ColumnDefinition, + WorkflowGroup, + WorkflowGroupDependencies, + WorkflowGroupOutput, +} from '@/lib/table' +import { columnTypeForLeaf, deriveOutputColumnName } from '@/lib/table/column-naming' +import { + type FlattenOutputsBlockInput, + type FlattenOutputsEdgeInput, + flattenWorkflowOutputs, + getBlockExecutionOrder, +} from '@/lib/workflows/blocks/flatten-outputs' +import { normalizeInputFormatValue } from '@/lib/workflows/input-format' +import { TriggerUtils } from '@/lib/workflows/triggers/triggers' +import type { InputFormatField } from '@/lib/workflows/types' +import { PreviewWorkflow } from '@/app/workspace/[workspaceId]/w/components/preview' +import { getBlock } from '@/blocks' 
+import { + useAddTableColumn, + useAddWorkflowGroup, + useUpdateColumn, + useUpdateWorkflowGroup, +} from '@/hooks/queries/tables' +import { useWorkflowState, workflowKeys } from '@/hooks/queries/workflows' +import type { WorkflowMetadata } from '@/stores/workflows/registry/types' +import { COLUMN_TYPE_OPTIONS, type SidebarColumnType } from './column-types' + +export type ColumnConfigState = + | { mode: 'edit'; columnName: string } + | { mode: 'new'; columnName: string; workflowId: string; proposedName: string } + | { + mode: 'create' + columnName: string + proposedName: string + /** When present, the sidebar opens with the workflow type pre-selected. */ + workflowId?: string + } + | null + +interface ColumnSidebarProps { + configState: ColumnConfigState + onClose: () => void + /** The current column record for edit mode. Null for new mode or closed. */ + existingColumn: ColumnDefinition | null + allColumns: ColumnDefinition[] + workflowGroups: WorkflowGroup[] + workflows: WorkflowMetadata[] | undefined + workspaceId: string + tableId: string +} + +const OUTPUT_VALUE_SEPARATOR = '::' + +/** Shared dashed-divider style — mirrors the workflow editor's subblock divider. */ +const DASHED_DIVIDER_STYLE = { + backgroundImage: + 'repeating-linear-gradient(to right, var(--border) 0px, var(--border) 6px, transparent 6px, transparent 12px)', +} as const + +/** Encodes blockId + path so duplicate field names across blocks stay distinct in the picker UI. */ +const encodeOutputValue = (blockId: string, path: string) => + `${blockId}${OUTPUT_VALUE_SEPARATOR}${path}` + +/** Splits an encoded `${blockId}::${path}` into its components for persistence. */ +const decodeOutputValue = (value: string): { blockId: string; path: string } => { + const idx = value.indexOf(OUTPUT_VALUE_SEPARATOR) + if (idx === -1) return { blockId: '', path: value } + return { blockId: value.slice(0, idx), path: value.slice(idx + OUTPUT_VALUE_SEPARATOR.length) } +} + +interface BlockOutputGroup { + blockId: string + blockName: string + blockType: string + blockIcon: string | React.ComponentType<{ className?: string }> + blockColor: string + paths: string[] +} + +/** + * Loose shape of `useWorkflowState` data — we only need the fields we round-trip + * through PUT /state. Typed locally to avoid pulling the heavy `WorkflowState` + * generic from `@/stores/workflows/workflow/types`. + */ +interface WorkflowStatePayload { + blocks: Record< + string, + { + type: string + subBlocks?: Record + } & Record + > + edges: unknown[] + loops: unknown + parallels: unknown + lastSaved?: number + isDeployed?: boolean +} + +function tableColumnTypeToInputType(colType: ColumnDefinition['type'] | undefined): string { + switch (colType) { + case 'number': + return 'number' + case 'boolean': + return 'boolean' + case 'json': + return 'object' + default: + return 'string' + } +} + +const TagIcon: React.FC<{ + icon: string | React.ComponentType<{ className?: string }> + color: string +}> = ({ icon, color }) => ( +
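+  // a string icon is an emoji rendered as text; otherwise render the block's icon component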
+    {typeof icon === 'string' ? (
+      {icon}
+    ) : (
+      (() => {
+        const IconComponent = icon
+        return <IconComponent />
+      })()
+    )}
+  
+) + +function FieldDivider() { + return ( +
+      <div style={DASHED_DIVIDER_STYLE} />
+ ) +} + +/** Mirrors the workflow editor's required-field label: title + asterisk. */ +function FieldLabel({ + htmlFor, + required, + children, +}: { + htmlFor?: string + required?: boolean + children: React.ReactNode +}) { + return ( + + ) +} + +/** Inline validation message styled like the workflow editor's destructive text. */ +function FieldError({ message }: { message: string }) { + return

{message}

+} + +/** + * Tinted inline warning row with a message on the left and an action button + * on the right. Stacks naturally — render multiple in sequence and they line + * up. Color mirrors the group-header deploy badge: `red` for blocking states, + * `amber` for soft warnings. + */ +function WarningRow({ + tone, + message, + action, +}: { + tone: 'red' | 'amber' + message: string + action: React.ReactNode +}) { + return ( +
+ + {message} + +
{action}
+
+  )
+}
+
+/**
+ * Collapsible "Run settings" section. Collapsed by default since outputs are
+ * the primary focus when configuring a workflow column; most users never need
+ * to touch the trigger conditions. The header shows a one-line summary of when
+ * the group will fire, so the current state is visible without expanding.
+ */
+function RunSettingsSection({
+  open,
+  onOpenChange,
+  summary,
+  scalarDepColumns,
+  groupDepOptions,
+  deps,
+  groupDeps,
+  workflows,
+  onToggleDep,
+  onToggleGroupDep,
+}: {
+  open: boolean
+  onOpenChange: (open: boolean) => void
+  summary: string
+  scalarDepColumns: ColumnDefinition[]
+  groupDepOptions: WorkflowGroup[]
+  deps: string[]
+  groupDeps: string[]
+  workflows: WorkflowMetadata[] | undefined
+  onToggleDep: (name: string) => void
+  onToggleGroupDep: (groupId: string) => void
+}) {
+  return (
+    
+ + + +
+ {scalarDepColumns.length === 0 && groupDepOptions.length === 0 ? ( +
+ No upstream columns or groups. +
+ ) : ( + <> + {scalarDepColumns.map((c, idx) => { + const checked = deps.includes(c.name) + const isLast = idx === scalarDepColumns.length - 1 && groupDepOptions.length === 0 + return ( +
onToggleDep(c.name)} + onKeyDown={(e) => { + if (e.key === ' ' || e.key === 'Enter') { + e.preventDefault() + onToggleDep(c.name) + } + }} + className={cn( + 'flex h-[36px] flex-shrink-0 cursor-pointer items-center gap-2.5 px-2.5 hover:bg-[var(--surface-2)]', + !isLast && 'border-[var(--border)] border-b' + )} + > + + + {c.name} + + + {c.type} + +
+ ) + })} + {groupDepOptions.map((g, idx) => { + const checked = groupDeps.includes(g.id) + const isLast = idx === groupDepOptions.length - 1 + const wf = workflows?.find((w) => w.id === g.workflowId) + const color = wf?.color ?? 'var(--text-muted)' + const label = g.name ?? wf?.name ?? 'Workflow' + return ( +
onToggleGroupDep(g.id)} + onKeyDown={(e) => { + if (e.key === ' ' || e.key === 'Enter') { + e.preventDefault() + onToggleGroupDep(g.id) + } + }} + className={cn( + 'flex h-[36px] flex-shrink-0 cursor-pointer items-center gap-2.5 px-2.5 hover:bg-[var(--surface-2)]', + !isLast && 'border-[var(--border)] border-b' + )} + > + +
+ ) + })} + + )} +
+
+
+
+ ) +} + +/** + * Right-edge configuration panel for any column. + * + * Shows name / type / unique for every column, plus workflow-specific fields + * (workflow picker, output field, dependencies, run concurrency) when the + * selected type is `'workflow'`. + * + * Three modes: + * - 'edit': modify an existing column. PATCH sends a unified updates payload. + * - 'new': user picked a workflow via Change type → Workflow → [pick]. Nothing + * is persisted yet. Save writes type + workflowConfig + renames in one PATCH. + * - 'create': user picked a workflow from "Add column"; the column doesn't exist yet + * and Save creates it. + * + * Visual styling mirrors the workflow editor's subblock panel (label above + * control, dashed dividers between fields). + */ +export function ColumnSidebar({ + configState, + onClose, + existingColumn, + allColumns, + workflowGroups, + workflows, + workspaceId, + tableId, +}: ColumnSidebarProps) { + const updateColumn = useUpdateColumn({ workspaceId, tableId }) + const addColumn = useAddTableColumn({ workspaceId, tableId }) + const addWorkflowGroup = useAddWorkflowGroup({ workspaceId, tableId }) + const updateWorkflowGroup = useUpdateWorkflowGroup({ workspaceId, tableId }) + const open = configState !== null + + const columnName = configState ? configState.columnName : '' + + /** + * If the column being edited is a workflow output, resolve its parent group + * so we can populate workflow / outputs / dependencies state from it. + */ + const existingGroup = useMemo(() => { + if (!existingColumn?.workflowGroupId) return undefined + return workflowGroups.find((g) => g.id === existingColumn.workflowGroupId) + }, [existingColumn, workflowGroups]) + + const [nameInput, setNameInput] = useState('') + const [typeInput, setTypeInput] = useState('string') + + const isWorkflow = !!existingGroup || configState?.mode === 'new' || typeInput === 'workflow' + + /** + * Show the Column name field whenever a *specific* column is open: scalar + * columns (create or edit) and per-output workflow columns (edit only). Hide + * it when the surface is the workflow-group as a whole — i.e. creating a + * brand-new workflow column where individual output names are auto-derived. + */ + const showColumnNameField = + !isWorkflow || configState?.mode === 'edit' || configState?.mode === 'new' + + /** + * Columns to the left of the current column — these are the only valid trigger + * dependencies, since a workflow column can't depend on values that haven't been + * filled yet. For 'create' mode the column doesn't exist yet, so every existing + * column counts as left of it. + */ + const otherColumns = useMemo(() => { + if (!configState) return [] + if (configState.mode === 'create') return allColumns + const idx = allColumns.findIndex((c) => c.name === configState.columnName) + if (idx === -1) return allColumns.filter((c) => c.name !== configState.columnName) + return allColumns.slice(0, idx) + }, [configState, allColumns]) + + /** + * Split `otherColumns` into the two dep buckets: + * - `scalarDepColumns` — plain columns; tickable into `dependencies.columns`. + * - `groupDepOptions` — producing workflow groups whose outputs land left of the + * current column; tickable into `dependencies.workflowGroups`. A group only + * shows up here when at least one of its output columns is left-of-current. + * The current group itself is excluded so we never depend on ourselves. 
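+   *   Hypothetical example: given columns [priority, summary, status] where
+   *   `summary` is an output of producing group A and the sidebar is open on
+   *   `status`, the buckets come out as scalarDepColumns = [priority] and
+   *   groupDepOptions = [A].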
+
+  /**
+   * Split `otherColumns` into the two dep buckets:
+   * - `scalarDepColumns` — plain columns; tickable into `dependencies.columns`.
+   * - `groupDepOptions` — producing workflow groups whose outputs land left of the
+   *   current column; tickable into `dependencies.workflowGroups`. A group only
+   *   shows up here when at least one of its output columns is left-of-current.
+   *   The current group itself is excluded so we never depend on ourselves.
+   */
+  const scalarDepColumns = useMemo(
+    () => otherColumns.filter((c) => !c.workflowGroupId),
+    [otherColumns]
+  )
+  const groupDepOptions = useMemo(() => {
+    const seen = new Set<string>()
+    const result: WorkflowGroup[] = []
+    for (const c of otherColumns) {
+      if (!c.workflowGroupId) continue
+      if (seen.has(c.workflowGroupId)) continue
+      if (existingGroup && c.workflowGroupId === existingGroup.id) continue
+      const g = workflowGroups.find((gg) => gg.id === c.workflowGroupId)
+      if (!g) continue
+      seen.add(c.workflowGroupId)
+      result.push(g)
+    }
+    return result
+  }, [otherColumns, workflowGroups, existingGroup])
+
+  const [uniqueInput, setUniqueInput] = useState(false)
+  const [selectedWorkflowId, setSelectedWorkflowId] = useState('')
+  /** Plain (non-workflow-output) column names this group waits on. */
+  const [deps, setDeps] = useState<string[]>([])
+  /** Producing workflow group ids this group waits on. Workflow-output columns are
+   * represented by their parent group, since the schema validator forbids depending
+   * on a workflow-output column directly (`workflow-columns.ts` enforces this). */
+  const [groupDeps, setGroupDeps] = useState<string[]>([])
+  /** Encoded `${blockId}::${path}` values — disambiguates duplicate paths in the picker. */
+  const [selectedOutputs, setSelectedOutputs] = useState<string[]>([])
+  /** Surfaces required-field errors only after a save attempt, matching the workflow editor's deploy flow. */
+  const [showValidation, setShowValidation] = useState(false)
+  /** Save-time error (network/validation thrown by the mutation). Rendered inline next to the footer
+   * buttons so it isn't covered by the toaster, which sits over the bottom-right of the panel. */
+  const [saveError, setSaveError] = useState<string | null>(null)
+  /** Run settings (the trigger-deps picker) starts collapsed — outputs are the
+   * primary task; configuring run timing is rare. */
+  const [runSettingsOpen, setRunSettingsOpen] = useState(false)
+
+  const existingColumnRef = useRef(existingColumn)
+  existingColumnRef.current = existingColumn
+  const allColumnsRef = useRef(allColumns)
+  allColumnsRef.current = allColumns
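The init effect below sanitizes legacy persisted deps by lifting workflow-output column names into group ids. Read as a pure function, with simplified shapes (this is an illustrative restatement, not the component's actual code):

```ts
// Column names that point at workflow-output columns are lifted to their
// parent group id; plain column names pass through unchanged.
interface Col {
  name: string
  workflowGroupId?: string
}

function liftLegacyDeps(persistedCols: string[], persistedGroups: string[], cols: Col[]) {
  const groupIds = new Set(persistedGroups)
  const cleanCols: string[] = []
  for (const name of persistedCols) {
    const c = cols.find((cc) => cc.name === name)
    if (c?.workflowGroupId) groupIds.add(c.workflowGroupId)
    else cleanCols.push(name)
  }
  return { columns: cleanCols, workflowGroups: Array.from(groupIds) }
}

console.log(
  liftLegacyDeps(['title', 'summary'], [], [
    { name: 'title' },
    { name: 'summary', workflowGroupId: 'g1' },
  ])
)
// { columns: ['title'], workflowGroups: ['g1'] }
```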
+
+  useEffect(() => {
+    if (!open || !configState) return
+    setShowValidation(false)
+    setSaveError(null)
+    setRunSettingsOpen(false)
+    const existing = existingColumnRef.current
+    const cols = allColumnsRef.current
+    const leftOfCurrent = (() => {
+      if (configState.mode === 'create') return cols
+      const idx = cols.findIndex((c) => c.name === configState.columnName)
+      if (idx === -1) return cols.filter((c) => c.name !== configState.columnName)
+      return cols.slice(0, idx)
+    })()
+    // Default deps when there's no persisted group yet: tick every left-of-current
+    // scalar column + every left-of-current producing group.
+    const defaultScalarDeps = leftOfCurrent.filter((c) => !c.workflowGroupId).map((c) => c.name)
+    const defaultGroupDeps = (() => {
+      const seen = new Set<string>()
+      for (const c of leftOfCurrent) {
+        if (c.workflowGroupId) seen.add(c.workflowGroupId)
+      }
+      return Array.from(seen)
+    })()
+    if (configState.mode === 'edit') {
+      const group = existing?.workflowGroupId
+        ? workflowGroups.find((g) => g.id === existing.workflowGroupId)
+        : undefined
+      // Surface workflow-typed columns as `'workflow'` in the combobox even
+      // though they're stored as scalar columns under the hood.
+      setTypeInput(group ? 'workflow' : (existing?.type ?? 'string'))
+      setUniqueInput(!!existing?.unique)
+      setNameInput(existing?.name ?? configState.columnName)
+      if (group) {
+        setSelectedWorkflowId(group.workflowId)
+        // Sanitize legacy persisted deps: any workflow-output column names that
+        // sneaked into `dependencies.columns` (writes from before the schema
+        // validator forbade them) are lifted into `workflowGroups` here so the
+        // sidebar surfaces a re-saveable state.
+        const persistedCols = group.dependencies?.columns
+        const persistedGroups = group.dependencies?.workflowGroups
+        if (persistedCols !== undefined || persistedGroups !== undefined) {
+          const liftedGroupIds = new Set(persistedGroups ?? [])
+          const cleanCols: string[] = []
+          for (const colName of persistedCols ?? []) {
+            const c = cols.find((cc) => cc.name === colName)
+            if (c?.workflowGroupId) liftedGroupIds.add(c.workflowGroupId)
+            else cleanCols.push(colName)
+          }
+          setDeps(cleanCols)
+          setGroupDeps(Array.from(liftedGroupIds))
+        } else {
+          setDeps(defaultScalarDeps)
+          setGroupDeps(defaultGroupDeps)
+        }
+        setSelectedOutputs([]) // re-encoded against current workflow blocks below
+      } else {
+        setSelectedWorkflowId('')
+        setDeps([])
+        setGroupDeps([])
+        setSelectedOutputs([])
+      }
+    } else {
+      const workflowId =
+        'workflowId' in configState && configState.workflowId ? configState.workflowId : ''
+      setTypeInput(workflowId ? 'workflow' : 'string')
+      setUniqueInput(false)
+      setNameInput(configState.proposedName)
+      setSelectedWorkflowId(workflowId)
+      setDeps(defaultScalarDeps)
+      setGroupDeps(defaultGroupDeps)
+      setSelectedOutputs([])
+    }
+  }, [open, configState, workflowGroups])
+
+  const workflowState = useWorkflowState(
+    open && isWorkflow && selectedWorkflowId ? selectedWorkflowId : undefined
+  )
+
+  /**
+   * Resolves the unified Start block id and its current `inputFormat` field
+   * names. The "Add inputs" mutation only adds rows for table columns that
+   * aren't already represented in the start block — clicking the button when
+   * everything's covered does nothing, so we hide it in that case.
+   */
+  const startBlockInputs = useMemo<{
+    blockId: string | null
+    existingNames: Set<string>
+    existing: InputFormatField[]
+  }>(() => {
+    const blocks = (workflowState.data as { blocks?: Record<string, unknown> } | null)
+      ?.blocks
+    if (!blocks) return { blockId: null, existingNames: new Set(), existing: [] }
+    const candidate = TriggerUtils.findStartBlock(blocks, 'manual')
+    if (!candidate) return { blockId: null, existingNames: new Set(), existing: [] }
+    const block = blocks[candidate.blockId] as
+      | { subBlocks?: Record<string, { value?: unknown }> }
+      | undefined
+    const existing = normalizeInputFormatValue(block?.subBlocks?.inputFormat?.value)
+    return {
+      blockId: candidate.blockId,
+      existingNames: new Set(existing.map((f) => f.name).filter((n): n is string => !!n)),
+      existing,
+    }
+  }, [workflowState.data])
+
+  const missingInputColumnNames = useMemo(() => {
+    if (!startBlockInputs.blockId) return []
+    return allColumns
+      .filter(
+        (c) =>
+          c.name !== columnName && !c.workflowGroupId && !startBlockInputs.existingNames.has(c.name)
+      )
+      .map((c) => c.name)
+  }, [allColumns, columnName, startBlockInputs])
+
+  const queryClient = useQueryClient()
+  const addInputsMutation = useMutation({
+    mutationFn: async () => {
+      const wfId = selectedWorkflowId
+      const startBlockId = startBlockInputs.blockId
+      const state = workflowState.data as WorkflowStatePayload | null | undefined
+      if (!wfId || !startBlockId || !state || missingInputColumnNames.length === 0) {
+        throw new Error('Nothing to add')
+      }
+      const startBlock = state.blocks[startBlockId]
+      if (!startBlock) throw new Error('Start block missing from workflow')
+
+      const newFields: InputFormatField[] = missingInputColumnNames.map((name) => {
+        const col = allColumns.find((c) => c.name === name)
+        return {
+          id: generateId(),
+          name,
+          type: tableColumnTypeToInputType(col?.type),
+          value: '',
+          collapsed: false,
+        } as InputFormatField & { id: string; collapsed: boolean }
+      })
+
+      const updatedSubBlock = {
+        ...(startBlock.subBlocks?.inputFormat ?? { id: 'inputFormat', type: 'input-format' }),
+        value: [...startBlockInputs.existing, ...newFields],
+      }
+      const updatedBlocks = {
+        ...state.blocks,
+        [startBlockId]: {
+          ...startBlock,
+          subBlocks: { ...startBlock.subBlocks, inputFormat: updatedSubBlock },
+        },
+      }
+
+      const rawBody = {
+        blocks: updatedBlocks,
+        edges: state.edges,
+        loops: state.loops,
+        parallels: state.parallels,
+        lastSaved: state.lastSaved ?? Date.now(),
+        isDeployed: state.isDeployed ?? false,
+      }
+      // double-cast-allowed: WorkflowStatePayload is the loose local view of
+      // useWorkflowState; we round-trip it back to the strict PUT body shape.
+      const body = rawBody as unknown as WorkflowStateContractInput
+      await requestJson(putWorkflowNormalizedStateContract, {
+        params: { id: wfId },
+        body,
+      })
+      return missingInputColumnNames.length
+    },
+    onSuccess: (added) => {
+      queryClient.invalidateQueries({ queryKey: workflowKeys.state(selectedWorkflowId) })
+      toast.success(`Added ${added} input${added === 1 ? '' : 's'} to start block`)
+    },
+    onError: (err) => {
+      toast.error(toError(err).message)
+    },
+  })
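The mutation above only ever adds rows for columns that aren't already covered by the start block. A minimal sketch of that "missing inputs" diff, with stub data standing in for the real `inputFormat` fields:

```ts
// Hypothetical data; the real names come from the table schema and the
// start block's persisted inputFormat value.
const existingNames = new Set(['title', 'status'])
const tableColumns = ['title', 'status', 'priority', 'owner']

const missing = tableColumns.filter((name) => !existingNames.has(name))
console.log(missing) // ['priority', 'owner'] — these become new inputFormat rows
```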
+
+  const blockOutputGroups = useMemo(() => {
+    const state = workflowState.data as
+      | {
+          blocks?: Record<string, unknown>
+          edges?: FlattenOutputsEdgeInput[]
+        }
+      | null
+      | undefined
+    if (!state?.blocks) return []
+
+    const blocks = Object.values(state.blocks)
+    const edges = state.edges ?? []
+    const flat = flattenWorkflowOutputs(blocks, edges)
+    if (flat.length === 0) return []
+
+    const groupsByBlockId = new Map()
+    for (const f of flat) {
+      let group = groupsByBlockId.get(f.blockId)
+      if (!group) {
+        const blockConfig = getBlock(f.blockType)
+        const blockColor = blockConfig?.bgColor || '#2F55FF'
+        let blockIcon: string | React.ComponentType<{ className?: string }> = f.blockName
+          .charAt(0)
+          .toUpperCase()
+        if (blockConfig?.icon) blockIcon = blockConfig.icon
+        else if (f.blockType === 'loop') blockIcon = RepeatIcon
+        else if (f.blockType === 'parallel') blockIcon = SplitIcon
+        group = {
+          blockId: f.blockId,
+          blockName: f.blockName,
+          blockType: f.blockType,
+          blockIcon,
+          blockColor,
+          paths: [],
+        }
+        groupsByBlockId.set(f.blockId, group)
+      }
+      group.paths.push(f.path)
+    }
+    // Sort the picker by execution order (start block first) so it matches the
+    // saved-column ordering. Unreachable blocks sink to the end.
+    const distances = getBlockExecutionOrder(blocks, edges)
+    return Array.from(groupsByBlockId.values()).sort((a, b) => {
+      const da = distances[a.blockId]
+      const db = distances[b.blockId]
+      const sa = da === undefined || da < 0 ? Number.POSITIVE_INFINITY : da
+      const sb = db === undefined || db < 0 ? Number.POSITIVE_INFINITY : db
+      return sa - sb
+    })
+  }, [workflowState.data])
+
+  /**
+   * Re-encode persisted `{blockId, path}` entries into the picker's encoded form
+   * once the workflow's blocks are loaded. Stale entries (block deleted or path
+   * removed) are dropped silently — the user can re-pick on save.
+   */
+  useEffect(() => {
+    if (!existingGroup?.outputs.length) return
+    if (selectedOutputs.length > 0) return
+    if (blockOutputGroups.length === 0) return
+    const encoded: string[] = []
+    for (const entry of existingGroup.outputs) {
+      const match = blockOutputGroups.find(
+        (g) => g.blockId === entry.blockId && g.paths.includes(entry.path)
+      )
+      if (match) encoded.push(encodeOutputValue(entry.blockId, entry.path))
+    }
+    if (encoded.length > 0) setSelectedOutputs(encoded)
+  }, [blockOutputGroups, selectedOutputs.length, existingGroup])
+
+  const toggleDep = (name: string) => {
+    setDeps((prev) => (prev.includes(name) ? prev.filter((d) => d !== name) : [...prev, name]))
+  }
+
+  const toggleGroupDep = (groupId: string) => {
+    setGroupDeps((prev) =>
+      prev.includes(groupId) ? prev.filter((d) => d !== groupId) : [...prev, groupId]
+    )
+  }
+
+  const toggleOutput = (encoded: string) => {
+    setSelectedOutputs((prev) =>
+      prev.includes(encoded) ? prev.filter((v) => v !== encoded) : [...prev, encoded]
+    )
+  }
+
+  const typeOptions = useMemo(
+    () =>
+      COLUMN_TYPE_OPTIONS.filter((o) => o.type !== 'workflow' || !!existingGroup).map((o) => ({
+        label: o.label,
+        value: o.type,
+        icon: o.icon,
+      })),
+    [existingGroup]
+  )
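The execution-order sort used by the picker can be isolated as a comparator. The sketch below assumes a plain distance map (the real one comes from `getBlockExecutionOrder`), with unreachable blocks (undefined or negative distance) sinking to the end:

```ts
// Illustrative comparator matching the ordering described above.
const distances: Record<string, number> = { start: 0, agent: 1, orphan: -1 }
const ids = ['orphan', 'agent', 'start']

const norm = (d?: number) => (d === undefined || d < 0 ? Number.POSITIVE_INFINITY : d)
ids.sort((a, b) => norm(distances[a]) - norm(distances[b]))

console.log(ids) // ['start', 'agent', 'orphan']
```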
+
+  /**
+   * One-line summary of the trigger picker shown when Run settings is collapsed.
+   * Lists the dep names ("Runs when X, Y are filled") so the user can see at a
+   * glance whether anything's gating the group without expanding the section.
+   */
+  const runSettingsSummary = useMemo(() => {
+    const names: string[] = [...deps]
+    for (const gid of groupDeps) {
+      const g = workflowGroups.find((gg) => gg.id === gid)
+      const wf = workflows?.find((w) => w.id === g?.workflowId)
+      const label = g?.name ?? wf?.name ?? 'workflow'
+      names.push(label)
+    }
+    if (names.length === 0) return 'Runs as soon as the group is added'
+    return `Runs when ${names.join(', ')} ${names.length === 1 ? 'is' : 'are'} filled`
+  }, [deps, groupDeps, workflowGroups, workflows])
+
+  /**
+   * Builds the ordered, deduplicated `(blockId, path)` list from the picker
+   * state, sorted by execution order. Empty array if the user hasn't picked
+   * anything.
+   */
+  const buildOrderedPickedOutputs = (): Array<{
+    blockId: string
+    path: string
+    leafType?: string
+  }> => {
+    const seen = new Set<string>()
+    const outputs: Array<{ blockId: string; path: string; leafType?: string }> = []
+    for (const encoded of selectedOutputs) {
+      if (seen.has(encoded)) continue
+      seen.add(encoded)
+      outputs.push(decodeOutputValue(encoded))
+    }
+    const wfState = workflowState.data as
+      | {
+          blocks?: Record<string, unknown>
+          edges?: FlattenOutputsEdgeInput[]
+        }
+      | null
+      | undefined
+    if (wfState?.blocks) {
+      const blocks = Object.values(wfState.blocks)
+      const edges = wfState.edges ?? []
+      const distances = getBlockExecutionOrder(blocks, edges)
+      const flat = flattenWorkflowOutputs(blocks, edges)
+      const indexInFlat = new Map(
+        flat.map((f, i) => [`${f.blockId}${OUTPUT_VALUE_SEPARATOR}${f.path}`, i])
+      )
+      const leafTypeByKey = new Map(
+        flat.map((f) => [`${f.blockId}${OUTPUT_VALUE_SEPARATOR}${f.path}`, f.leafType])
+      )
+      for (const o of outputs) {
+        o.leafType = leafTypeByKey.get(`${o.blockId}${OUTPUT_VALUE_SEPARATOR}${o.path}`)
+      }
+      outputs.sort((a, b) => {
+        const da = distances[a.blockId]
+        const db = distances[b.blockId]
+        const sa = da === undefined || da < 0 ? Number.POSITIVE_INFINITY : da
+        const sb = db === undefined || db < 0 ? Number.POSITIVE_INFINITY : db
+        if (sa !== sb) return sa - sb
+        const ia =
+          indexInFlat.get(`${a.blockId}${OUTPUT_VALUE_SEPARATOR}${a.path}`) ??
+          Number.POSITIVE_INFINITY
+        const ib =
+          indexInFlat.get(`${b.blockId}${OUTPUT_VALUE_SEPARATOR}${b.path}`) ??
+          Number.POSITIVE_INFINITY
+        return ia - ib
+      })
+    }
+    return outputs
+  }
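The encoded-output helpers referenced above (`encodeOutputValue` / `decodeOutputValue` with `OUTPUT_VALUE_SEPARATOR`) presumably reduce to something like this sketch; the separator constant and split strategy here are assumptions, not the module's actual implementation:

```ts
// Assumed separator; the real constant is OUTPUT_VALUE_SEPARATOR.
const SEP = '::'

const encode = (blockId: string, path: string) => `${blockId}${SEP}${path}`

// Split on the first separator only, so paths containing '::' survive.
const decode = (encoded: string) => {
  const i = encoded.indexOf(SEP)
  return { blockId: encoded.slice(0, i), path: encoded.slice(i + SEP.length) }
}

console.log(decode(encode('block-1', 'result.items[0].name')))
// { blockId: 'block-1', path: 'result.items[0].name' }
```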
+
+  const handleSave = async () => {
+    if (!configState) return
+    setSaveError(null)
+    const trimmedName = nameInput.trim()
+    // Name is required iff the field is shown — when configuring a whole
+    // workflow group at creation time, per-output column names are auto-derived
+    // and the field is hidden, so don't gate save on it.
+    const missing: string[] = []
+    if (showColumnNameField && !trimmedName) missing.push('a column name')
+    if (isWorkflow && !selectedWorkflowId) missing.push('a workflow')
+    if (isWorkflow && selectedWorkflowId && selectedOutputs.length === 0) {
+      missing.push('at least one output column')
+    }
+    if (missing.length > 0) {
+      setShowValidation(true)
+      // Surface a short summary near the Save button too — the inline FieldError
+      // can be scrolled out of view when the panel content is tall.
+      setSaveError(`Add ${missing.join(' and ')} before saving.`)
+      return
+    }
+
+    try {
+      if (isWorkflow) {
+        const orderedOutputs = buildOrderedPickedOutputs()
+        const dependencies: WorkflowGroupDependencies = {
+          columns: deps,
+          ...(groupDeps.length > 0 ? { workflowGroups: groupDeps } : {}),
+        }
+
+        if (existingGroup) {
+          // Update path: diff outputs, derive new column names for added entries,
+          // call updateWorkflowGroup so the service handles add/remove transactionally.
+          // If the sidebar was opened on a *specific* workflow-output column and
+          // the user renamed it, propagate that into the group's `outputs` ref
+          // (the column rename itself goes through `updateColumn` below, which
+          // server-side cascades into outputs/deps — but our outgoing payload
+          // also has to use the new name so the group update doesn't undo it).
+          const editedColumnName = configState.mode === 'edit' ? configState.columnName : null
+          const renamedColumn =
+            editedColumnName && trimmedName && trimmedName !== editedColumnName
+              ? { from: editedColumnName, to: trimmedName }
+              : null
+          const oldKeys = new Set(existingGroup.outputs.map((o) => `${o.blockId}::${o.path}`))
+          const taken = new Set(
+            allColumns.map((c) =>
+              renamedColumn && c.name === renamedColumn.from ? renamedColumn.to : c.name
+            )
+          )
+          const fullOutputs: WorkflowGroupOutput[] = []
+          const newOutputColumns: NonNullable<UpdateWorkflowGroupBodyInput['newOutputColumns']> = []
+          for (const o of orderedOutputs) {
+            const key = `${o.blockId}::${o.path}`
+            const existing = existingGroup.outputs.find(
+              (e) => e.blockId === o.blockId && e.path === o.path
+            )
+            if (existing) {
+              fullOutputs.push(
+                renamedColumn && existing.columnName === renamedColumn.from
+                  ? { ...existing, columnName: renamedColumn.to }
+                  : existing
+              )
+            } else {
+              const colName = deriveOutputColumnName(o.path, taken)
+              taken.add(colName)
+              fullOutputs.push({ blockId: o.blockId, path: o.path, columnName: colName })
+              newOutputColumns.push({
+                name: colName,
+                type: columnTypeForLeaf(o.leafType),
+                required: false,
+                unique: false,
+                workflowGroupId: existingGroup.id,
+              })
+            }
+            oldKeys.delete(key)
+          }
+          if (renamedColumn) {
+            await updateColumn.mutateAsync({
+              columnName: renamedColumn.from,
+              updates: { name: renamedColumn.to },
+            })
+          }
+          await updateWorkflowGroup.mutateAsync({
+            groupId: existingGroup.id,
+            workflowId: selectedWorkflowId,
+            name: existingGroup.name,
+            dependencies,
+            outputs: fullOutputs,
+            ...(newOutputColumns.length > 0 ? { newOutputColumns } : {}),
+          })
+          toast.success(`Saved "${existingGroup.name ?? 'Workflow'}"`)
+        } else {
+          // Create path: build a fresh group with auto-derived column names.
+          const groupId = generateId()
+          const taken = new Set(allColumns.map((c) => c.name))
+          const newOutputColumns: AddWorkflowGroupBodyInput['outputColumns'] = []
+          const groupOutputs: WorkflowGroupOutput[] = []
+          for (const o of orderedOutputs) {
+            const colName = deriveOutputColumnName(o.path, taken)
+            taken.add(colName)
+            newOutputColumns.push({
+              name: colName,
+              type: columnTypeForLeaf(o.leafType),
+              required: false,
+              unique: false,
+              workflowGroupId: groupId,
+            })
+            groupOutputs.push({ blockId: o.blockId, path: o.path, columnName: colName })
+          }
+          const workflowName =
+            workflows?.find((w) => w.id === selectedWorkflowId)?.name ?? 'Workflow'
+          const group: WorkflowGroup = {
+            id: groupId,
+            workflowId: selectedWorkflowId,
+            name: workflowName,
+            dependencies,
+            outputs: groupOutputs,
+          }
+          await addWorkflowGroup.mutateAsync({ group, outputColumns: newOutputColumns })
+          toast.success(`Added "${workflowName}"`)
+        }
+      } else if (configState.mode === 'create') {
+        // `isWorkflow` is false here, so `typeInput` is a real ColumnDefinition type.
+        const scalarType = typeInput as ColumnDefinition['type']
+        await addColumn.mutateAsync({
+          name: trimmedName,
+          type: scalarType,
+        })
+        toast.success(`Added "${trimmedName}"`)
+      } else {
+        const existing = existingColumnRef.current
+        const scalarType = typeInput as ColumnDefinition['type']
+        const renamed = trimmedName !== configState.columnName
+        const typeChanged = !!existing && existing.type !== scalarType
+        const uniqueChanged = !!existing && !!existing.unique !== uniqueInput
+
+        const updates: {
+          name?: string
+          type?: ColumnDefinition['type']
+          unique?: boolean
+        } = {
+          ...(renamed ? { name: trimmedName } : {}),
+          ...(typeChanged ? { type: scalarType } : {}),
+          ...(uniqueChanged ? { unique: uniqueInput } : {}),
+        }
+
+        if (Object.keys(updates).length === 0) {
+          onClose()
+          return
+        }
+
+        await updateColumn.mutateAsync({
+          columnName: configState.columnName,
+          updates,
+        })
+        toast.success(`Saved "${trimmedName}"`)
+      }
+
+      onClose()
+    } catch (err) {
+      setSaveError(toError(err).message)
+    }
+  }
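Both save paths derive fresh column names through `deriveOutputColumnName` against a taken-set. A hedged sketch of that contract, assuming a suffix-on-collision strategy (the real helper may pick names differently):

```ts
// Illustrative only: derives a unique name from the last path segment,
// suffixing on collision with already-taken column names.
function deriveName(path: string, taken: Set<string>): string {
  const base = path.split('.').pop() ?? path
  let name = base
  let n = 2
  while (taken.has(name)) name = `${base}_${n++}`
  return name
}

const taken = new Set(['name'])
console.log(deriveName('result.user.name', taken)) // 'name_2'
```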
+
+  const saveDisabled = updateColumn.isPending || addColumn.isPending
+
+  return (
+
+  )
+}
diff --git a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/column-sidebar/column-types.ts b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/column-sidebar/column-types.ts
new file mode 100644
index 00000000000..10e392e82a1
--- /dev/null
+++ b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/column-sidebar/column-types.ts
@@ -0,0 +1,32 @@
+import type React from 'react'
+import {
+  Calendar as CalendarIcon,
+  PlayOutline,
+  TypeBoolean,
+  TypeJson,
+  TypeNumber,
+  TypeText,
+} from '@/components/emcn/icons'
+import type { ColumnDefinition } from '@/lib/table'
+
+/**
+ * UI-only column type. `'workflow'` is a virtual selection that lets the user
+ * configure a workflow group from the sidebar; on save, it expands into N real
+ * scalar columns + one workflow group, none of which carry a `'workflow'` type.
+ */
+export type SidebarColumnType = ColumnDefinition['type'] | 'workflow'
+
+export interface ColumnTypeOption {
+  type: SidebarColumnType
+  label: string
+  icon: React.ComponentType<{ className?: string }>
+}
+
+export const COLUMN_TYPE_OPTIONS: ColumnTypeOption[] = [
+  { type: 'string', label: 'Text', icon: TypeText },
+  { type: 'number', label: 'Number', icon: TypeNumber },
+  { type: 'boolean', label: 'Boolean', icon: TypeBoolean },
+  { type: 'date', label: 'Date', icon: CalendarIcon },
+  { type: 'json', label: 'JSON', icon: TypeJson },
+  { type: 'workflow', label: 'Workflow', icon: PlayOutline },
+]
diff --git a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/context-menu/context-menu.tsx b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/context-menu/context-menu.tsx
index 9939e3cd2f4..dfe0523ba8d 100644
--- a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/context-menu/context-menu.tsx
+++ b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/context-menu/context-menu.tsx
@@ -5,7 +5,7 @@ import {
   DropdownMenuSeparator,
   DropdownMenuTrigger,
 } from '@/components/emcn'
-import { ArrowDown, ArrowUp, Duplicate, Pencil, Trash } from '@/components/emcn/icons'
+import { ArrowDown, ArrowUp, Duplicate, Eye, Pencil, Trash } from '@/components/emcn/icons'
 import type { ContextMenuState } from '../../types'
 
 interface ContextMenuProps {
@@ -16,6 +16,9 @@ interface ContextMenuProps {
   onInsertAbove: () => void
   onInsertBelow: () => void
   onDuplicate: () => void
+  onViewExecution?: () => void
+  canViewExecution?: boolean
+  canEditCell?: boolean
   selectedRowCount?: number
   disableEdit?: boolean
   disableInsert?: boolean
@@ -30,6 +33,9 @@ export function ContextMenu({
   onInsertAbove,
   onInsertBelow,
   onDuplicate,
+  onViewExecution,
+  canViewExecution = false,
+  canEditCell = true,
   selectedRowCount = 1,
   disableEdit = false,
   disableInsert = false,
@@ -63,12 +69,18 @@ export function ContextMenu({
       sideOffset={4}
       onCloseAutoFocus={(e) => e.preventDefault()}
     >
-      {contextMenu.columnName && (
+      {contextMenu.columnName && canEditCell && (
 
           Edit cell
 
       )}
+      {canViewExecution && onViewExecution && (
+
+
+          View execution
+
+      )}
 
 
         Insert row above
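The scalar-edit branch above sends only the fields that actually changed. As a standalone illustration, assuming a simplified `Updates` shape:

```ts
// Minimal-diff PATCH payload: untouched fields are omitted entirely.
interface Updates {
  name?: string
  type?: string
  unique?: boolean
}

function buildUpdates(
  renamed: boolean,
  typeChanged: boolean,
  uniqueChanged: boolean,
  next: { name: string; type: string; unique: boolean }
): Updates {
  return {
    ...(renamed ? { name: next.name } : {}),
    ...(typeChanged ? { type: next.type } : {}),
    ...(uniqueChanged ? { unique: next.unique } : {}),
  }
}

console.log(buildUpdates(true, false, false, { name: 'title', type: 'string', unique: false }))
// { name: 'title' } — an empty result means nothing to save, so the panel just closes
```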
diff --git a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table/cells/cell-content.tsx b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table/cells/cell-content.tsx
new file mode 100644
index 00000000000..57457056af0
--- /dev/null
+++ b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table/cells/cell-content.tsx
@@ -0,0 +1,171 @@
+'use client'
+
+import type React from 'react'
+import { Circle } from 'lucide-react'
+import { Checkbox } from '@/components/emcn'
+import { Loader } from '@/components/emcn/icons/loader'
+import { cn } from '@/lib/core/utils/cn'
+import type { RowExecutionMetadata } from '@/lib/table'
+import type { SaveReason } from '../../../types'
+import { storageToDisplay } from '../../../utils'
+import type { DisplayColumn } from '../types'
+import { InlineEditor } from './inline-editors'
+
+interface CellContentProps {
+  value: unknown
+  exec?: RowExecutionMetadata
+  column: DisplayColumn
+  isEditing: boolean
+  initialCharacter?: string | null
+  onSave: (value: unknown, reason: SaveReason) => void
+  onCancel: () => void
+  workflowNameById?: Record<string, string>
+}
+
+/**
+ * Renders the visible content of a single cell. Workflow-output cells follow
+ * a status state machine (block error / value / running / waiting / cancelled
+ * / dash); plain cells render the typed value. When `isEditing` is true the
+ * `InlineEditor` overlay sits on top of the static content.
+ */
+export function CellContent({
+  value,
+  exec,
+  column,
+  isEditing,
+  initialCharacter,
+  onSave,
+  onCancel,
+}: CellContentProps) {
+  const isNull = value === null || value === undefined
+
+  let displayContent: React.ReactNode = null
+  if (column.workflowGroupId) {
+    const blockId = column.outputBlockId
+    const blockError = blockId ? exec?.blockErrors?.[blockId] : undefined
+    const blockRunning = blockId ? (exec?.runningBlockIds?.includes(blockId) ?? false) : false
+    const hasValue = !isNull
+    const valueText =
+      typeof value === 'string'
+        ? value
+        : value === null || value === undefined
+          ? ''
+          : JSON.stringify(value)
+
+    // Once any block in the group has reported an error, downstream cells
+    // that haven't started won't run on this attempt — collapse them to dash
+    // instead of leaving a stale "Waiting" spinner if the cell task didn't
+    // reach a clean terminal state.
+    const groupHasBlockErrors = !!(exec?.blockErrors && Object.keys(exec.blockErrors).length > 0)
+    if (blockError) {
+      displayContent = (
+
+          Error
+
+      )
+    } else if (hasValue) {
+      displayContent = (
+
+          {valueText}
+
+      )
+    } else if (
+      (exec?.status === 'running' || exec?.status === 'pending') &&
+      !(groupHasBlockErrors && !blockRunning)
+    ) {
+      // Motion only when this cell's own block is in flight. Pending and
+      // upstream-blocked Waiting render as static dots — the moving spinner
+      // is reserved for "right now, actually running".
+      if (blockRunning) {
+        displayContent = (
+
+
+            Running
+
+        )
+      } else {
+        const label = exec.status === 'pending' ? 'Pending' : 'Waiting'
+        displayContent = (
+
+
+            {label}
+
+        )
+      }
+    } else if (exec?.status === 'cancelled') {
+      displayContent = (
+
+          Cancelled
+
+      )
+    } else {
+      displayContent =
+    }
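The branch chain above amounts to a small status state machine. A condensed restatement with simplified inputs (the real code reads `RowExecutionMetadata` directly, and renders JSX rather than returning a label):

```ts
type CellStatus = 'error' | 'value' | 'running' | 'pending' | 'waiting' | 'cancelled' | 'dash'

function cellStatus(opts: {
  blockError?: string
  hasValue: boolean
  rowStatus?: string
  blockRunning: boolean
  groupHasErrors: boolean
}): CellStatus {
  if (opts.blockError) return 'error'
  if (opts.hasValue) return 'value'
  const inFlight = opts.rowStatus === 'running' || opts.rowStatus === 'pending'
  // A group-level error with this block idle means the run won't reach us.
  if (inFlight && !(opts.groupHasErrors && !opts.blockRunning)) {
    if (opts.blockRunning) return 'running' // spinner: actually executing now
    return opts.rowStatus === 'pending' ? 'pending' : 'waiting' // static dot
  }
  if (opts.rowStatus === 'cancelled') return 'cancelled'
  return 'dash'
}

console.log(cellStatus({ hasValue: false, rowStatus: 'running', blockRunning: true, groupHasErrors: false }))
// 'running'
```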
+    // Workflow-output cells are hand-editable: hide the status content under
+    // the InlineEditor when the user opts to edit, then fall through to the
+    // common return that renders the editor overlay.
+    if (isEditing) {
+      displayContent = {displayContent}
+    }
+  } else if (column.type === 'boolean') {
+    displayContent = (
+
+
+    )
+  } else if (!isNull && column.type === 'json') {
+    displayContent = (
+
+        {JSON.stringify(value)}
+
+    )
+  } else if (!isNull && column.type === 'date') {
+    displayContent = (
+
+        {storageToDisplay(String(value))}
+
+    )
+  } else if (!isNull) {
+    displayContent = (
+
+        {String(value)}
+
+    )
+  }
+
+  return (
+    <>
+      {isEditing && (
+
+      )}
+      {displayContent}
+    </>
+  )
+}
diff --git a/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table/cells/expanded-cell-popover.tsx b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table/cells/expanded-cell-popover.tsx
new file mode 100644
index 00000000000..388622c82b9
--- /dev/null
+++ b/apps/sim/app/workspace/[workspaceId]/tables/[tableId]/components/table/cells/expanded-cell-popover.tsx
@@ -0,0 +1,209 @@
+'use client'
+
+import type React from 'react'
+import { useEffect, useLayoutEffect, useMemo, useRef, useState } from 'react'
+import { Button } from '@/components/emcn'
+import type { TableRow as TableRowType } from '@/lib/table'
+import type { EditingCell, SaveReason } from '../../../types'
+import { cleanCellValue, displayToStorage, formatValueForInput } from '../../../utils'
+import type { DisplayColumn } from '../types'
+
+interface ExpandedCellPopoverProps {
+  expandedCell: EditingCell | null
+  onClose: () => void
+  rows: TableRowType[]
+  columns: DisplayColumn[]
+  onSave: (rowId: string, columnName: string, value: unknown, reason: SaveReason) => void
+  canEdit: boolean
+  scrollContainer: HTMLElement | null
+}
+
+const EXPANDED_CELL_MIN_WIDTH = 420
+const EXPANDED_CELL_HEIGHT = 280
+
+/**
+ * Supabase-style anchored cell expander. Floats over the clicked cell at the cell's
+ * top-left, minimum width {@link EXPANDED_CELL_MIN_WIDTH}, fixed height, internally
+ * scrollable. Triggered by cell double-click so long values are readable/editable
+ * without widening the column. Inline edit via Enter/F2/typing is unaffected.
+ *
+ * Boolean cells are read-only in this view — they toggle via the inline checkbox.
+ * Workflow-output cells stay editable here so the user can override the
+ * scheduler's value.
+ */
+export function ExpandedCellPopover({
+  expandedCell,
+  onClose,
+  rows,
+  columns,
+  onSave,
+  canEdit,
+  scrollContainer,
+}: ExpandedCellPopoverProps) {
+  const rootRef = useRef<HTMLDivElement>(null)
+  const textareaRef = useRef<HTMLTextAreaElement>(null)
+  const [rect, setRect] = useState<{ top: number; left: number; width: number } | null>(null)
+  const [draftValue, setDraftValue] = useState('')
+
+  const target = useMemo(() => {
+    if (!expandedCell) return null
+    const row = rows.find((r) => r.id === expandedCell.rowId)
+    // Match the specific visual column the user double-clicked on. Fanned-out
+    // workflow columns share `name` across siblings, so prefer `key` when set.
+    const matchByKey = expandedCell.columnKey
+      ? (c: DisplayColumn) => c.key === expandedCell.columnKey
+      : (c: DisplayColumn) => c.name === expandedCell.columnName
+    const column = columns.find(matchByKey)
+    if (!row || !column) return null
+    const colIndex = columns.findIndex(matchByKey)
+    return { row, column, colIndex, value: row.data[column.name] }
+  }, [expandedCell, rows, columns])
+
+  const isBooleanCell = target?.column.type === 'boolean'
+  // Workflow-output cells are editable in the expanded view too — the user
+  // can override the workflow's value. Booleans toggle inline; the expanded
+  // popover only handles text-shaped inputs.
+  const isEditable = Boolean(target) && canEdit && !isBooleanCell
+
+  const displayText = useMemo(() => {
+    if (!target) return ''
+    const { value } = target
+    if (value == null) return ''
+    if (typeof value === 'string') return value
+    return JSON.stringify(value, null, 2)
+  }, [target])
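The anchoring logic further down measures the clicked cell and clamps the popover to the viewport. This sketch mirrors that flip/clamp math as a pure function, with window dimensions passed in as parameters instead of read from `window` (an assumption made so the example runs anywhere):

```ts
const PAD = 8 // matches VIEWPORT_PAD below

function anchor(
  cell: { top: number; left: number; width: number },
  popW: number,
  popH: number,
  vw: number,
  vh: number
) {
  // Prefer the cell's left edge; flip to right-edge alignment on overflow.
  const overflowsRight = cell.left + popW > vw - PAD
  const left = overflowsRight
    ? Math.max(PAD, cell.left + cell.width - popW)
    : Math.max(PAD, cell.left)
  // Same idea vertically: push up so the fixed-height panel stays on screen.
  const overflowsBottom = cell.top + popH > vh - PAD
  const top = overflowsBottom ? Math.max(PAD, vh - popH - PAD) : cell.top
  return { top, left }
}

console.log(anchor({ top: 700, left: 1200, width: 160 }, 420, 280, 1280, 800))
// { top: 512, left: 940 } — flipped left and clamped up to stay inside 1280×800
```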
+
+  useLayoutEffect(() => {
+    if (!expandedCell || !target) {
+      setRect(null)
+      return
+    }
+    setDraftValue(isEditable ? formatValueForInput(target.value, target.column.type) : '')
+    const selector = `[data-table-scroll] [data-row="${target.row.position}"][data-col="${target.colIndex}"]`
+    const el = document.querySelector(selector)
+    if (!el) {
+      setRect(null)
+      return
+    }
+    const r = el.getBoundingClientRect()
+    setRect({ top: r.top, left: r.left, width: r.width })
+    // Focus the textarea on open so typing works immediately.
+    requestAnimationFrame(() => textareaRef.current?.focus())
+  }, [expandedCell, target, isEditable])
+
+  useEffect(() => {
+    if (!expandedCell) return
+    const handleKey = (e: KeyboardEvent) => {
+      if (e.key === 'Escape') {
+        e.preventDefault()
+        onClose()
+      }
+    }
+    const handleMouseDown = (e: MouseEvent) => {
+      if (!rootRef.current) return
+      if (rootRef.current.contains(e.target as Node)) return
+      onClose()
+    }
+    window.addEventListener('keydown', handleKey)
+    window.addEventListener('mousedown', handleMouseDown)
+    return () => {
+      window.removeEventListener('keydown', handleKey)
+      window.removeEventListener('mousedown', handleMouseDown)
+    }
+  }, [expandedCell, onClose])
+
+  // Close on table scroll — re-anchoring mid-scroll is more jarring than dismissing.
+  useEffect(() => {
+    if (!expandedCell || !scrollContainer) return
+    const handler = () => onClose()
+    scrollContainer.addEventListener('scroll', handler, { passive: true })
+    return () => scrollContainer.removeEventListener('scroll', handler)
+  }, [expandedCell, scrollContainer, onClose])
+
+  if (!expandedCell || !target || !rect) return null
+
+  const width = Math.max(rect.width, EXPANDED_CELL_MIN_WIDTH)
+  // Clamp to viewport. Prefer anchoring at the cell's left edge; if the popover
+  // would overflow right, align its right edge with the cell's right edge
+  // (mirroring Radix/menu flip behavior). Same idea for bottom-of-viewport.
+  const VIEWPORT_PAD = 8
+  const cellRight = rect.left + rect.width
+  const overflowsRight = rect.left + width > window.innerWidth - VIEWPORT_PAD
+  const left = overflowsRight
+    ? Math.max(VIEWPORT_PAD, cellRight - width)
+    : Math.max(VIEWPORT_PAD, rect.left)
+  const overflowsBottom = rect.top + EXPANDED_CELL_HEIGHT > window.innerHeight - VIEWPORT_PAD
+  const top = overflowsBottom
+    ? Math.max(VIEWPORT_PAD, window.innerHeight - EXPANDED_CELL_HEIGHT - VIEWPORT_PAD)
+    : rect.top
+
+  const handleSave = () => {
+    if (!isEditable) return
+    // `displayToStorage` only normalizes dates — it returns null for anything else.
+    // Fall back to the raw draft for non-date columns, matching the inline editor.
+    const raw = displayToStorage(draftValue) ?? draftValue
+    const cleaned = cleanCellValue(raw, target.column)
+    onSave(target.row.id, target.column.name, cleaned, 'blur')
+    onClose()
+  }
+
+  const handleTextareaKeyDown = (e: React.KeyboardEvent) => {
+    if (e.key === 'Enter' && !e.shiftKey) {
+      e.preventDefault()
+      handleSave()
+    }
+  }
+
+  return (
+
+      {isEditable ? (
+        <>