Skip to content

Commit 96fdfbc

Browse files
committed
fix(browser-agents): log live URL for stagehand and browser-use
1 parent a66c1e3 commit 96fdfbc

File tree

6 files changed

+165
-134
lines changed

6 files changed

+165
-134
lines changed

apps/sim/app/api/logs/cleanup/route.ts

Lines changed: 80 additions & 66 deletions
Original file line numberDiff line numberDiff line change
@@ -10,13 +10,13 @@ export const dynamic = 'force-dynamic'
1010

1111
const logger = createLogger('LogsCleanup')
1212

13-
const BATCH_SIZE = 500
13+
const BATCH_SIZE = 2000
1414
const S3_CONFIG = {
1515
bucket: process.env.S3_LOGS_BUCKET_NAME || '',
1616
region: process.env.AWS_REGION || '',
1717
}
1818

19-
export async function POST(request: Request) {
19+
export async function GET(request: Request) {
2020
try {
2121
const authHeader = request.headers.get('authorization')
2222

@@ -66,89 +66,103 @@ export async function POST(request: Request) {
6666

6767
const workflowIds = workflowsQuery.map((w) => w.id)
6868

69-
const oldLogs = await db
70-
.select({
71-
id: workflowLogs.id,
72-
workflowId: workflowLogs.workflowId,
73-
executionId: workflowLogs.executionId,
74-
level: workflowLogs.level,
75-
message: workflowLogs.message,
76-
duration: workflowLogs.duration,
77-
trigger: workflowLogs.trigger,
78-
createdAt: workflowLogs.createdAt,
79-
metadata: workflowLogs.metadata,
80-
})
81-
.from(workflowLogs)
82-
.where(
83-
and(
84-
inArray(workflowLogs.workflowId, workflowIds),
85-
lt(workflowLogs.createdAt, retentionDate)
86-
)
87-
)
88-
.limit(BATCH_SIZE)
89-
90-
logger.info(`Found ${oldLogs.length} logs older than ${retentionDate.toISOString()} to archive`)
91-
92-
if (oldLogs.length === 0) {
93-
return NextResponse.json({ message: 'No logs to clean up' })
94-
}
95-
9669
const results = {
97-
total: oldLogs.length,
70+
total: 0,
9871
archived: 0,
9972
archiveFailed: 0,
10073
deleted: 0,
10174
deleteFailed: 0,
10275
}
10376

104-
for (const log of oldLogs) {
105-
const today = new Date().toISOString().split('T')[0]
106-
107-
const logKey = `archived-logs/${today}/${log.id}.json`
108-
const logData = JSON.stringify(log)
109-
110-
try {
111-
await s3Client.send(
112-
new PutObjectCommand({
113-
Bucket: S3_CONFIG.bucket,
114-
Key: logKey,
115-
Body: logData,
116-
ContentType: 'application/json',
117-
Metadata: {
118-
logId: String(log.id),
119-
workflowId: String(log.workflowId),
120-
archivedAt: new Date().toISOString(),
121-
},
122-
})
77+
const startTime = Date.now()
78+
const MAX_BATCHES = 10
79+
80+
let batchesProcessed = 0
81+
let hasMoreLogs = true
82+
83+
while (hasMoreLogs && batchesProcessed < MAX_BATCHES) {
84+
const oldLogs = await db
85+
.select({
86+
id: workflowLogs.id,
87+
workflowId: workflowLogs.workflowId,
88+
executionId: workflowLogs.executionId,
89+
level: workflowLogs.level,
90+
message: workflowLogs.message,
91+
duration: workflowLogs.duration,
92+
trigger: workflowLogs.trigger,
93+
createdAt: workflowLogs.createdAt,
94+
metadata: workflowLogs.metadata,
95+
})
96+
.from(workflowLogs)
97+
.where(
98+
and(
99+
inArray(workflowLogs.workflowId, workflowIds),
100+
lt(workflowLogs.createdAt, retentionDate)
101+
)
123102
)
103+
.limit(BATCH_SIZE)
124104

125-
results.archived++
105+
results.total += oldLogs.length
106+
107+
for (const log of oldLogs) {
108+
const today = new Date().toISOString().split('T')[0]
109+
110+
const logKey = `archived-logs/${today}/${log.id}.json`
111+
const logData = JSON.stringify(log)
126112

127113
try {
128-
const deleteResult = await db
129-
.delete(workflowLogs)
130-
.where(eq(workflowLogs.id, log.id))
131-
.returning({ id: workflowLogs.id })
132-
133-
if (deleteResult.length > 0) {
134-
results.deleted++
135-
} else {
114+
await s3Client.send(
115+
new PutObjectCommand({
116+
Bucket: S3_CONFIG.bucket,
117+
Key: logKey,
118+
Body: logData,
119+
ContentType: 'application/json',
120+
Metadata: {
121+
logId: String(log.id),
122+
workflowId: String(log.workflowId),
123+
archivedAt: new Date().toISOString(),
124+
},
125+
})
126+
)
127+
128+
results.archived++
129+
130+
try {
131+
const deleteResult = await db
132+
.delete(workflowLogs)
133+
.where(eq(workflowLogs.id, log.id))
134+
.returning({ id: workflowLogs.id })
135+
136+
if (deleteResult.length > 0) {
137+
results.deleted++
138+
} else {
139+
results.deleteFailed++
140+
logger.warn(`Failed to delete log ${log.id} after archiving: No rows deleted`)
141+
}
142+
} catch (deleteError) {
136143
results.deleteFailed++
137-
logger.warn(`Failed to delete log ${log.id} after archiving: No rows deleted`)
144+
logger.error(`Error deleting log ${log.id} after archiving:`, { deleteError })
138145
}
139-
} catch (deleteError) {
140-
results.deleteFailed++
141-
logger.error(`Error deleting log ${log.id} after archiving:`, { deleteError })
146+
} catch (archiveError) {
147+
results.archiveFailed++
148+
logger.error(`Failed to archive log ${log.id}:`, { archiveError })
142149
}
143-
} catch (archiveError) {
144-
results.archiveFailed++
145-
logger.error(`Failed to archive log ${log.id}:`, { archiveError })
146150
}
151+
152+
batchesProcessed++
153+
hasMoreLogs = oldLogs.length === BATCH_SIZE
154+
155+
logger.info(`Processed batch ${batchesProcessed}: ${oldLogs.length} logs`)
147156
}
148157

158+
const timeElapsed = (Date.now() - startTime) / 1000
159+
const reachedLimit = batchesProcessed >= MAX_BATCHES && hasMoreLogs
160+
149161
return NextResponse.json({
150-
message: `Successfully processed ${results.total} logs: archived ${results.archived}, deleted ${results.deleted}`,
162+
message: `Processed ${batchesProcessed} batches (${results.total} logs) in ${timeElapsed.toFixed(2)}s${reachedLimit ? ' (batch limit reached)' : ''}`,
151163
results,
164+
complete: !hasMoreLogs,
165+
batchLimitReached: reachedLimit,
152166
})
153167
} catch (error) {
154168
logger.error('Error in log cleanup process:', { error })

apps/sim/app/api/tools/stagehand/agent/route.ts

Lines changed: 0 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -6,11 +6,9 @@ import { ensureZodObject, normalizeUrl } from '../utils'
66

77
const logger = createLogger('StagehandAgentAPI')
88

9-
// Environment variables for Browserbase
109
const BROWSERBASE_API_KEY = process.env.BROWSERBASE_API_KEY
1110
const BROWSERBASE_PROJECT_ID = process.env.BROWSERBASE_PROJECT_ID
1211

13-
// Input validation schema
1412
const requestSchema = z.object({
1513
task: z.string().min(1),
1614
startUrl: z.string().url(),
@@ -19,16 +17,13 @@ const requestSchema = z.object({
1917
apiKey: z.string(),
2018
})
2119

22-
// Helper function to get the schema object from the input
2320
function getSchemaObject(outputSchema: Record<string, any>): Record<string, any> {
24-
// Check if schema is nested under a 'schema' property (common pattern)
2521
if (outputSchema.schema && typeof outputSchema.schema === 'object') {
2622
return outputSchema.schema
2723
}
2824
return outputSchema
2925
}
3026

31-
// Helper function to format schema for instructions
3227
function formatSchemaForInstructions(schema: Record<string, any>): string {
3328
try {
3429
return JSON.stringify(schema, null, 2)
@@ -38,7 +33,6 @@ function formatSchemaForInstructions(schema: Record<string, any>): string {
3833
}
3934
}
4035

41-
// Helper to extract special action directives with variable placeholders
4236
function extractActionDirectives(task: string): {
4337
processedTask: string
4438
actionDirectives: Array<{ index: number; action: string }>
@@ -438,7 +432,6 @@ export async function POST(request: NextRequest) {
438432
let stagehand: Stagehand | null = null
439433

440434
try {
441-
// Parse and validate request body
442435
const body = await request.json()
443436
logger.info('Received Stagehand agent request', {
444437
startUrl: body.startUrl,

0 commit comments

Comments (0)