@@ -10,13 +10,13 @@ export const dynamic = 'force-dynamic'
1010
1111const logger = createLogger ( 'LogsCleanup' )
1212
13- const BATCH_SIZE = 500
13+ const BATCH_SIZE = 2000
1414const S3_CONFIG = {
1515 bucket : process . env . S3_LOGS_BUCKET_NAME || '' ,
1616 region : process . env . AWS_REGION || '' ,
1717}
1818
19- export async function POST ( request : Request ) {
19+ export async function GET ( request : Request ) {
2020 try {
2121 const authHeader = request . headers . get ( 'authorization' )
2222
@@ -66,89 +66,103 @@ export async function POST(request: Request) {
6666
6767 const workflowIds = workflowsQuery . map ( ( w ) => w . id )
6868
69- const oldLogs = await db
70- . select ( {
71- id : workflowLogs . id ,
72- workflowId : workflowLogs . workflowId ,
73- executionId : workflowLogs . executionId ,
74- level : workflowLogs . level ,
75- message : workflowLogs . message ,
76- duration : workflowLogs . duration ,
77- trigger : workflowLogs . trigger ,
78- createdAt : workflowLogs . createdAt ,
79- metadata : workflowLogs . metadata ,
80- } )
81- . from ( workflowLogs )
82- . where (
83- and (
84- inArray ( workflowLogs . workflowId , workflowIds ) ,
85- lt ( workflowLogs . createdAt , retentionDate )
86- )
87- )
88- . limit ( BATCH_SIZE )
89-
90- logger . info ( `Found ${ oldLogs . length } logs older than ${ retentionDate . toISOString ( ) } to archive` )
91-
92- if ( oldLogs . length === 0 ) {
93- return NextResponse . json ( { message : 'No logs to clean up' } )
94- }
95-
9669 const results = {
97- total : oldLogs . length ,
70+ total : 0 ,
9871 archived : 0 ,
9972 archiveFailed : 0 ,
10073 deleted : 0 ,
10174 deleteFailed : 0 ,
10275 }
10376
104- for ( const log of oldLogs ) {
105- const today = new Date ( ) . toISOString ( ) . split ( 'T' ) [ 0 ]
106-
107- const logKey = `archived-logs/${ today } /${ log . id } .json`
108- const logData = JSON . stringify ( log )
109-
110- try {
111- await s3Client . send (
112- new PutObjectCommand ( {
113- Bucket : S3_CONFIG . bucket ,
114- Key : logKey ,
115- Body : logData ,
116- ContentType : 'application/json' ,
117- Metadata : {
118- logId : String ( log . id ) ,
119- workflowId : String ( log . workflowId ) ,
120- archivedAt : new Date ( ) . toISOString ( ) ,
121- } ,
122- } )
77+ const startTime = Date . now ( )
78+ const MAX_BATCHES = 10
79+
80+ let batchesProcessed = 0
81+ let hasMoreLogs = true
82+
83+ while ( hasMoreLogs && batchesProcessed < MAX_BATCHES ) {
84+ const oldLogs = await db
85+ . select ( {
86+ id : workflowLogs . id ,
87+ workflowId : workflowLogs . workflowId ,
88+ executionId : workflowLogs . executionId ,
89+ level : workflowLogs . level ,
90+ message : workflowLogs . message ,
91+ duration : workflowLogs . duration ,
92+ trigger : workflowLogs . trigger ,
93+ createdAt : workflowLogs . createdAt ,
94+ metadata : workflowLogs . metadata ,
95+ } )
96+ . from ( workflowLogs )
97+ . where (
98+ and (
99+ inArray ( workflowLogs . workflowId , workflowIds ) ,
100+ lt ( workflowLogs . createdAt , retentionDate )
101+ )
123102 )
103+ . limit ( BATCH_SIZE )
124104
125- results . archived ++
105+ results . total += oldLogs . length
106+
107+ for ( const log of oldLogs ) {
108+ const today = new Date ( ) . toISOString ( ) . split ( 'T' ) [ 0 ]
109+
110+ const logKey = `archived-logs/${ today } /${ log . id } .json`
111+ const logData = JSON . stringify ( log )
126112
127113 try {
128- const deleteResult = await db
129- . delete ( workflowLogs )
130- . where ( eq ( workflowLogs . id , log . id ) )
131- . returning ( { id : workflowLogs . id } )
132-
133- if ( deleteResult . length > 0 ) {
134- results . deleted ++
135- } else {
114+ await s3Client . send (
115+ new PutObjectCommand ( {
116+ Bucket : S3_CONFIG . bucket ,
117+ Key : logKey ,
118+ Body : logData ,
119+ ContentType : 'application/json' ,
120+ Metadata : {
121+ logId : String ( log . id ) ,
122+ workflowId : String ( log . workflowId ) ,
123+ archivedAt : new Date ( ) . toISOString ( ) ,
124+ } ,
125+ } )
126+ )
127+
128+ results . archived ++
129+
130+ try {
131+ const deleteResult = await db
132+ . delete ( workflowLogs )
133+ . where ( eq ( workflowLogs . id , log . id ) )
134+ . returning ( { id : workflowLogs . id } )
135+
136+ if ( deleteResult . length > 0 ) {
137+ results . deleted ++
138+ } else {
139+ results . deleteFailed ++
140+ logger . warn ( `Failed to delete log ${ log . id } after archiving: No rows deleted` )
141+ }
142+ } catch ( deleteError ) {
136143 results . deleteFailed ++
137- logger . warn ( `Failed to delete log ${ log . id } after archiving: No rows deleted` )
144+ logger . error ( `Error deleting log ${ log . id } after archiving:` , { deleteError } )
138145 }
139- } catch ( deleteError ) {
140- results . deleteFailed ++
141- logger . error ( `Error deleting log ${ log . id } after archiving :` , { deleteError } )
146+ } catch ( archiveError ) {
147+ results . archiveFailed ++
148+ logger . error ( `Failed to archive log ${ log . id } :` , { archiveError } )
142149 }
143- } catch ( archiveError ) {
144- results . archiveFailed ++
145- logger . error ( `Failed to archive log ${ log . id } :` , { archiveError } )
146150 }
151+
152+ batchesProcessed ++
153+ hasMoreLogs = oldLogs . length === BATCH_SIZE
154+
155+ logger . info ( `Processed batch ${ batchesProcessed } : ${ oldLogs . length } logs` )
147156 }
148157
158+ const timeElapsed = ( Date . now ( ) - startTime ) / 1000
159+ const reachedLimit = batchesProcessed >= MAX_BATCHES && hasMoreLogs
160+
149161 return NextResponse . json ( {
150- message : `Successfully processed ${ results . total } logs: archived ${ results . archived } , deleted ${ results . deleted } ` ,
162+ message : `Processed ${ batchesProcessed } batches ( ${ results . total } logs) in ${ timeElapsed . toFixed ( 2 ) } s ${ reachedLimit ? ' (batch limit reached)' : '' } ` ,
151163 results,
164+ complete : ! hasMoreLogs ,
165+ batchLimitReached : reachedLimit ,
152166 } )
153167 } catch ( error ) {
154168 logger . error ( 'Error in log cleanup process:' , { error } )