@@ -1,6 +1,8 @@
 import io
+from datetime import datetime
 
 import pandas as pd
+import pytz
 from flask import Response
 from flask import abort
 from flask import current_app
@@ -26,7 +28,6 @@
 from app.data.celery import predict_v4_task
 from app.data.celery import update_db_task
 from app.data.database import execute_sql
-from app.data.database import get_current_time
 
 
 def send_csv_attachment_of_dataframe(
@@ -54,7 +55,7 @@ def send_csv_attachment_of_dataframe(
 
     # Set the file name:
     if date_prefix:
-        now = get_current_time()
+        now = datetime.now(pytz.timezone("US/Eastern"))
         todays_date = now.strftime("%Y-%m-%d")
         filename = f"{todays_date}-{filename}"
 
@@ -127,7 +128,7 @@ def download_from_db(self, sql_table_name: str):
 
     @expose("/csv/src/hobolink_source")
     def source_hobolink(self):
-        async_result = live_hobolink_data_task.delay(export_name="code_for_boston_export_90d")
+        async_result = live_hobolink_data_task.delay(days_ago=90)
         return redirect(
             url_for("admin_downloadview.csv_wait", task_id=async_result.id, data_source="hobolink")
         )
@@ -148,43 +149,35 @@ def source_usgs_b(self):
 
     @expose("/csv/src/processed_data_v1_source")
     def source_combine_data_v1(self):
-        async_result = combine_data_v1_task.delay(
-            export_name="code_for_boston_export_90d", days_ago=90
-        )
+        async_result = combine_data_v1_task.delay(days_ago=90)
         return redirect(
             url_for("admin_downloadview.csv_wait", task_id=async_result.id, data_source="combined")
         )
 
     @expose("/csv/src/processed_data_v2_source")
     def source_combine_data_v2(self):
-        async_result = combine_data_v2_task.delay(
-            export_name="code_for_boston_export_90d", days_ago=90
-        )
+        async_result = combine_data_v2_task.delay(days_ago=90)
         return redirect(
             url_for("admin_downloadview.csv_wait", task_id=async_result.id, data_source="combined")
         )
 
     @expose("/csv/src/processed_data_v3_source")
     def source_combine_data_v3(self):
-        async_result = combine_data_v3_task.delay(
-            export_name="code_for_boston_export_90d", days_ago=90
-        )
+        async_result = combine_data_v3_task.delay(days_ago=90)
         return redirect(
             url_for("admin_downloadview.csv_wait", task_id=async_result.id, data_source="combined")
         )
 
     @expose("/csv/src/processed_data_v4_source")
     def source_combine_data_v4(self):
-        async_result = combine_data_v4_task.delay(
-            export_name="code_for_boston_export_90d", days_ago=90
-        )
+        async_result = combine_data_v4_task.delay(days_ago=90)
         return redirect(
             url_for("admin_downloadview.csv_wait", task_id=async_result.id, data_source="combined")
         )
 
     @expose("/csv/src/prediction_v1_source")
     def source_prediction_v1(self):
-        async_result = predict_v1_task.delay(export_name="code_for_boston_export_90d", days_ago=90)
+        async_result = predict_v1_task.delay(days_ago=90)
         return redirect(
             url_for(
                 "admin_downloadview.csv_wait", task_id=async_result.id, data_source="prediction"
@@ -193,7 +186,7 @@ def source_prediction_v1(self):
 
     @expose("/csv/src/prediction_v2_source")
     def source_prediction_v2(self):
-        async_result = predict_v2_task.delay(export_name="code_for_boston_export_90d", days_ago=90)
+        async_result = predict_v2_task.delay(days_ago=90)
         return redirect(
             url_for(
                 "admin_downloadview.csv_wait", task_id=async_result.id, data_source="prediction"
@@ -202,7 +195,7 @@ def source_prediction_v2(self):
 
     @expose("/csv/src/prediction_v3_source")
     def source_prediction_v3(self):
-        async_result = predict_v3_task.delay(export_name="code_for_boston_export_90d", days_ago=90)
+        async_result = predict_v3_task.delay(days_ago=90)
         return redirect(
             url_for(
                 "admin_downloadview.csv_wait", task_id=async_result.id, data_source="prediction"
@@ -211,7 +204,7 @@ def source_prediction_v3(self):
 
     @expose("/csv/src/prediction_v4_source")
     def source_prediction_v4(self):
-        async_result = predict_v4_task.delay(export_name="code_for_boston_export_90d", days_ago=90)
+        async_result = predict_v4_task.delay(days_ago=90)
         return redirect(
             url_for(
                 "admin_downloadview.csv_wait", task_id=async_result.id, data_source="prediction"
@@ -269,56 +262,56 @@ def sync_source_usgs_b(self):
 
     @expose("/csv/src_sync/processed_data_v1_source")
     def sync_source_combine_data_v1(self):
-        df = combine_data_v1_task.run(days_ago=90, export_name="code_for_boston_export_90d")
+        df = combine_data_v1_task.run(days_ago=90)
         return send_csv_attachment_of_dataframe(
             df=pd.DataFrame(df), filename="model_processed_data.csv"
         )
 
     @expose("/csv/src_sync/processed_data_v2_source")
     def sync_source_combine_data_v2(self):
-        df = combine_data_v2_task.run(days_ago=90, export_name="code_for_boston_export_90d")
+        df = combine_data_v2_task.run(days_ago=90)
         return send_csv_attachment_of_dataframe(
             df=pd.DataFrame(df), filename="model_processed_data.csv"
         )
 
     @expose("/csv/src_sync/processed_data_v3_source")
     def sync_source_combine_data_v3(self):
-        df = combine_data_v3_task.run(days_ago=90, export_name="code_for_boston_export_90d")
+        df = combine_data_v3_task.run(days_ago=90)
         return send_csv_attachment_of_dataframe(
             df=pd.DataFrame(df), filename="model_processed_data.csv"
         )
 
     @expose("/csv/src_sync/processed_data_v4_source")
     def sync_source_combine_data_v4(self):
-        df = combine_data_v4_task.run(days_ago=90, export_name="code_for_boston_export_90d")
+        df = combine_data_v4_task.run(days_ago=90)
         return send_csv_attachment_of_dataframe(
             df=pd.DataFrame(df), filename="model_processed_data.csv"
         )
 
     @expose("/csv/src_sync/prediction_v1_source")
     def sync_source_prediction_v1(self):
-        df = predict_v1_task.run(days_ago=90, export_name="code_for_boston_export_90d")
+        df = predict_v1_task.run(days_ago=90)
         return send_csv_attachment_of_dataframe(
             df=pd.DataFrame(df), filename="prediction_source.csv"
         )
 
     @expose("/csv/src_sync/prediction_v2_source")
     def sync_source_prediction_v2(self):
-        df = predict_v2_task.run(days_ago=90, export_name="code_for_boston_export_90d")
+        df = predict_v2_task.run(days_ago=90)
         return send_csv_attachment_of_dataframe(
             df=pd.DataFrame(df), filename="prediction_source.csv"
         )
 
     @expose("/csv/src_sync/prediction_v3_source")
     def sync_source_prediction_v3(self):
-        df = predict_v3_task.run(days_ago=90, export_name="code_for_boston_export_90d")
+        df = predict_v3_task.run(days_ago=90)
         return send_csv_attachment_of_dataframe(
             df=pd.DataFrame(df), filename="prediction_source.csv"
         )
 
     @expose("/csv/src_sync/prediction_v4_source")
     def sync_source_prediction_v4(self):
-        df = predict_v4_task.run(days_ago=90, export_name="code_for_boston_export_90d")
+        df = predict_v4_task.run(days_ago=90)
         return send_csv_attachment_of_dataframe(
             df=pd.DataFrame(df), filename="prediction_source.csv"
         )