Commit 6ca7401

Merge pull request #262 from dwreeves/main
Switch to new Hobolink API
2 parents: e019142 + 7fe59bf

36 files changed: +330 −9089 lines

.env.example

Lines changed: 16 additions & 8 deletions
@@ -1,16 +1,24 @@
-FLASK_ENV=development
-FLASK_APP="app.main:create_app"
+#FLASK_APP="app.main:create_app"
 
-HOBOLINK_USERNAME=replace_me
-HOBOLINK_PASSWORD=replace_me
-HOBOLINK_TOKEN=replace_me
+FLASK_DEBUG=true
+TESTING=true
+PREFERRED_URL_SCHEME=http
+
+HOBOLINK_LOGGERS=21949833
+#HOBOLINK_BEARER_TOKEN=replace_me
+HOBOLINK_EXCLUDE_SENSORS=21113095-1
 
 BASIC_AUTH_USERNAME=admin
 BASIC_AUTH_PASSWORD=password
 
-MAPBOX_ACCESS_TOKEN=replace_me
+CACHE_DEFAULT_TIMEOUT=60
+
+PGADMIN_DEFAULT_EMAIL=admin@example.com
+PGADMIN_DEFAULT_PASSWORD=admin
+
+#MAPBOX_ACCESS_TOKEN=replace_me
 
-SENTRY_DSN=replace_me
-SENTRY_ENVIRONMENT=replace_me
+#SENTRY_DSN=replace_me
+#SENTRY_ENVIRONMENT=replace_me
 
 USE_MOCK_DATA=false
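
The new variables replace the per-export Hobolink credentials with per-logger settings. A minimal sketch of how these values might be read from the environment; the helper and constant names here are assumptions for illustration, not code from this commit:

import os


def _split_csv(value: str) -> list[str]:
    """Split a comma-separated env var into a clean list of non-empty items."""
    return [item.strip() for item in value.split(",") if item.strip()]


# Names mirror .env.example above; the loading code itself is hypothetical.
HOBOLINK_LOGGERS = _split_csv(os.environ.get("HOBOLINK_LOGGERS", ""))
HOBOLINK_EXCLUDE_SENSORS = _split_csv(os.environ.get("HOBOLINK_EXCLUDE_SENSORS", ""))
HOBOLINK_BEARER_TOKEN = os.environ.get("HOBOLINK_BEARER_TOKEN")  # optional; commented out above
USE_MOCK_DATA = os.environ.get("USE_MOCK_DATA", "false").lower() == "true"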

app/admin/main.py

Lines changed: 1 addition & 3 deletions
@@ -32,9 +32,7 @@ def auth_protect_admin_pages():
     """Authorize all paths that start with /admin/."""
     if re.match("^/admin(?:$|/+)", request.path):
         # Force HTTPS
-        if app.config["ENV"] == "development":
-            pass
-        elif not request.is_secure:
+        if not request.is_secure:
             url = request.url.replace("http://", "https://", 1)
             return redirect(url)
 
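
With the development bypass removed, the admin guard always upgrades insecure requests to HTTPS. A standalone sketch of the same pattern as a Flask before_request hook, assuming a plain Flask app rather than this project's app factory:

import re

from flask import Flask, redirect, request

app = Flask(__name__)


@app.before_request
def force_https_on_admin():
    """Redirect plain-HTTP requests under /admin/ to their HTTPS equivalent."""
    if re.match("^/admin(?:$|/+)", request.path) and not request.is_secure:
        # Swap only the scheme; the path and query string are preserved.
        return redirect(request.url.replace("http://", "https://", 1))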

app/admin/views/data.py

Lines changed: 20 additions & 27 deletions
@@ -1,6 +1,8 @@
 import io
+from datetime import datetime
 
 import pandas as pd
+import pytz
 from flask import Response
 from flask import abort
 from flask import current_app
@@ -26,7 +28,6 @@
 from app.data.celery import predict_v4_task
 from app.data.celery import update_db_task
 from app.data.database import execute_sql
-from app.data.database import get_current_time
 
 
 def send_csv_attachment_of_dataframe(
@@ -54,7 +55,7 @@ def send_csv_attachment_of_dataframe(
 
    # Set the file name:
    if date_prefix:
-        now = get_current_time()
+        now = datetime.now(pytz.timezone("US/Eastern"))
        todays_date = now.strftime("%Y-%m-%d")
        filename = f"{todays_date}-{filename}"
 
@@ -127,7 +128,7 @@ def download_from_db(self, sql_table_name: str):
 
    @expose("/csv/src/hobolink_source")
    def source_hobolink(self):
-        async_result = live_hobolink_data_task.delay(export_name="code_for_boston_export_90d")
+        async_result = live_hobolink_data_task.delay(days_ago=90)
        return redirect(
            url_for("admin_downloadview.csv_wait", task_id=async_result.id, data_source="hobolink")
        )
@@ -148,43 +149,35 @@ def source_usgs_b(self):
 
    @expose("/csv/src/processed_data_v1_source")
    def source_combine_data_v1(self):
-        async_result = combine_data_v1_task.delay(
-            export_name="code_for_boston_export_90d", days_ago=90
-        )
+        async_result = combine_data_v1_task.delay(days_ago=90)
        return redirect(
            url_for("admin_downloadview.csv_wait", task_id=async_result.id, data_source="combined")
        )
 
    @expose("/csv/src/processed_data_v2_source")
    def source_combine_data_v2(self):
-        async_result = combine_data_v2_task.delay(
-            export_name="code_for_boston_export_90d", days_ago=90
-        )
+        async_result = combine_data_v2_task.delay(days_ago=90)
        return redirect(
            url_for("admin_downloadview.csv_wait", task_id=async_result.id, data_source="combined")
        )
 
    @expose("/csv/src/processed_data_v3_source")
    def source_combine_data_v3(self):
-        async_result = combine_data_v3_task.delay(
-            export_name="code_for_boston_export_90d", days_ago=90
-        )
+        async_result = combine_data_v3_task.delay(days_ago=90)
        return redirect(
            url_for("admin_downloadview.csv_wait", task_id=async_result.id, data_source="combined")
        )
 
    @expose("/csv/src/processed_data_v4_source")
    def source_combine_data_v4(self):
-        async_result = combine_data_v4_task.delay(
-            export_name="code_for_boston_export_90d", days_ago=90
-        )
+        async_result = combine_data_v4_task.delay(days_ago=90)
        return redirect(
            url_for("admin_downloadview.csv_wait", task_id=async_result.id, data_source="combined")
        )
 
    @expose("/csv/src/prediction_v1_source")
    def source_prediction_v1(self):
-        async_result = predict_v1_task.delay(export_name="code_for_boston_export_90d", days_ago=90)
+        async_result = predict_v1_task.delay(days_ago=90)
        return redirect(
            url_for(
                "admin_downloadview.csv_wait", task_id=async_result.id, data_source="prediction"
@@ -193,7 +186,7 @@ def source_prediction_v1(self):
 
    @expose("/csv/src/prediction_v2_source")
    def source_prediction_v2(self):
-        async_result = predict_v2_task.delay(export_name="code_for_boston_export_90d", days_ago=90)
+        async_result = predict_v2_task.delay(days_ago=90)
        return redirect(
            url_for(
                "admin_downloadview.csv_wait", task_id=async_result.id, data_source="prediction"
@@ -202,7 +195,7 @@ def source_prediction_v2(self):
 
    @expose("/csv/src/prediction_v3_source")
    def source_prediction_v3(self):
-        async_result = predict_v3_task.delay(export_name="code_for_boston_export_90d", days_ago=90)
+        async_result = predict_v3_task.delay(days_ago=90)
        return redirect(
            url_for(
                "admin_downloadview.csv_wait", task_id=async_result.id, data_source="prediction"
@@ -211,7 +204,7 @@ def source_prediction_v3(self):
 
    @expose("/csv/src/prediction_v4_source")
    def source_prediction_v4(self):
-        async_result = predict_v4_task.delay(export_name="code_for_boston_export_90d", days_ago=90)
+        async_result = predict_v4_task.delay(days_ago=90)
        return redirect(
            url_for(
                "admin_downloadview.csv_wait", task_id=async_result.id, data_source="prediction"
@@ -269,56 +262,56 @@ def sync_source_usgs_b(self):
 
    @expose("/csv/src_sync/processed_data_v1_source")
    def sync_source_combine_data_v1(self):
-        df = combine_data_v1_task.run(days_ago=90, export_name="code_for_boston_export_90d")
+        df = combine_data_v1_task.run(days_ago=90)
        return send_csv_attachment_of_dataframe(
            df=pd.DataFrame(df), filename="model_processed_data.csv"
        )
 
    @expose("/csv/src_sync/processed_data_v2_source")
    def sync_source_combine_data_v2(self):
-        df = combine_data_v2_task.run(days_ago=90, export_name="code_for_boston_export_90d")
+        df = combine_data_v2_task.run(days_ago=90)
        return send_csv_attachment_of_dataframe(
            df=pd.DataFrame(df), filename="model_processed_data.csv"
        )
 
    @expose("/csv/src_sync/processed_data_v3_source")
    def sync_source_combine_data_v3(self):
-        df = combine_data_v3_task.run(days_ago=90, export_name="code_for_boston_export_90d")
+        df = combine_data_v3_task.run(days_ago=90)
        return send_csv_attachment_of_dataframe(
            df=pd.DataFrame(df), filename="model_processed_data.csv"
        )
 
    @expose("/csv/src_sync/processed_data_v4_source")
    def sync_source_combine_data_v4(self):
-        df = combine_data_v4_task.run(days_ago=90, export_name="code_for_boston_export_90d")
+        df = combine_data_v4_task.run(days_ago=90)
        return send_csv_attachment_of_dataframe(
            df=pd.DataFrame(df), filename="model_processed_data.csv"
        )
 
    @expose("/csv/src_sync/prediction_v1_source")
    def sync_source_prediction_v1(self):
-        df = predict_v1_task.run(days_ago=90, export_name="code_for_boston_export_90d")
+        df = predict_v1_task.run(days_ago=90)
        return send_csv_attachment_of_dataframe(
            df=pd.DataFrame(df), filename="prediction_source.csv"
        )
 
    @expose("/csv/src_sync/prediction_v2_source")
    def sync_source_prediction_v2(self):
-        df = predict_v2_task.run(days_ago=90, export_name="code_for_boston_export_90d")
+        df = predict_v2_task.run(days_ago=90)
        return send_csv_attachment_of_dataframe(
            df=pd.DataFrame(df), filename="prediction_source.csv"
        )
 
    @expose("/csv/src_sync/prediction_v3_source")
    def sync_source_prediction_v3(self):
-        df = predict_v3_task.run(days_ago=90, export_name="code_for_boston_export_90d")
+        df = predict_v3_task.run(days_ago=90)
        return send_csv_attachment_of_dataframe(
            df=pd.DataFrame(df), filename="prediction_source.csv"
        )
 
    @expose("/csv/src_sync/prediction_v4_source")
    def sync_source_prediction_v4(self):
-        df = predict_v4_task.run(days_ago=90, export_name="code_for_boston_export_90d")
+        df = predict_v4_task.run(days_ago=90)
        return send_csv_attachment_of_dataframe(
            df=pd.DataFrame(df), filename="prediction_source.csv"
        )
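
With get_current_time() gone, the CSV filename prefix is built directly from a timezone-aware US/Eastern timestamp. A small sketch of the resulting behaviour, with the surrounding send_csv_attachment_of_dataframe() machinery stripped away:

from datetime import datetime

import pytz


def date_prefixed(filename: str) -> str:
    """Prefix a filename with today's Eastern-time date, e.g. '2024-01-31-hobolink.csv'."""
    now = datetime.now(pytz.timezone("US/Eastern"))
    return f"{now.strftime('%Y-%m-%d')}-{filename}"

The Celery calls are also simplified: delay(days_ago=90) replaces the old export_name argument, which suggests the new Hobolink API is queried by logger and time window rather than by a named export.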

app/blueprints/api_v1.py

Lines changed: 4 additions & 2 deletions
@@ -1,3 +1,6 @@
+from datetime import UTC
+from datetime import datetime
+
 from flasgger import LazyString
 from flasgger import Swagger
 from flasgger import swag_from
@@ -9,7 +12,6 @@
 from flask import url_for
 
 from app.data.database import execute_sql
-from app.data.database import get_current_time
 from app.data.globals import boathouses
 from app.data.globals import cache
 from app.data.globals import reaches
@@ -36,7 +38,7 @@ def predictive_model_api():
    return jsonify(
        {
            "model_version": DEFAULT_MODEL_VERSION,
-            "time_returned": get_current_time(),
+            "time_returned": datetime.now(UTC),
            "is_boating_season": website_options.boating_season,
            "model_outputs": [
                {
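
datetime.now(UTC) depends on datetime.UTC, an alias for timezone.utc added in Python 3.11; on older interpreters the equivalent spelling is datetime.now(timezone.utc). A quick check, independent of this codebase:

from datetime import UTC, datetime, timezone

# UTC is the same object as timezone.utc, so both calls return aware UTC timestamps.
assert UTC is timezone.utc
print(datetime.now(UTC).isoformat())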

app/blueprints/frontend.py

Lines changed: 5 additions & 4 deletions
@@ -1,3 +1,5 @@
+from datetime import UTC
+from datetime import datetime
 from typing import Any
 from typing import Dict
 
@@ -7,7 +9,6 @@
 from flask import flash
 from flask import render_template
 
-from app.data.database import get_current_time
 from app.data.globals import boathouses
 from app.data.globals import cache
 from app.data.globals import reaches
@@ -22,7 +23,7 @@
 @cache.cached()  # <-- needs to be here. some issues occur if you exclude it.
 def before_request():
    last_pred_time = get_latest_prediction_time()
-    current_time = get_current_time()
+    current_time = datetime.now(UTC)
    # Calculate difference between now and latest prediction time
    # If model_outputs has zero rows, we raise the following error:
    # > TypeError: unsupported operand type(s) for -: 'Timestamp' and 'NoneType'
@@ -33,8 +34,8 @@ def before_request():
        diff = None
 
    # If more than 48 hours, flash message.
-    if current_app.config["ENV"] == "demo":
-        flash("This website is currently in demo mode. It is not using live data.")
+    if current_app.config["USE_MOCK_DATA"]:
+        flash("This website is currently in MOCK_DATA=true mode. It is not using live data.")
    elif diff is None:
        flash(
            "A unknown error occurred. It is likely that the database does not "
