
Commit d461e2c

Merge pull request #2 from Breeding-Insight/csv_export
Fixed CSV exporting for image and folder modes. Nick added a confidence-threshold column to the CSV for better reproducibility.
2 parents 3ca52f3 + 07ff16e commit d461e2c

File tree

1 file changed (+20 -6)

app.py

Lines changed: 20 additions & 6 deletions
@@ -39,8 +39,8 @@
 WEIGHTS_FILE = APP_ROOT / 'weights.pt'
 app.config['UPLOAD_FOLDER'] = str(UPLOAD_FOLDER)
 app.config['RESULTS_FOLDER'] = str(RESULTS_FOLDER)
-app.config['WEIGHTS_FILE'] = str(WEIGHTS_FILE)
 app.config['ANNOT_FOLDER'] = str(ANNOT_FOLDER)
+app.config['WEIGHTS_FILE'] = str(WEIGHTS_FILE)
 app.config['ALLOWED_EXTENSIONS'] = {'png', 'jpg', 'jpeg', 'tif', 'tiff'}
 
 # skip these -- created dirs in dockerfile
@@ -235,6 +235,7 @@ def get_progress():
             with open(pkl_file, 'rb') as pf:
                 all_results[uuid_base] = pickle.load(pf)
         resp['results'] = all_results
+        print(f"Job executed successfully! {len(all_results)} results aggregated.")
         return jsonify(resp)
 
     # If still processing, update progress
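For context, the hunk above relies on a glob-and-unpickle pattern to gather per-image results once a job finishes. A minimal standalone sketch of that pattern, assuming one <uuid>.pkl per processed image under the session's results directory (aggregate_results is a hypothetical helper name, not one from app.py):

import pickle
from pathlib import Path

def aggregate_results(results_dir: Path) -> dict:
    # Collect every per-image pickle, keyed by its UUID (the file stem).
    all_results = {}
    for pkl_file in results_dir.glob('*.pkl'):
        with open(pkl_file, 'rb') as pf:
            all_results[pkl_file.stem] = pickle.load(pf)
    return all_results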
@@ -283,7 +284,6 @@ def annotate_image():
 
     if not img_name:
         return jsonify({'error': 'File not found'}), 404
-
     # Load detections from pickle
     result_path = Path(app.config['RESULTS_FOLDER']) / session_id / f"{uuid}.pkl"
     if not result_path.exists():
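The single-image lookup above follows the same path convention, RESULTS_FOLDER/<session_id>/<uuid>.pkl, with a missing file signalling a 404 to the caller. A hedged sketch of that lookup (load_detections is a hypothetical helper; the real annotate_image inlines this logic):

import pickle
from pathlib import Path

def load_detections(results_folder: str, session_id: str, uuid: str):
    # Return one image's pickled detections, or None so the caller can 404.
    result_path = Path(results_folder) / session_id / f"{uuid}.pkl"
    if not result_path.exists():
        return None
    with open(result_path, 'rb') as pf:
        return pickle.load(pf)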
@@ -367,14 +367,28 @@ def export_csv():
     try:
         data = request.json
         session_id = session['id']
-        threshold = float(data.get('confidence', 0.5))
         job_state = session.get('job_state')
+        filename_map = session.get('filename_map')
+        threshold = float(data.get('confidence', 0.5))
         if not job_state:
             return jsonify({'error': 'Job not found'}), 404
+
+        # iterate through the results
+        results_dir = Path(app.config['RESULTS_FOLDER']) / session_id
+        pkl_paths = list(results_dir.glob('*.pkl'))
+        all_results = {}
+        for path in pkl_paths:
+            uuid_base = path.stem
+            with open(path, 'rb') as pf:
+                all_results[uuid_base] = pickle.load(pf)
+
+        # populate rows for CSV conversion
         rows = []
-        for orig_name, detections in job_state['detections'].items():
-            count = sum(1 for d in detections if d['score'] >= threshold)
-            rows.append({'Filename': orig_name, 'EggsDetected': count})
+        for uuid in all_results.keys():
+            count = sum(1 for d in all_results[uuid] if d['score'] >= threshold)
+            rows.append({'Filename': filename_map[uuid], 'EggsDetected': count})
+        rows = sorted(rows, key=lambda x: x['Filename'].lower())
+        # write the CSV out
         timestamp = datetime.now().strftime('%Y%m%d_%H%M%S')
         output = io.StringIO()
         writer = csv.DictWriter(output, fieldnames=['Filename', 'EggsDetected'])
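The hunk is cut off right after the DictWriter is created. A sketch of how the export plausibly finishes, writing the header and rows and returning the buffer as a download; the Response construction and the egg_counts_<timestamp>.csv filename are assumptions, not shown in this diff:

import csv
import io
from datetime import datetime
from flask import Response

def rows_to_csv_response(rows: list[dict]) -> Response:
    # Mirror the StringIO/DictWriter setup from the diff, then return the
    # buffer as a downloadable attachment (filename is an assumption).
    timestamp = datetime.now().strftime('%Y%m%d_%H%M%S')
    output = io.StringIO()
    writer = csv.DictWriter(output, fieldnames=['Filename', 'EggsDetected'])
    writer.writeheader()
    writer.writerows(rows)
    return Response(
        output.getvalue(),
        mimetype='text/csv',
        headers={'Content-Disposition': f'attachment; filename=egg_counts_{timestamp}.csv'},
    )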
