# MotionBench / eval_final_results.py
from compute_accuracy import compute_accuracy
def eval_final(test_metafile, dev_metafile, to_eval):
    """Evaluate a submission against the test set and, if possible, the dev set."""
    print("Computing accuracy...")
    result_test = compute_accuracy(to_eval, test_metafile)

    # Dev results are optional: if the dev answers are missing or cannot be
    # parsed, report 0 for the dev average instead of failing.
    try:
        result_dev = compute_accuracy(to_eval, dev_metafile)
    except Exception:
        print("Parsing dev answer error, returning 0 as result")
        result_dev = {'answered_acc': 0}

    output = {"dev avg": result_dev['answered_acc'],
              "test avg": result_test['answered_acc'],
              **result_test}
    return output
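

# Minimal usage sketch (an assumption, not part of the original file): the
# file paths below are hypothetical, and the exact input format expected by
# compute_accuracy is not shown here.
if __name__ == "__main__":
    results = eval_final(
        test_metafile="test_meta.json",   # hypothetical test-set metafile
        dev_metafile="dev_meta.json",     # hypothetical dev-set metafile
        to_eval="model_answers.json",     # hypothetical submission to evaluate
    )
    print(results)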