Skip to content

Commit 00d147d

Browse files
ankursharma authored and copybara-github committed
fix: check if eval config file exists
PiperOrigin-RevId: 824709118
1 parent b7dbfed commit 00d147d

File tree

2 files changed

+15
-2
lines changed

2 files changed

+15
-2
lines changed

src/google/adk/evaluation/eval_config.py

Lines changed: 5 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -15,6 +15,7 @@
1515
from __future__ import annotations
1616

1717
import logging
18+
import os
1819
from typing import Optional
1920
from typing import Union
2021

@@ -89,12 +90,14 @@ def get_evaluation_criteria_or_default(
8990
9091
Otherwise a default one is returned.
9192
"""
92-
if eval_config_file_path:
93+
if eval_config_file_path and os.path.exists(eval_config_file_path):
9394
with open(eval_config_file_path, "r", encoding="utf-8") as f:
9495
content = f.read()
9596
return EvalConfig.model_validate_json(content)
9697

97-
logger.info("No config file supplied. Using default criteria.")
98+
logger.info(
99+
"No config file supplied or file not found. Using default criteria."
100+
)
98101
return _DEFAULT_EVAL_CONFIG
99102

100103

tests/unittests/evaluation/test_eval_config.py

Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -27,6 +27,7 @@ def test_get_evaluation_criteria_or_default_returns_default():
2727

2828

2929
def test_get_evaluation_criteria_or_default_reads_from_file(mocker):
30+
mocker.patch("os.path.exists", return_value=True)
3031
eval_config = EvalConfig(
3132
criteria={"tool_trajectory_avg_score": 0.5, "response_match_score": 0.5}
3233
)
@@ -36,6 +37,15 @@ def test_get_evaluation_criteria_or_default_reads_from_file(mocker):
3637
assert get_evaluation_criteria_or_default("dummy_path") == eval_config
3738

3839

40+
def test_get_evaluation_criteria_or_default_returns_default_if_file_not_found(
41+
mocker,
42+
):
43+
mocker.patch("os.path.exists", return_value=False)
44+
assert (
45+
get_evaluation_criteria_or_default("dummy_path") == _DEFAULT_EVAL_CONFIG
46+
)
47+
48+
3949
def test_get_eval_metrics_from_config():
4050
rubric_1 = Rubric(
4151
rubric_id="test-rubric",

0 commit comments

Comments (0)