Skip to content

Commit 59677a1

Browse files
authored
Added evaluation_id to the return from evaluation.run(...) (#25)
1 parent b9ca6bf commit 59677a1

File tree

1 file changed: 3 additions (+3) and 0 deletions (-0)

src/humanloop/eval_utils.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -154,6 +154,8 @@ class EvaluatorCheck(BaseModel):
154154
"""The threshold to check the Evaluator against."""
155155
threshold_check: Optional[bool]
156156
"""Whether the latest version has an average Evaluator result above a threshold."""
157+
evaluation_id: str
158+
"""The ID of the corresponding Evaluation."""
157159

158160

159161
def _run_eval(
@@ -441,6 +443,7 @@ def process_datapoint(datapoint: Datapoint):
441443
delta=delta,
442444
threshold=threshold,
443445
threshold_check=threshold_check,
446+
evaluation_id=evaluation.id,
444447
)
445448
)
446449
logger.info(f"\n{CYAN}View your Evaluation:{RESET}\n{evaluation.url}\n")

0 commit comments

Comments (0)