Skip to content

Commit c4db3ef

Browse files
author
Andrei Bratu
committed
Properly close flow logs
1 parent 4b4b1eb commit c4db3ef

File tree

5 files changed

+19
-11
lines changed

5 files changed

+19
-11
lines changed

.fernignore

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -18,3 +18,7 @@ tests/
1818
## CI
1919

2020
.github/workflows/ci.yml
21+
22+
## Config
23+
24+
.gitignore

.gitignore

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -3,3 +3,5 @@ dist/
33
__pycache__/
44
poetry.toml
55
.ruff_cache/
6+
.vscode
7+
.env

src/humanloop/client.py

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -175,14 +175,14 @@ def prompt(
175175
path: Optional[str] = None,
176176
**prompt_kernel: Unpack[DecoratorPromptKernelRequestParams], # type: ignore
177177
):
178-
"""Decorator for declaring a (Prompt)[https://humanloop.com/docs/explanation/prompts] in code.
178+
"""Decorator for declaring a [Prompt](https://humanloop.com/docs/explanation/prompts) in code.
179179
180180
The decorator intercepts calls to LLM provider APIs and creates
181181
a new Prompt file based on the hyperparameters used in the call.
182182
If a hyperparameter is specified in the `@prompt` decorator, then
183183
they override any value intercepted from the LLM provider call.
184184
185-
If the (Prompt)[https://humanloop.com/docs/explanation/prompts] already exists
185+
If the [Prompt](https://humanloop.com/docs/explanation/prompts) already exists
186186
on the specified path, a new version will be upserted when any of the above change.
187187
188188
Here's an example of declaring a [Prompt](https://humanloop.com/docs/explanation/prompts) in code:
@@ -200,7 +200,7 @@ def call_llm(messages):
200200
).choices[0].message.content
201201
```
202202
203-
This will create a (Prompt)[https://humanloop.com/docs/explanation/prompts] with the following attributes:
203+
This will create a [Prompt](https://humanloop.com/docs/explanation/prompts) with the following attributes:
204204
205205
```python
206206
{
@@ -342,9 +342,9 @@ def flow(
342342
):
343343
"""Decorator for declaring a [Flow](https://humanloop.com/docs/explanation/flows) in code.
344344
345-
A [Flow](https://humanloop.com/docs/explanation/flows) decorator should be added
346-
at the entrypoint of your LLM feature. Call other functions decorated with
347-
Humanloop SDK decorators to create a Trace of Logs on Humanloop.
345+
A [Flow](https://humanloop.com/docs/explanation/flows) wrapped callable should
346+
be used as the entrypoint of your LLM feature. Call other functions wrapped with
347+
Humanloop decorators to create a trace of Logs on Humanloop.
348348
349349
Here's an example of declaring a [Flow](https://humanloop.com/docs/explanation/flows) in code:
350350
```python

src/humanloop/decorators/flow.py

Lines changed: 0 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -82,10 +82,6 @@ def wrapper(*args: Sequence[Any], **kwargs: Mapping[str, Any]) -> Any:
8282
"output": output_stringified,
8383
"error": error,
8484
}
85-
if inputs:
86-
flow_log["inputs"] = inputs
87-
if output:
88-
flow_log["output"] = output
8985

9086
# Write the Flow Log to the Span on HL_LOG_OT_KEY
9187
if flow_log:

src/humanloop/otel/exporter.py

Lines changed: 7 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,4 @@
11
import contextvars
2-
import copy
32
import json
43
import logging
54
import threading
@@ -70,6 +69,7 @@ def __init__(
7069
for thread in self._threads:
7170
thread.start()
7271
logger.debug("Exporter Thread %s started", thread.ident)
72+
self._flow_logs_to_complete: list[str] = []
7373

7474
def export(self, spans: trace.Sequence[ReadableSpan]) -> SpanExportResult:
7575
def is_evaluated_file(
@@ -133,6 +133,11 @@ def shutdown(self) -> None:
133133
for thread in self._threads:
134134
thread.join()
135135
logger.debug("Exporter Thread %s joined", thread.ident)
136+
for log_id in self._flow_logs_to_complete:
137+
self._client.flows.update_log(
138+
log_id=log_id,
139+
trace_status="complete",
140+
)
136141

137142
def force_flush(self, timeout_millis: int = 3000) -> bool:
138143
self._shutdown = True
@@ -340,6 +345,7 @@ def _export_flow(self, span: ReadableSpan) -> None:
340345
**log_object,
341346
trace_parent_id=trace_parent_id,
342347
)
348+
self._flow_logs_to_complete.append(log_response.id)
343349
self._span_id_to_uploaded_log_id[span.get_span_context().span_id] = log_response.id
344350
except HumanloopApiError as e:
345351
logger.error(str(e))

0 commit comments

Comments (0)