Skip to content

Commit 0799123

Browse files
author
Andrei Bratu
committed
draft
1 parent 577de0e commit 0799123

File tree

18 files changed

+145
-186
lines changed

18 files changed

+145
-186
lines changed

poetry.lock

Lines changed: 6 additions & 6 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

reference.md

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -56,7 +56,7 @@ client.prompts.log(
5656
messages=[{"role": "user", "content": "What really happened at Roswell?"}],
5757
inputs={"person": "Trump"},
5858
created_at=datetime.datetime.fromisoformat(
59-
"2024-07-19 00:29:35.178000+00:00",
59+
"2024-07-18 23:29:35.178000+00:00",
6060
),
6161
provider_latency=6.5931549072265625,
6262
output_message={
@@ -6598,10 +6598,10 @@ client.flows.log(
65986598
output="The patient is likely experiencing a myocardial infarction. Immediate medical attention is required.",
65996599
trace_status="incomplete",
66006600
start_time=datetime.datetime.fromisoformat(
6601-
"2024-07-08 22:40:35+00:00",
6601+
"2024-07-08 21:40:35+00:00",
66026602
),
66036603
end_time=datetime.datetime.fromisoformat(
6604-
"2024-07-08 22:40:39+00:00",
6604+
"2024-07-08 21:40:39+00:00",
66056605
),
66066606
)
66076607

src/humanloop/client.py

Lines changed: 10 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -10,16 +10,16 @@
1010
from opentelemetry.trace import Tracer
1111

1212
from humanloop.core.client_wrapper import SyncClientWrapper
13-
from humanloop.decorators.types import DecoratorPromptKernelRequestParams
13+
from humanloop.utilities.types import DecoratorPromptKernelRequestParams
1414
from humanloop.eval_utils.context import EVALUATION_CONTEXT_VARIABLE_NAME, EvaluationContext
1515

1616
from humanloop.eval_utils import log_with_evaluation_context, run_eval
1717
from humanloop.eval_utils.types import Dataset, Evaluator, EvaluatorCheck, File
1818

1919
from humanloop.base_client import AsyncBaseHumanloop, BaseHumanloop
20-
from humanloop.decorators.flow import flow as flow_decorator_factory
21-
from humanloop.decorators.prompt import prompt as prompt_decorator_factory
22-
from humanloop.decorators.tool import tool as tool_decorator_factory
20+
from humanloop.utilities.flow import flow as flow_decorator_factory
21+
from humanloop.utilities.prompt import prompt as prompt_decorator_factory
22+
from humanloop.utilities.tool import tool as tool_decorator_factory
2323
from humanloop.environment import HumanloopEnvironment
2424
from humanloop.evaluations.client import EvaluationsClient
2525
from humanloop.otel import instrument_provider
@@ -49,7 +49,6 @@ def run(
4949
name: Optional[str],
5050
dataset: Dataset,
5151
evaluators: Optional[Sequence[Evaluator]] = None,
52-
# logs: typing.Sequence[dict] | None = None,
5352
workers: int = 4,
5453
) -> List[EvaluatorCheck]:
5554
"""Evaluate your function for a given `Dataset` and set of `Evaluators`.
@@ -175,14 +174,14 @@ def prompt(
175174
path: Optional[str] = None,
176175
**prompt_kernel: Unpack[DecoratorPromptKernelRequestParams], # type: ignore
177176
):
178-
"""Decorator for declaring a (Prompt)[https://humanloop.com/docs/explanation/prompts] in code.
177+
"""Decorator for declaring a [Prompt](https://humanloop.com/docs/explanation/prompts) in code.
179178
180179
The decorator intercepts calls to LLM provider APIs and creates
181180
a new Prompt file based on the hyperparameters used in the call.
182181
If a hyperparameter is specified in the `@prompt` decorator, then
183182
they override any value intercepted from the LLM provider call.
184183
185-
If the (Prompt)[https://humanloop.com/docs/explanation/prompts] already exists
184+
If the [Prompt](https://humanloop.com/docs/explanation/prompts) already exists
186185
on the specified path, a new version will be upserted when any of the above change.
187186
188187
Here's an example of declaring a (Prompt)[https://humanloop.com/docs/explanation/prompts] in code:
@@ -200,7 +199,7 @@ def call_llm(messages):
200199
).choices[0].message.content
201200
```
202201
203-
This will create a (Prompt)[https://humanloop.com/docs/explanation/prompts] with the following attributes:
202+
This will create a [Prompt](https://humanloop.com/docs/explanation/prompts) with the following attributes:
204203
205204
```python
206205
{
@@ -342,9 +341,9 @@ def flow(
342341
):
343342
"""Decorator for declaring a [Flow](https://humanloop.com/docs/explanation/flows) in code.
344343
345-
A [Flow](https://humanloop.com/docs/explanation/flows) decorator should be added
346-
at the entrypoint of your LLM feature. Call other functions decorated with
347-
Humanloop SDK decorators to create a Trace of Logs on Humanloop.
344+
A [Flow](https://humanloop.com/docs/explanation/flows) wrapped callable should
345+
be used as the entrypoint of your LLM feature. Call other functions wrapped with
346+
Humanloop decorators to create a trace of Logs on Humanloop.
348347
349348
Here's an example of declaring a [Flow](https://humanloop.com/docs/explanation/flows) in code:
350349
```python

src/humanloop/core/client_wrapper.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -16,7 +16,7 @@ def get_headers(self) -> typing.Dict[str, str]:
1616
headers: typing.Dict[str, str] = {
1717
"X-Fern-Language": "Python",
1818
"X-Fern-SDK-Name": "humanloop",
19-
"X-Fern-SDK-Version": "0.8.19",
19+
"X-Fern-SDK-Version": "0.8.20",
2020
}
2121
headers["X-API-KEY"] = self.api_key
2222
return headers

src/humanloop/eval_utils/run.py

Lines changed: 1 addition & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -212,10 +212,6 @@ def increment(self):
212212
sys.stderr.write("\n")
213213

214214

215-
# Module-level so it can be shared by threads.
216-
_PROGRESS_BAR: Optional[_SimpleProgressBar] = None
217-
218-
219215
def run_eval(
220216
client: "BaseHumanloop",
221217
file: File,
@@ -236,7 +232,6 @@ def run_eval(
236232
:param workers: the number of threads to process datapoints using your function concurrently.
237233
:return: per Evaluator checks.
238234
"""
239-
global _PROGRESS_BAR
240235

241236
if hasattr(file["callable"], "file"):
242237
# When the decorator inside `file` is a decorated function,
@@ -274,7 +269,7 @@ def run_eval(
274269
try:
275270
type_ = typing.cast(FileType, file_.pop("type"))
276271
logger.info(
277-
f"{CYAN}Evaluating your {type_} function corresponding to `{file_['path']}` on Humanloop{RESET} \n\n"
272+
f"{CYAN}Evaluating your {type_} function corresponding to `{file_.get('path') or file_.get('id')}` on Humanloop{RESET} \n\n"
278273
)
279274
except KeyError as _:
280275
type_ = "flow"

src/humanloop/flows/client.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -202,10 +202,10 @@ def log(
202202
output="The patient is likely experiencing a myocardial infarction. Immediate medical attention is required.",
203203
trace_status="incomplete",
204204
start_time=datetime.datetime.fromisoformat(
205-
"2024-07-08 22:40:35+00:00",
205+
"2024-07-08 21:40:35+00:00",
206206
),
207207
end_time=datetime.datetime.fromisoformat(
208-
"2024-07-08 22:40:39+00:00",
208+
"2024-07-08 21:40:39+00:00",
209209
),
210210
)
211211
"""
@@ -1438,10 +1438,10 @@ async def main() -> None:
14381438
output="The patient is likely experiencing a myocardial infarction. Immediate medical attention is required.",
14391439
trace_status="incomplete",
14401440
start_time=datetime.datetime.fromisoformat(
1441-
"2024-07-08 22:40:35+00:00",
1441+
"2024-07-08 21:40:35+00:00",
14421442
),
14431443
end_time=datetime.datetime.fromisoformat(
1444-
"2024-07-08 22:40:39+00:00",
1444+
"2024-07-08 21:40:39+00:00",
14451445
),
14461446
)
14471447

src/humanloop/otel/__init__.py

Lines changed: 0 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,4 @@
1-
from typing import Optional, TypedDict
2-
31
from opentelemetry.sdk.trace import TracerProvider
4-
from typing_extensions import NotRequired
52

63
from humanloop.otel.helpers import module_is_installed
74

@@ -41,12 +38,3 @@ def instrument_provider(provider: TracerProvider):
4138
from opentelemetry.instrumentation.bedrock import BedrockInstrumentor
4239

4340
BedrockInstrumentor().instrument(tracer_provider=provider)
44-
45-
46-
class FlowContext(TypedDict):
47-
trace_id: NotRequired[str]
48-
trace_parent_id: NotRequired[Optional[int]]
49-
is_flow_log: NotRequired[bool]
50-
51-
52-
TRACE_FLOW_CONTEXT: dict[int, FlowContext] = {}

src/humanloop/otel/constants.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -4,3 +4,5 @@
44
HUMANLOOP_LOG_KEY = "humanloop.log"
55
HUMANLOOP_FILE_TYPE_KEY = "humanloop.file.type"
66
HUMANLOOP_PATH_KEY = "humanloop.file.path"
7+
# Required for the exporter to know when to mark the Flow Log as complete
8+
HUMANLOOP_FLOW_PREREQUISITES_KEY = "humanloop.flow.prerequisites"

0 commit comments

Comments
 (0)