Skip to content

Commit 64af826

Browse files
Fix prompt utils (#22)
* Release 0.8.6
* add prompt utils
* ruff
* make url more visible, improve capitalization
* add docstring
* mypy ignore for prompt_utils
* type hints
* | type syntax
* more types!
* use relative imports
* add prompt_utils to fernignore

Co-authored-by: fern-api <115122769+fern-api[bot]@users.noreply.github.com>
1 parent e9b69bb commit 64af826

File tree

6 files changed

+170
-12
lines changed

6 files changed

+170
-12
lines changed

.fernignore

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
# Specify files that shouldn't be modified by Fern
22

33
src/humanloop/eval_utils.py
4+
src/humanloop/prompt_utils.py
45
src/humanloop/client.py
56
mypy.ini
6-

README.md

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -41,7 +41,7 @@ client.prompts.log(
4141
messages=[{"role": "user", "content": "What really happened at Roswell?"}],
4242
inputs={"person": "Trump"},
4343
created_at=datetime.datetime.fromisoformat(
44-
"2024-07-19 00:29:35.178000+00:00",
44+
"2024-07-18 23:29:35.178000+00:00",
4545
),
4646
provider_latency=6.5931549072265625,
4747
output_message={
@@ -88,7 +88,7 @@ async def main() -> None:
8888
],
8989
inputs={"person": "Trump"},
9090
created_at=datetime.datetime.fromisoformat(
91-
"2024-07-19 00:29:35.178000+00:00",
91+
"2024-07-18 23:29:35.178000+00:00",
9292
),
9393
provider_latency=6.5931549072265625,
9494
output_message={

reference.md

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -56,7 +56,7 @@ client.prompts.log(
5656
messages=[{"role": "user", "content": "What really happened at Roswell?"}],
5757
inputs={"person": "Trump"},
5858
created_at=datetime.datetime.fromisoformat(
59-
"2024-07-19 00:29:35.178000+00:00",
59+
"2024-07-18 23:29:35.178000+00:00",
6060
),
6161
provider_latency=6.5931549072265625,
6262
output_message={
@@ -6258,10 +6258,10 @@ client.flows.log(
62586258
output="The patient is likely experiencing a myocardial infarction. Immediate medical attention is required.",
62596259
trace_status="incomplete",
62606260
start_time=datetime.datetime.fromisoformat(
6261-
"2024-07-08 22:40:35+00:00",
6261+
"2024-07-08 21:40:35+00:00",
62626262
),
62636263
end_time=datetime.datetime.fromisoformat(
6264-
"2024-07-08 22:40:39+00:00",
6264+
"2024-07-08 21:40:39+00:00",
62656265
),
62666266
)
62676267

src/humanloop/flows/client.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -197,10 +197,10 @@ def log(
197197
output="The patient is likely experiencing a myocardial infarction. Immediate medical attention is required.",
198198
trace_status="incomplete",
199199
start_time=datetime.datetime.fromisoformat(
200-
"2024-07-08 22:40:35+00:00",
200+
"2024-07-08 21:40:35+00:00",
201201
),
202202
end_time=datetime.datetime.fromisoformat(
203-
"2024-07-08 22:40:39+00:00",
203+
"2024-07-08 21:40:39+00:00",
204204
),
205205
)
206206
"""
@@ -1366,10 +1366,10 @@ async def main() -> None:
13661366
output="The patient is likely experiencing a myocardial infarction. Immediate medical attention is required.",
13671367
trace_status="incomplete",
13681368
start_time=datetime.datetime.fromisoformat(
1369-
"2024-07-08 22:40:35+00:00",
1369+
"2024-07-08 21:40:35+00:00",
13701370
),
13711371
end_time=datetime.datetime.fromisoformat(
1372-
"2024-07-08 22:40:39+00:00",
1372+
"2024-07-08 21:40:39+00:00",
13731373
),
13741374
)
13751375

src/humanloop/prompt_utils.py

Lines changed: 158 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,158 @@
1+
import copy
2+
from typing import Any, Dict, List, Optional, TypeVar, Sequence
3+
import logging
4+
5+
import re
6+
7+
from .requests.chat_message import ChatMessageParams
8+
from .prompts.requests.prompt_request_template import (
9+
PromptRequestTemplateParams,
10+
)
11+
12+
13+
logger = logging.getLogger(__name__)
14+
15+
16+
class PromptVariablesNotFoundError(ValueError):
    """Raised when the provided inputs do not cover every prompt template variable."""

    # Names of the template variables that had no corresponding input.
    missing_variables: List[str]

    def __init__(self, missing_variables: List[str]) -> None:
        self.missing_variables = missing_variables
        message = f"Prompt requires inputs for the following variables: {missing_variables}"
        super().__init__(message)
25+
26+
27+
def populate_prompt_template(
    template: str,
    inputs: Optional[Dict[str, Any]],
) -> str:
    """Interpolate a string template with the given inputs, where template
    variables are specified using double curly bracket syntax: ``{{variable}}``.

    Args:
        template: String template whose variables use double curly bracket
            syntax: ``{{variable}}``.
        inputs: Key/value pairs to inject into the template variables, where
            the key is the template variable name and the value is what to
            inject. Non-string values are converted with ``str()``.

    Returns:
        The interpolated template string.

    Raises:
        PromptVariablesNotFoundError: If any template variables are missing
            from ``inputs``.
    """
    template_variables: List[str] = re.findall(
        # Matching variables: `{{ variable_2 }}`
        r"{{\s?([a-zA-Z_\d\.\[\]]+)\s?}}",
        template,
    ) + re.findall(
        # Matching tools: `{{ tool_2("all characters$#@$!") }}`
        # https://regexr.com/7nvrf
        r"\{\{\s?([a-zA-Z_\-\d]+\([a-zA-Z_\-\d,\s\"]+\))\s?\}\}",
        template,
    )

    if inputs is None:
        inputs = {}

    # Populate the template variables, tracking any that are missing.
    prompt = template
    missing_vars: List[str] = []

    # e.g. var: input_name, sig(input_name), sig(other_name), sig("string")
    for var in template_variables:
        text = inputs.get(var)

        if text is None:
            missing_vars.append(var)
            continue

        if not isinstance(text, str):
            # Lazy %-style args so the message is only rendered when emitted.
            logger.info(
                "Converting input value for variable '%s' to string for prompt template: %s",
                var,
                text,
            )
            text = str(text)
        # Use a callable replacement so the value is inserted literally:
        # with a plain replacement string, backslashes (and group
        # references such as \1) would need manual escaping.
        prompt = re.sub(
            r"{{\s?" + re.escape(var) + r"\s?}}",
            lambda _match, _text=text: _text,
            prompt,
        )

    if missing_vars:
        missing_vars.sort()
        raise PromptVariablesNotFoundError(
            missing_variables=missing_vars,
        )

    return prompt
93+
94+
95+
def sanitize_prompt(prompt: str) -> str:
    """Escape backslashes in *prompt* so it is safe to use as a ``re.sub``
    replacement string.

    Args:
        prompt: The raw replacement text.

    Returns:
        ``prompt`` with every backslash doubled.
    """
    return prompt.replace("\\", "\\\\")
97+
98+
99+
def populate_chat_template(
    chat_template: Sequence[ChatMessageParams],
    inputs: Optional[Dict[str, str]] = None,
) -> List[ChatMessageParams]:
    """Interpolate every message of a chat template with the given inputs.

    Args:
        chat_template: Chat messages whose ``content`` may contain
            ``{{variable}}`` placeholders, either directly as a string or
            inside ``text`` content items.
        inputs: Key/value pairs to inject into the template variables.

    Returns:
        A new list of messages with populated content; the input messages
        are not mutated.

    Raises:
        PromptVariablesNotFoundError: If any template variables are missing
            from ``inputs``.
    """
    messages: List[ChatMessageParams] = []
    message: ChatMessageParams
    for message in chat_template:
        # Messages without content (e.g. pure tool-call messages) pass
        # through unchanged.
        if "content" not in message:
            messages.append(message)
            continue

        message_content = copy.deepcopy(message["content"])
        if isinstance(message_content, str):
            message_content = populate_prompt_template(
                template=message_content,
                inputs=inputs,
            )
        elif isinstance(message_content, list):
            for content_item in message_content:
                if content_item["type"] == "text":
                    # Bug fix: this previously used tuple unpacking
                    # `(text,) = populate_prompt_template(...)`, which
                    # unpacked the returned *string* and raised ValueError
                    # whenever the populated text was not exactly one
                    # character long.
                    content_item["text"] = populate_prompt_template(
                        template=content_item["text"],
                        inputs=inputs,
                    )
        # NOTE(review): only role and content are carried over; any extra
        # fields on the message (e.g. name, tool_call_id) are dropped,
        # matching the original construction — confirm this is intended.
        messages.append(
            ChatMessageParams(
                role=message["role"],
                content=message_content,
            )
        )
    return messages
133+
134+
135+
T = TypeVar("T", bound=PromptRequestTemplateParams)


def populate_template(template: T, inputs: Dict[str, str]) -> T:
    """Populate a Prompt's template with the given inputs.

    Humanloop supports inserting variables of the form ``{{variable}}`` into
    Prompt templates. For example, given the template ``Hello {{name}}`` and
    the input ``{"name": "Alice"}``, the populated template is
    ``Hello Alice``.

    Both completion and chat models are supported: pass the template as a
    string for completion models, or as a list of messages for chat models.
    """
    if isinstance(template, str):
        populated = populate_prompt_template(
            template=template,
            inputs=inputs,
        )
    else:
        populated = populate_chat_template(
            chat_template=template,
            inputs=inputs,
        )
    return populated

src/humanloop/prompts/client.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -236,7 +236,7 @@ def log(
236236
messages=[{"role": "user", "content": "What really happened at Roswell?"}],
237237
inputs={"person": "Trump"},
238238
created_at=datetime.datetime.fromisoformat(
239-
"2024-07-19 00:29:35.178000+00:00",
239+
"2024-07-18 23:29:35.178000+00:00",
240240
),
241241
provider_latency=6.5931549072265625,
242242
output_message={
@@ -2117,7 +2117,7 @@ async def main() -> None:
21172117
],
21182118
inputs={"person": "Trump"},
21192119
created_at=datetime.datetime.fromisoformat(
2120-
"2024-07-19 00:29:35.178000+00:00",
2120+
"2024-07-18 23:29:35.178000+00:00",
21212121
),
21222122
provider_latency=6.5931549072265625,
21232123
output_message={

0 commit comments

Comments
 (0)