Skip to content

Commit 94b4270

Browse files
petrbrzek and claude
committed
feat: add TTL support for Anthropic cache control
- Added `cache_ttl` field to the Message interface and schema
- Cache control now accepts an object format with a `ttl` option
- Backwards compatible with existing boolean `cacheControl` usage

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
1 parent 8980aba commit 94b4270

4 files changed

Lines changed: 33 additions & 11 deletions

File tree

CHANGELOG.md

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,10 @@
11
# Changelog
22

3+
## 0.16.9
4+
5+
- Added TTL support for Anthropic cache control (`cache_ttl` field)
6+
- Cache control now accepts object format with `ttl` option: `{ type: "ephemeral", ttl: "1h" }`
7+
38
## 0.16.8
49

510
- Add GoogleGeminiV1 to ReasoningFormat and update related tests

package.json

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
{
22
"name": "langtail",
3-
"version": "0.16.8",
3+
"version": "0.16.9",
44
"description": "",
55
"main": "./Langtail.js",
66
"packageManager": "pnpm@8.15.6",

src/schemas.ts

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -136,6 +136,7 @@ export interface Message {
136136
// NOTE: dynamic property calculated by the client for the diff view
137137
hash?: string
138138
cache_enabled?: boolean
139+
cache_ttl?: string
139140
}
140141

141142
export interface PlaygroundMessage extends Message {
@@ -237,6 +238,7 @@ export const MessageSchema = z.object({
237238
tool_choice: ToolChoiceSchema.optional(),
238239
tool_call_id: z.string().optional(),
239240
cache_enabled: z.boolean().optional(),
241+
cache_ttl: z.string().optional(),
240242
reasoning: z.array(MessageReasoningSchema).optional(),
241243
reasoning_details: z.array(ReasoningDetailUnionSchema).nullish(),
242244
}) satisfies z.ZodType<Message>

src/vercel-ai/convert-to-openai-chat-messages.ts

Lines changed: 25 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -18,30 +18,42 @@ export function convertToOpenAIChatMessages({
1818
const messages: OpenAIChatPrompt = []
1919

2020
// Helper function to add a message with cacheControl if needed
21-
const addMessage = (message: any, cacheControl: boolean) => {
22-
if (cacheControl) {
21+
const addMessage = (
22+
message: any,
23+
cacheEnabled: boolean,
24+
cacheTtl?: string,
25+
) => {
26+
if (cacheEnabled) {
2327
message.cache_enabled = true
2428
}
29+
if (cacheTtl) {
30+
message.cache_ttl = cacheTtl
31+
}
2532

2633
messages.push(message)
2734
}
2835

2936
for (const { role, content, providerMetadata } of prompt) {
30-
const anthropicCacheControl = Boolean(
31-
providerMetadata?.anthropic?.cacheControl,
32-
)
37+
const anthropicCacheControl = providerMetadata?.anthropic?.cacheControl
38+
const cacheEnabled = Boolean(anthropicCacheControl)
39+
const cacheTtl =
40+
typeof anthropicCacheControl === "object" &&
41+
anthropicCacheControl !== null
42+
? (anthropicCacheControl as { ttl?: string }).ttl
43+
: undefined
3344

3445
switch (role) {
3546
case "system": {
36-
addMessage({ role: "system", content }, anthropicCacheControl)
47+
addMessage({ role: "system", content }, cacheEnabled, cacheTtl)
3748
break
3849
}
3950

4051
case "user": {
4152
if (content.length === 1 && content[0].type === "text") {
4253
addMessage(
4354
{ role: "user", content: content[0].text },
44-
anthropicCacheControl,
55+
cacheEnabled,
56+
cacheTtl,
4557
)
4658
break
4759
}
@@ -78,7 +90,8 @@ export function convertToOpenAIChatMessages({
7890
}
7991
}),
8092
},
81-
anthropicCacheControl,
93+
cacheEnabled,
94+
cacheTtl,
8295
)
8396
break
8497
}
@@ -150,7 +163,8 @@ export function convertToOpenAIChatMessages({
150163
reasoning_details: reasoningDetails,
151164
tool_calls: toolCalls.length > 0 ? toolCalls : undefined,
152165
},
153-
anthropicCacheControl,
166+
cacheEnabled,
167+
cacheTtl,
154168
)
155169
break
156170
}
@@ -203,7 +217,8 @@ export function convertToOpenAIChatMessages({
203217
tool_call_id: toolResponse.toolCallId,
204218
content: toolContent,
205219
},
206-
anthropicCacheControl,
220+
cacheEnabled,
221+
cacheTtl,
207222
)
208223
}
209224
break

0 commit comments

Comments (0)