11import { Agent } from 'undici'
22
3- import { deepseekModels } from '@codebuff/common/constants/model-config'
43import { PROFIT_MARGIN } from '@codebuff/common/constants/limits'
54import { getErrorObject } from '@codebuff/common/util/error'
65import { env } from '@codebuff/internal/env'
@@ -10,6 +9,10 @@ import {
109 extractRequestMetadata ,
1110 insertMessageToBigQuery ,
1211} from './helpers'
12+ import {
13+ buildDeepSeekRequestBody ,
14+ DEEPSEEK_MODEL_IDS ,
15+ } from './deepseek-request-body'
1316
1417import type { UsageData } from './helpers'
1518import type { InsertMessageBigqueryFn } from '@codebuff/common/types/contracts/bigquery'
@@ -40,32 +43,25 @@ const DEEPSEEK_V4_PRO_PRICING: DeepSeekPricing = {
4043 outputCostPerToken : 0.87 / 1_000_000 ,
4144}
4245
/** Single source of truth for DeepSeek model metadata and pricing.
 * Kept as one map so adding a model can't drift between routing and billing. */
const DEEPSEEK_MODELS: Record<
  string,
  { deepseekId: string; pricing: DeepSeekPricing }
> = Object.fromEntries(
  // NOTE(review): every entry currently shares DEEPSEEK_V4_PRO_PRICING.
  // Presumably all ids in DEEPSEEK_MODEL_IDS are V4 Pro variants today —
  // confirm, and revisit this map if a differently-priced model is added.
  Object.entries(DEEPSEEK_MODEL_IDS).map(([model, deepseekId]) => [
    model,
    {
      deepseekId,
      pricing: DEEPSEEK_V4_PRO_PRICING,
    },
  ]),
)
5858
5959const DEEPSEEK_ROUTED_MODELS = new Set < string > ( Object . keys ( DEEPSEEK_MODELS ) )
6060
6161export function isDeepSeekModel ( model : string ) : boolean {
6262 return DEEPSEEK_ROUTED_MODELS . has ( model )
6363}
6464
65- function getDeepSeekModelId ( openrouterModel : string ) : string {
66- return DEEPSEEK_MODELS [ openrouterModel ] ?. deepseekId ?? openrouterModel
67- }
68-
6965function getDeepSeekPricing ( model : string ) : DeepSeekPricing {
7066 const entry = DEEPSEEK_MODELS [ model ]
7167 if ( ! entry ) {
@@ -87,127 +83,13 @@ type LineResult = {
8783 patchedLine : string
8884}
8985
90- function toDeepSeekReasoningEffort ( effort : unknown ) : 'high' | 'max' {
91- return effort === 'max' || effort === 'xhigh' ? 'max' : 'high'
92- }
93-
94- function unsupportedAttachmentNotice ( kind : string , count : number ) : string {
95- const noun = count === 1 ? kind : `${ kind } s`
96- const verb = count === 1 ? 'was' : 'were'
97- return `[${ count } ${ noun } ${ verb } omitted because the DeepSeek API does not support ${ kind } input.]`
98- }
99-
100- function contentPartsToDeepSeekText (
101- content : NonNullable <
102- ChatCompletionRequestBody [ 'messages' ] [ number ] [ 'content' ]
103- > ,
104- ) : string {
105- if ( ! Array . isArray ( content ) ) {
106- return content
107- }
108-
109- const textParts : string [ ] = [ ]
110- let imageCount = 0
111- let fileCount = 0
112- let unsupportedCount = 0
113-
114- for ( const part of content ) {
115- switch ( part . type ) {
116- case 'text' : {
117- if ( typeof part . text === 'string' && part . text . length > 0 ) {
118- textParts . push ( part . text )
119- }
120- break
121- }
122- case 'image_url' : {
123- imageCount += 1
124- break
125- }
126- case 'file' : {
127- fileCount += 1
128- break
129- }
130- default : {
131- unsupportedCount += 1
132- break
133- }
134- }
135- }
136-
137- if ( imageCount > 0 ) {
138- textParts . push ( unsupportedAttachmentNotice ( 'image' , imageCount ) )
139- }
140- if ( fileCount > 0 ) {
141- textParts . push ( unsupportedAttachmentNotice ( 'file' , fileCount ) )
142- }
143- if ( unsupportedCount > 0 ) {
144- textParts . push (
145- unsupportedAttachmentNotice ( 'unsupported content part' , unsupportedCount ) ,
146- )
147- }
148-
149- return textParts . join ( '\n\n' )
150- }
151-
152- export function normalizeDeepSeekRequestBody (
153- body : ChatCompletionRequestBody ,
154- originalModel : string = body . model ,
155- ) : ChatCompletionRequestBody {
156- return {
157- ...body ,
158- model : getDeepSeekModelId ( originalModel ) ,
159- messages : body . messages . map ( ( message ) => ( {
160- ...message ,
161- content :
162- message . content === undefined || message . content === null
163- ? message . content
164- : contentPartsToDeepSeekText ( message . content ) ,
165- } ) ) ,
166- }
167- }
168-
16986export function createDeepSeekRequest ( params : {
17087 body : ChatCompletionRequestBody
17188 originalModel : string
17289 fetch : typeof globalThis . fetch
17390} ) {
17491 const { body, originalModel, fetch } = params
175- const deepseekBody = normalizeDeepSeekRequestBody (
176- body ,
177- originalModel ,
178- ) as unknown as Record < string , unknown >
179-
180- // DeepSeek uses `thinking` instead of OpenRouter's `reasoning`.
181- if ( deepseekBody . reasoning && typeof deepseekBody . reasoning === 'object' ) {
182- const reasoning = deepseekBody . reasoning as {
183- enabled ?: boolean
184- effort ?: 'high' | 'medium' | 'low'
185- }
186- deepseekBody . thinking = {
187- type : reasoning . enabled === false ? 'disabled' : 'enabled' ,
188- reasoning_effort : toDeepSeekReasoningEffort ( reasoning . effort ) ,
189- }
190- } else if ( deepseekBody . reasoning_effort ) {
191- deepseekBody . thinking = {
192- type : 'enabled' ,
193- reasoning_effort : toDeepSeekReasoningEffort (
194- deepseekBody . reasoning_effort ,
195- ) ,
196- }
197- }
198- delete deepseekBody . reasoning
199- delete deepseekBody . reasoning_effort
200-
201- // Strip OpenRouter-specific / internal fields
202- delete deepseekBody . provider
203- delete deepseekBody . transforms
204- delete deepseekBody . codebuff_metadata
205- delete deepseekBody . usage
206-
207- // For streaming, request usage in the final chunk
208- if ( deepseekBody . stream ) {
209- deepseekBody . stream_options = { include_usage : true }
210- }
92+ const deepseekBody = buildDeepSeekRequestBody ( body , originalModel )
21193
21294 if ( ! env . DEEPSEEK_API_KEY ) {
21395 throw new Error ( 'DEEPSEEK_API_KEY is not configured' )
0 commit comments