-
Notifications
You must be signed in to change notification settings - Fork 174
Add model-sync script that updates the current model list with the AI-SDKs list #2126
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
base: main
Are you sure you want to change the base?
Changes from all commits
e48bf6f
712d76e
9154a21
18862db
fe698f0
2fe0174
ca1b87f
1c62f3b
40a3c7e
fd2aa4a
d3af813
72d076d
1bc6793
5e307f7
7d47b3b
04709a9
24f25d2
06454c4
939a7d7
c29db4b
0aee843
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,240 @@ | ||
| /** | ||
| * Syncs AI provider model lists from @ai-sdk type definitions | ||
| * | ||
| * This script fetches the latest TypeScript type definitions from unpkg for each | ||
| * @ai-sdk provider package and extracts the valid model IDs from the union types. | ||
| * Models confirmed to work with the AI SDK are included. | ||
| * | ||
| * Usage: | ||
| * npx tsx sync-models.ts # Check for differences | ||
| * npx tsx sync-models.ts --update # Update provider files | ||
| */ | ||
|
|
||
| import { AiProviderEnum } from '@openops/shared'; | ||
| import fs from 'node:fs'; | ||
| import path from 'node:path'; | ||
|
|
||
/**
 * Describes one TypeScript union type to extract model IDs from.
 */
interface TypeSource {
  // Name of the exported union type in the provider package's .d.ts.
  typeName: string;
  // Path of the .d.ts file within the package; defaults to 'dist/index.d.ts'.
  distPath?: string;
  // Model IDs to drop from the extracted list (e.g. models we don't support).
  excludedModels?: string[];
}

/**
 * Sync configuration for a single AI provider.
 */
interface AiSdkConfig {
  // Suffix of the @ai-sdk npm package (e.g. 'openai' for @ai-sdk/openai).
  package: string;
  // Base name of the provider file under ./providers (without the .ts extension).
  providerFile: string;
  // One or more union types whose string members are merged into the model list.
  typeSources: TypeSource[];
}
|
|
||
/**
 * Per-provider sync configuration: which @ai-sdk package to fetch, which
 * union type(s) in its type definitions hold the model IDs, and which file
 * under ./providers to keep in sync. Providers without an entry are not
 * synced by this script.
 */
export const AI_SDK_CONFIGS: Partial<Record<AiProviderEnum, AiSdkConfig>> = {
  [AiProviderEnum.ANTHROPIC]: {
    package: 'anthropic',
    providerFile: 'anthropic',
    typeSources: [{ typeName: 'AnthropicMessagesModelId' }],
  },
  [AiProviderEnum.CEREBRAS]: {
    package: 'cerebras',
    providerFile: 'cerebras',
    typeSources: [{ typeName: 'CerebrasChatModelId' }],
  },
  [AiProviderEnum.COHERE]: {
    package: 'cohere',
    providerFile: 'cohere',
    typeSources: [{ typeName: 'CohereChatModelId' }],
  },
  [AiProviderEnum.DEEPSEEK]: {
    package: 'deepseek',
    providerFile: 'deep-seek',
    typeSources: [{ typeName: 'DeepSeekChatModelId' }],
  },
  [AiProviderEnum.GOOGLE]: {
    package: 'google',
    providerFile: 'google',
    typeSources: [{ typeName: 'GoogleGenerativeAIModelId' }],
  },
  [AiProviderEnum.GOOGLE_VERTEX]: {
    package: 'google-vertex',
    providerFile: 'google-vertex',
    // Vertex spans two .d.ts files: Google's own models and the Anthropic
    // models served through Vertex.
    typeSources: [
      {
        typeName: 'GoogleVertexModelId',
        // Older Gemini models still listed in the type definition that we do
        // not want in the provider file (presumably retired — see PR notes).
        excludedModels: [
          'gemini-1.0-pro',
          'gemini-1.0-pro-001',
          'gemini-1.0-pro-002',
          'gemini-1.0-pro-vision-001',
          'gemini-1.5-flash-001',
          'gemini-1.5-flash-002',
          'gemini-1.5-pro-001',
          'gemini-1.5-pro-002',
        ],
      },
      {
        typeName: 'GoogleVertexAnthropicMessagesModelId',
        distPath: 'dist/anthropic/index.d.ts',
      },
    ],
  },
  [AiProviderEnum.GROQ]: {
    package: 'groq',
    providerFile: 'groq',
    typeSources: [{ typeName: 'GroqChatModelId' }],
  },
  [AiProviderEnum.MISTRAL]: {
    package: 'mistral',
    providerFile: 'mistral',
    typeSources: [{ typeName: 'MistralChatModelId' }],
  },
  [AiProviderEnum.OPENAI]: {
    package: 'openai',
    providerFile: 'openai',
    typeSources: [{ typeName: 'OpenAIChatModelId' }],
  },
  [AiProviderEnum.PERPLEXITY]: {
    package: 'perplexity',
    providerFile: 'perplexity',
    typeSources: [{ typeName: 'PerplexityLanguageModelId' }],
  },
  [AiProviderEnum.TOGETHER_AI]: {
    package: 'togetherai',
    providerFile: 'together-ai',
    typeSources: [{ typeName: 'TogetherAIChatModelId' }],
  },
  [AiProviderEnum.XAI]: {
    package: 'xai',
    providerFile: 'xai',
    typeSources: [{ typeName: 'XaiChatModelId' }],
  },
};
|
|
||
// Substrings identifying models that are not chat/text models (embeddings,
// audio/TTS, image and video generation, etc.). Any model ID containing one
// of these (case-insensitive) is dropped by fetchAiSdkModels. The list is not
// exhaustive — extend it whenever unwanted models slip through the sync.
const NON_CHAT_KEYWORDS = [
  'guard',
  'embed',
  'audio',
  'tts',
  'native-audio',
  'imagen',
  'search-preview',
  'aqa',
  'robotics',
  'computer-use',
  'nano-banana',
  'veo',
  '-image',
];
|
|
||
| async function fetchAiSdkModels( | ||
| pkg: string, | ||
| source: TypeSource, | ||
| ): Promise<string[]> { | ||
| const distPath = source.distPath ?? 'dist/index.d.ts'; | ||
| const url = `https://unpkg.com/@ai-sdk/${pkg}@latest/${distPath}`; | ||
| const response = await fetch(url); | ||
| if (!response.ok) { | ||
| throw new Error( | ||
| `Failed to fetch @ai-sdk/${pkg} types: ${response.statusText}`, | ||
| ); | ||
| } | ||
|
|
||
| const dts = await response.text(); | ||
| const pattern = new RegExp( | ||
| `type\\s+${source.typeName}\\s*=\\s*([^;]+);`, | ||
|
Check warning on line 141 in packages/openops/src/lib/ai/sync-models.ts
|
||
| 's', | ||
| ); | ||
| const match = dts.match(pattern); | ||
|
Check warning on line 144 in packages/openops/src/lib/ai/sync-models.ts
|
||
| if (!match) { | ||
| throw new Error(`Could not find type ${source.typeName} in @ai-sdk/${pkg}`); | ||
| } | ||
|
|
||
| const excluded = source.excludedModels ?? []; | ||
| return [...match[1].matchAll(/'([^']+)'/g)] | ||
| .map((m) => m[1]) | ||
| .filter( | ||
| (id) => | ||
| !NON_CHAT_KEYWORDS.some((kw) => id.toLowerCase().includes(kw)) && | ||
| !excluded.includes(id), | ||
| ); | ||
| } | ||
|
|
||
| function getCurrentModels(providerFile: string): string[] { | ||
| const filePath = path.join(__dirname, 'providers', `${providerFile}.ts`); | ||
| if (!fs.existsSync(filePath)) return []; | ||
|
|
||
| const content = fs.readFileSync(filePath, 'utf-8'); | ||
| const match = content.match(/const\s+\w+Models\s*=\s*\[([\s\S]*?)\];/); | ||
| if (!match) return []; | ||
|
|
||
| return match[1] | ||
| .split(',') | ||
| .map((line) => line.match(/['"]([^'"]+)['"]/)?.[1]) | ||
| .filter((model): model is string => model != null) | ||
| .sort(); | ||
| } | ||
|
|
||
| function updateProviderFile(providerFile: string, models: string[]): void { | ||
| const filePath = path.join(__dirname, 'providers', `${providerFile}.ts`); | ||
| const content = fs.readFileSync(filePath, 'utf-8'); | ||
| const match = content.match(/const\s+(\w+Models)\s*=\s*\[([\s\S]*?)\];/); | ||
| if (!match) return; | ||
|
|
||
| const arrayName = match[1]; | ||
| const formattedModels = models.map((model) => ` '${model}',`).join('\n'); | ||
| const newArray = `const ${arrayName} = [\n${formattedModels}\n];`; | ||
| const updatedContent = content.replace( | ||
| /const\s+\w+Models\s*=\s*\[([\s\S]*?)\];/, | ||
| newArray, | ||
| ); | ||
|
|
||
| fs.writeFileSync(filePath, updatedContent, 'utf-8'); | ||
| } | ||
|
|
||
| async function main() { | ||
| const shouldUpdate = process.argv.includes('--update'); | ||
|
|
||
| let hasChanges = false; | ||
|
|
||
| for (const [provider, config] of Object.entries(AI_SDK_CONFIGS)) { | ||
| if (!config) continue; | ||
|
|
||
| let latestModels: string[]; | ||
| try { | ||
| const results = await Promise.all( | ||
| config.typeSources.map((source) => | ||
| fetchAiSdkModels(config.package, source), | ||
| ), | ||
| ); | ||
| latestModels = [...new Set(results.flat())].sort((a, b) => | ||
| a.localeCompare(b), | ||
| ); | ||
| } catch (error) { | ||
| console.error(`Skipping ${provider}: ${(error as Error).message}`); | ||
| continue; | ||
| } | ||
|
|
||
| const currentModels = getCurrentModels(config.providerFile); | ||
| const added = latestModels.filter((m) => !currentModels.includes(m)); | ||
| const removed = currentModels.filter((m) => !latestModels.includes(m)); | ||
|
|
||
| if (added.length === 0 && removed.length === 0) { | ||
| continue; | ||
| } | ||
|
|
||
| hasChanges = true; | ||
| console.log(`${provider}:`); | ||
| if (added.length > 0) console.log(` +${added.length}`); | ||
| if (removed.length > 0) console.log(` -${removed.length}`); | ||
|
|
||
| if (shouldUpdate) { | ||
| updateProviderFile(config.providerFile, latestModels); | ||
| } | ||
| } | ||
|
|
||
| process.exit(hasChanges ? 1 : 0); | ||
| } | ||
|
|
||
// Run only when executed directly (e.g. `npx tsx sync-models.ts`), not when
// this module is imported for its AI_SDK_CONFIGS export.
if (require.main === module) {
  main().catch((error) => {
    console.error('❌ Error:', error);
    process.exit(1);
  });
}
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
The AI SDK does remove retired models, but it is more consistent about this for some providers than for others. If the sync pulls in models that should not be there, we can exclude them like this.