container lifecycle management
gateway/src/llm/provider.ts (new file, 216 lines)
@@ -0,0 +1,216 @@
import type { BaseChatModel } from '@langchain/core/language_models/chat_models';
import { ChatAnthropic } from '@langchain/anthropic';
import { ChatOpenAI } from '@langchain/openai';
import { ChatGoogleGenerativeAI } from '@langchain/google-genai';
import type { FastifyBaseLogger } from 'fastify';

/**
 * Supported LLM providers
 */
export enum LLMProvider {
  ANTHROPIC = 'anthropic',
  OPENAI = 'openai',
  GOOGLE = 'google',
  OPENROUTER = 'openrouter',
}

/**
 * Model configuration
 */
export interface ModelConfig {
  provider: LLMProvider;
  model: string;
  temperature?: number;
  maxTokens?: number;
}

/**
 * Provider configuration with API keys
 */
export interface ProviderConfig {
  anthropicApiKey?: string;
  openaiApiKey?: string;
  googleApiKey?: string;
  openrouterApiKey?: string;
}
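
// Illustrative sketch: a ProviderConfig would typically be assembled from
// environment variables at startup. The variable names below are assumptions
// for the example, not something this module requires.
//
//   const providerConfig: ProviderConfig = {
//     anthropicApiKey: process.env.ANTHROPIC_API_KEY,
//     openaiApiKey: process.env.OPENAI_API_KEY,
//     googleApiKey: process.env.GOOGLE_API_KEY,
//     openrouterApiKey: process.env.OPENROUTER_API_KEY,
//   };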

/**
 * LLM Provider factory
 * Creates model instances with a unified interface across providers
 */
export class LLMProviderFactory {
  private config: ProviderConfig;
  private logger: FastifyBaseLogger;

  constructor(config: ProviderConfig, logger: FastifyBaseLogger) {
    this.config = config;
    this.logger = logger;
  }

  /**
   * Create a chat model instance
   */
  createModel(modelConfig: ModelConfig): BaseChatModel {
    this.logger.debug(
      { provider: modelConfig.provider, model: modelConfig.model },
      'Creating LLM model'
    );

    switch (modelConfig.provider) {
      case LLMProvider.ANTHROPIC:
        return this.createAnthropicModel(modelConfig);

      case LLMProvider.OPENAI:
        return this.createOpenAIModel(modelConfig);

      case LLMProvider.GOOGLE:
        return this.createGoogleModel(modelConfig);

      case LLMProvider.OPENROUTER:
        return this.createOpenRouterModel(modelConfig);

      default:
        throw new Error(`Unsupported provider: ${modelConfig.provider}`);
    }
  }

  /**
   * Create Anthropic Claude model
   */
  private createAnthropicModel(config: ModelConfig): ChatAnthropic {
    if (!this.config.anthropicApiKey) {
      throw new Error('Anthropic API key not configured');
    }

    return new ChatAnthropic({
      model: config.model,
      temperature: config.temperature ?? 0.7,
      maxTokens: config.maxTokens ?? 4096,
      anthropicApiKey: this.config.anthropicApiKey,
    });
  }

  /**
   * Create OpenAI GPT model
   */
  private createOpenAIModel(config: ModelConfig): ChatOpenAI {
    if (!this.config.openaiApiKey) {
      throw new Error('OpenAI API key not configured');
    }

    return new ChatOpenAI({
      model: config.model,
      temperature: config.temperature ?? 0.7,
      maxTokens: config.maxTokens ?? 4096,
      openAIApiKey: this.config.openaiApiKey,
    });
  }

  /**
   * Create Google Gemini model
   */
  private createGoogleModel(config: ModelConfig): ChatGoogleGenerativeAI {
    if (!this.config.googleApiKey) {
      throw new Error('Google API key not configured');
    }

    return new ChatGoogleGenerativeAI({
      model: config.model,
      temperature: config.temperature ?? 0.7,
      maxOutputTokens: config.maxTokens ?? 4096,
      apiKey: this.config.googleApiKey,
    });
  }

  /**
   * Create OpenRouter model (access to 300+ models).
   * OpenRouter exposes an OpenAI-compatible API, so we reuse the ChatOpenAI
   * client pointed at the OpenRouter base URL.
   */
  private createOpenRouterModel(config: ModelConfig): ChatOpenAI {
    if (!this.config.openrouterApiKey) {
      throw new Error('OpenRouter API key not configured');
    }

    return new ChatOpenAI({
      model: config.model,
      temperature: config.temperature ?? 0.7,
      maxTokens: config.maxTokens ?? 4096,
      apiKey: this.config.openrouterApiKey,
      configuration: {
        baseURL: 'https://openrouter.ai/api/v1',
      },
    });
  }

  /**
   * Get the default model based on which API keys are configured,
   * in order of preference: Anthropic, OpenAI, Google, OpenRouter.
   */
  getDefaultModel(): ModelConfig {
    if (this.config.anthropicApiKey) {
      return {
        provider: LLMProvider.ANTHROPIC,
        model: 'claude-3-5-sonnet-20241022',
      };
    }

    if (this.config.openaiApiKey) {
      return {
        provider: LLMProvider.OPENAI,
        model: 'gpt-4o',
      };
    }

    if (this.config.googleApiKey) {
      return {
        provider: LLMProvider.GOOGLE,
        model: 'gemini-2.0-flash-exp',
      };
    }

    if (this.config.openrouterApiKey) {
      return {
        provider: LLMProvider.OPENROUTER,
        model: 'anthropic/claude-3.5-sonnet',
      };
    }

    throw new Error('No LLM API keys configured');
  }
}
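
// Usage sketch, assuming a Fastify instance `app` supplies the logger and
// `providerConfig` was built as above:
//
//   const factory = new LLMProviderFactory(providerConfig, app.log);
//   const model = factory.createModel(factory.getDefaultModel());
//   const reply = await model.invoke('Hello!');
//
// getDefaultModel() keeps call sites provider-agnostic: the same code runs
// whichever API key happens to be configured.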

/**
 * Predefined model configurations
 */
export const MODELS = {
  // Anthropic
  CLAUDE_SONNET: {
    provider: LLMProvider.ANTHROPIC,
    model: 'claude-3-5-sonnet-20241022',
  },
  CLAUDE_HAIKU: {
    provider: LLMProvider.ANTHROPIC,
    model: 'claude-3-5-haiku-20241022',
  },
  CLAUDE_OPUS: {
    provider: LLMProvider.ANTHROPIC,
    model: 'claude-3-opus-20240229',
  },

  // OpenAI
  GPT4O: {
    provider: LLMProvider.OPENAI,
    model: 'gpt-4o',
  },
  GPT4O_MINI: {
    provider: LLMProvider.OPENAI,
    model: 'gpt-4o-mini',
  },

  // Google
  GEMINI_2_FLASH: {
    provider: LLMProvider.GOOGLE,
    model: 'gemini-2.0-flash-exp',
  },
  GEMINI_PRO: {
    provider: LLMProvider.GOOGLE,
    model: 'gemini-1.5-pro',
  },
} as const satisfies Record<string, ModelConfig>;
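
// Illustrative sketch: the presets plug straight into the factory, and
// per-call settings can be overridden with a spread, e.g.:
//
//   const fastModel = factory.createModel({ ...MODELS.CLAUDE_HAIKU, temperature: 0 });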