From 14fcf2ba11892aa4d4931eee85e2ab74132bdedc Mon Sep 17 00:00:00 2001
From: Marvin Zhang
Date: Sun, 9 Mar 2025 20:10:37 +0800
Subject: [PATCH] feat: add LLM provider and model data models

Introduced two new data models for managing Large Language Models (LLMs):
- LLMProvider: Represents LLM providers like OpenAI, Anthropic
- LLMModel: Represents specific models within a provider

Models include key attributes such as:
- Naming and display information
- Enabled/priority status
- Supported features
- Token pricing
- Configuration schemas
---
 core/models/models/llm_model.go    | 41 ++++++++++++++++++++++++++++++
 core/models/models/llm_provider.go | 25 ++++++++++++++++++
 2 files changed, 66 insertions(+)
 create mode 100644 core/models/models/llm_model.go
 create mode 100644 core/models/models/llm_provider.go

diff --git a/core/models/models/llm_model.go b/core/models/models/llm_model.go
new file mode 100644
index 00000000..f47757ac
--- /dev/null
+++ b/core/models/models/llm_model.go
@@ -0,0 +1,41 @@
+package models
+
+import (
+	"go.mongodb.org/mongo-driver/bson/primitive"
+)
+
+// LLMModel represents a specific model within an LLM provider
+type LLMModel struct {
+	BaseModel[LLMModel] `bson:",inline"`
+	ProviderId        primitive.ObjectID     `json:"provider_id" bson:"provider_id"`                         // Reference to the provider
+	ModelId           string                 `json:"model_id" bson:"model_id"`                               // Provider's model ID
+	Name              string                 `json:"name" bson:"name"`                                       // Model name
+	DisplayName       string                 `json:"display_name" bson:"display_name"`                       // Display name for UI
+	Description       string                 `json:"description,omitempty" bson:"description,omitempty"`     // Description of the model
+	IsEnabled         bool                   `json:"is_enabled" bson:"is_enabled"`                           // Whether this model is enabled
+	Priority          int                    `json:"priority" bson:"priority"`                               // Priority for sorting in UI
+	ModelFamily       string                 `json:"model_family,omitempty" bson:"model_family,omitempty"`   // Family this model belongs to (e.g., "gpt-4", "claude")
+	ContextSize       int                    `json:"context_size" bson:"context_size"`                       // Context window size in tokens
+	MaxOutputTokens   int                    `json:"max_output_tokens" bson:"max_output_tokens"`             // Maximum output tokens
+	SupportedFeatures []string               `json:"supported_features" bson:"supported_features"`           // Features supported by this model
+	DefaultParameters map[string]interface{} `json:"default_parameters" bson:"default_parameters"`           // Default parameters for this model
+	TokenPricing      *TokenPricing          `json:"token_pricing,omitempty" bson:"token_pricing,omitempty"` // Pricing information
+}
+
+// TokenPricing represents the pricing structure for tokens used by an LLM model
+type TokenPricing struct {
+	InputTokenPrice  float64 `json:"input_token_price" bson:"input_token_price"`   // Price per input token (USD per 1M tokens)
+	OutputTokenPrice float64 `json:"output_token_price" bson:"output_token_price"` // Price per output token (USD per 1M tokens)
+	Currency         string  `json:"currency" bson:"currency"`                     // Currency for pricing, default is USD
+}
+
+// GetModelColName returns the collection name for the model
+func (m *LLMModel) GetModelColName() string {
+	return "llm_models"
+}
+
+// Validate validates the model
+func (m *LLMModel) Validate() error {
+	// Basic validation can be implemented here
+	return nil
+}
diff --git a/core/models/models/llm_provider.go b/core/models/models/llm_provider.go
new file mode 100644
index 00000000..cf4a5b1a
--- /dev/null
+++ b/core/models/models/llm_provider.go
@@ -0,0 +1,25 @@
+package models
+
+// LLMProvider represents a language model provider such as OpenAI, Anthropic, etc.
+type LLMProvider struct {
+	BaseModel[LLMProvider] `bson:",inline"`
+	Name              string   `json:"name" bson:"name"`                                   // Provider name (e.g., "openai", "anthropic", "gemini")
+	DisplayName       string   `json:"display_name" bson:"display_name"`                   // Display name for UI
+	Description       string   `json:"description,omitempty" bson:"description,omitempty"` // Description of the provider
+	IsEnabled         bool     `json:"is_enabled" bson:"is_enabled"`                       // Whether this provider is enabled
+	Priority          int      `json:"priority" bson:"priority"`                           // Priority for sorting in UI
+	ConfigSchema      string   `json:"config_schema" bson:"config_schema"`                 // JSON schema for configuration
+	DefaultConfig     string   `json:"default_config" bson:"default_config"`               // Default configuration as JSON
+	SupportedFeatures []string `json:"supported_features" bson:"supported_features"`       // Features supported by this provider (e.g., "function_calling", "streaming")
+}
+
+// GetModelColName returns the collection name for the provider model
+func (p *LLMProvider) GetModelColName() string {
+	return "llm_providers"
+}
+
+// Validate validates the provider model
+func (p *LLMProvider) Validate() error {
+	// Basic validation can be implemented here
+	return nil
+}
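
Review note (not part of the patch): the sketch below shows how the two models and the per-1M-token pricing fields might be used together once this lands. The estimateCostUSD helper, the exampleLLMModels function, the "gpt-4o" identifiers, and the price figures are illustrative assumptions, and BaseModel is assumed to supply the shared id/collection plumbing already present in the repo.

package models

import (
	"fmt"

	"go.mongodb.org/mongo-driver/bson/primitive"
)

// estimateCostUSD is a hypothetical helper (not part of this patch): it turns
// input/output token counts into a USD figure using the per-1M-token prices
// stored on TokenPricing.
func estimateCostUSD(p *TokenPricing, inputTokens, outputTokens int) float64 {
	if p == nil {
		return 0
	}
	return float64(inputTokens)/1_000_000*p.InputTokenPrice +
		float64(outputTokens)/1_000_000*p.OutputTokenPrice
}

func exampleLLMModels() {
	provider := LLMProvider{
		Name:              "openai",
		DisplayName:       "OpenAI",
		IsEnabled:         true,
		Priority:          1,
		SupportedFeatures: []string{"streaming", "function_calling"},
	}

	model := LLMModel{
		ProviderId:      primitive.NewObjectID(), // in practice, the stored provider's id
		ModelId:         "gpt-4o",
		Name:            "gpt-4o",
		DisplayName:     "GPT-4o",
		IsEnabled:       true,
		ContextSize:     128000,
		MaxOutputTokens: 16384,
		TokenPricing: &TokenPricing{
			InputTokenPrice:  2.50, // illustrative figures, not authoritative pricing
			OutputTokenPrice: 10.00,
			Currency:         "USD",
		},
	}

	// 1200 input + 350 output tokens at the prices above -> ~$0.0065
	fmt.Printf("%s / %s costs ~$%.4f\n",
		provider.Name, model.ModelId, estimateCostUSD(model.TokenPricing, 1200, 350))
}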
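
Review note (not part of the patch): Validate is left as a stub on both models. If stricter checks are wanted later, one possible shape for LLMModel.Validate is sketched below; the specific rules and error messages are assumptions, not requirements of this patch.

package models

import "errors"

// Sketch of a stricter Validate for LLMModel; it would replace the stub body
// added in this patch. All rules below are illustrative assumptions.
func (m *LLMModel) Validate() error {
	if m.ProviderId.IsZero() {
		return errors.New("llm model: provider_id is required")
	}
	if m.ModelId == "" || m.Name == "" {
		return errors.New("llm model: model_id and name are required")
	}
	if m.ContextSize < 0 || m.MaxOutputTokens < 0 {
		return errors.New("llm model: token limits must be non-negative")
	}
	if p := m.TokenPricing; p != nil && (p.InputTokenPrice < 0 || p.OutputTokenPrice < 0) {
		return errors.New("llm model: token prices must be non-negative")
	}
	return nil
}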