Add Google Gemini as AI provider
- Add `gemini.py` provider using the google-genai SDK
- Update `config.py` with the `gemini` provider and `GEMINI_API_KEY`
- Update the `ai_service.py` factory to support `gemini`
- Add `google-genai` to `requirements.txt`
- Update `.env.example`, `README.md`, and `CLAUDE.md` documentation
This commit is contained in:
@@ -19,7 +19,7 @@ class Settings(BaseSettings):
|
||||
discord_token: str = Field(..., description="Discord bot token")
|
||||
|
||||
# AI Provider Configuration
|
||||
ai_provider: Literal["openai", "openrouter", "anthropic"] = Field(
|
||||
ai_provider: Literal["openai", "openrouter", "anthropic", "gemini"] = Field(
|
||||
"openai", description="Which AI provider to use"
|
||||
)
|
||||
ai_model: str = Field("gpt-4o", description="AI model to use")
|
||||
@@ -30,6 +30,7 @@ class Settings(BaseSettings):
|
||||
openai_api_key: str | None = Field(None, description="OpenAI API key")
|
||||
openrouter_api_key: str | None = Field(None, description="OpenRouter API key")
|
||||
anthropic_api_key: str | None = Field(None, description="Anthropic API key")
|
||||
gemini_api_key: str | None = Field(None, description="Google Gemini API key")
|
||||
|
||||
# Logging
|
||||
log_level: str = Field("INFO", description="Logging level")
|
||||
@@ -66,6 +67,7 @@ class Settings(BaseSettings):
|
||||
"openai": self.openai_api_key,
|
||||
"openrouter": self.openrouter_api_key,
|
||||
"anthropic": self.anthropic_api_key,
|
||||
"gemini": self.gemini_api_key,
|
||||
}
|
||||
key = key_map.get(self.ai_provider)
|
||||
if not key:
|
||||
|
||||
@@ -9,6 +9,7 @@ from .providers import (
|
||||
AIProvider,
|
||||
AIResponse,
|
||||
AnthropicProvider,
|
||||
GeminiProvider,
|
||||
Message,
|
||||
OpenAIProvider,
|
||||
OpenRouterProvider,
|
||||
@@ -16,7 +17,7 @@ from .providers import (
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
ProviderType = Literal["openai", "openrouter", "anthropic"]
|
||||
ProviderType = Literal["openai", "openrouter", "anthropic", "gemini"]
|
||||
|
||||
|
||||
class AIService:
|
||||
@@ -45,6 +46,7 @@ class AIService:
|
||||
"openai": OpenAIProvider,
|
||||
"openrouter": OpenRouterProvider,
|
||||
"anthropic": AnthropicProvider,
|
||||
"gemini": GeminiProvider,
|
||||
}
|
||||
|
||||
provider_class = providers.get(provider_type)
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
|
||||
from .anthropic import AnthropicProvider
|
||||
from .base import AIProvider, AIResponse, Message
|
||||
from .gemini import GeminiProvider
|
||||
from .openai import OpenAIProvider
|
||||
from .openrouter import OpenRouterProvider
|
||||
|
||||
@@ -12,4 +13,5 @@ __all__ = [
|
||||
"OpenAIProvider",
|
||||
"OpenRouterProvider",
|
||||
"AnthropicProvider",
|
||||
"GeminiProvider",
|
||||
]
|
||||
|
||||
71 lines — src/daemon_boyfriend/services/providers/gemini.py (new file)
@@ -0,0 +1,71 @@
|
||||
"""Google Gemini provider implementation."""

import logging

from google import genai
from google.genai import types

from .base import AIProvider, AIResponse, Message

logger = logging.getLogger(__name__)


class GeminiProvider(AIProvider):
    """Google Gemini API provider.

    Wraps the ``google-genai`` SDK's async client and adapts the project's
    provider-agnostic ``Message``/``AIResponse`` contract to Gemini's
    ``Content``/``Part`` chat format.
    """

    def __init__(self, api_key: str, model: str = "gemini-2.0-flash") -> None:
        """Create a Gemini client.

        Args:
            api_key: Google Gemini API key.
            model: Model identifier to use for generation.
        """
        self.client = genai.Client(api_key=api_key)
        self.model = model

    @property
    def provider_name(self) -> str:
        """Stable identifier for this provider ("gemini")."""
        return "gemini"

    async def generate(
        self,
        messages: list[Message],
        system_prompt: str | None = None,
        max_tokens: int = 1024,
        temperature: float = 0.7,
    ) -> AIResponse:
        """Generate a response using Gemini.

        Args:
            messages: Conversation history in provider-agnostic form.
            system_prompt: Optional system instruction for the model.
            max_tokens: Maximum output tokens to request.
            temperature: Sampling temperature.

        Returns:
            An ``AIResponse`` with the generated text, the model name, and a
            token-usage dict (empty when the API returns no usage metadata).
        """
        # Gemini's chat format uses "user" and "model" roles; map the
        # OpenAI-style "assistant" role accordingly.
        # NOTE(review): any "system" role in `messages` is passed through
        # unchanged — confirm callers only send user/assistant turns, since
        # the system instruction travels in the config below instead.
        contents = [
            types.Content(
                role="model" if m.role == "assistant" else m.role,
                parts=[types.Part(text=m.content)],
            )
            for m in messages
        ]

        # Lazy %-args: the message is only formatted if DEBUG is enabled.
        logger.debug("Sending %d messages to Gemini", len(contents))

        # Pass system_instruction at construction time instead of mutating
        # the config object afterwards; None is the documented default, so
        # omitting an empty/None prompt is behaviorally identical.
        config = types.GenerateContentConfig(
            max_output_tokens=max_tokens,
            temperature=temperature,
            system_instruction=system_prompt if system_prompt else None,
        )

        response = await self.client.aio.models.generate_content(
            model=self.model,
            contents=contents,
            config=config,
        )

        # response.text can be None (e.g. no text part in the candidate);
        # normalize to an empty string for the caller.
        content = response.text or ""

        # usage_metadata may be absent, and individual counts may be None —
        # coerce each to 0 so the dict always holds ints.
        usage: dict[str, int] = {}
        if response.usage_metadata:
            usage = {
                "prompt_tokens": response.usage_metadata.prompt_token_count or 0,
                "completion_tokens": response.usage_metadata.candidates_token_count or 0,
                "total_tokens": response.usage_metadata.total_token_count or 0,
            }

        return AIResponse(
            content=content,
            model=self.model,
            usage=usage,
        )
|
||||
Reference in New Issue
Block a user