Add AI_PROVIDER and AI_MODEL support
All checks were successful
CI / ci (push) Successful in 9s
Deploy / deploy-local-runner (push) Has been skipped
Deploy / deploy-ssh (push) Successful in 7s
Docker / docker (push) Successful in 6s
Security / security (push) Successful in 7s

This commit is contained in:
2026-03-01 19:56:14 +01:00
parent f3851f9e96
commit 7cc5d26948
8 changed files with 60 additions and 4 deletions

View File

@@ -4,8 +4,11 @@
# LLM Provider Configuration
# --------------------------
# Available providers: openai | openrouter | ollama | anthropic | azure | gemini
# This value can be overridden by setting the AI_PROVIDER Gitea secret.
provider: openai
# The model to use for each provider.
# Override the active provider's model by setting the AI_MODEL Gitea secret.
model:
openai: gpt-4.1-mini
openrouter: anthropic/claude-3.5-sonnet

View File

@@ -332,6 +332,18 @@ def main():
setup_logging(args.verbose)
config = load_config(args.config)
# Allow overriding the provider via a Gitea/CI secret (AI_PROVIDER env var)
ai_provider = os.environ.get("AI_PROVIDER")
if ai_provider:
config["provider"] = ai_provider
    # Allow overriding the model via a Gitea/CI secret (AI_MODEL env var);
    # this replaces the model entry for whichever provider is active.
ai_model = os.environ.get("AI_MODEL")
if ai_model:
provider = config.get("provider", "openai")
config.setdefault("model", {})[provider] = ai_model
if args.command == "pr":
run_pr_review(args, config)
elif args.command == "issue":