Add local AI Ollama for security purposes (#226)

Co-authored-by: PhotoPortfolio Developer <developer@photportfolio.local>
This commit is contained in:
trungtt6
2026-02-16 11:28:38 +08:00
committed by GitHub
parent 0d18210803
commit cd638fff6c
3 changed files with 16 additions and 3 deletions

View File

@@ -107,6 +107,10 @@
"moonshot": { "moonshot": {
"api_key": "sk-xxx", "api_key": "sk-xxx",
"api_base": "" "api_base": ""
},
"ollama": {
"api_key": "",
"api_base": "http://localhost:11434/v1"
} }
}, },
"tools": { "tools": {

View File

@@ -175,6 +175,7 @@ type ProvidersConfig struct {
VLLM ProviderConfig `json:"vllm"` VLLM ProviderConfig `json:"vllm"`
Gemini ProviderConfig `json:"gemini"` Gemini ProviderConfig `json:"gemini"`
Nvidia ProviderConfig `json:"nvidia"` Nvidia ProviderConfig `json:"nvidia"`
Ollama ProviderConfig `json:"ollama"`
Moonshot ProviderConfig `json:"moonshot"` Moonshot ProviderConfig `json:"moonshot"`
ShengSuanYun ProviderConfig `json:"shengsuanyun"` ShengSuanYun ProviderConfig `json:"shengsuanyun"`
DeepSeek ProviderConfig `json:"deepseek"` DeepSeek ProviderConfig `json:"deepseek"`

View File

@@ -53,10 +53,10 @@ func (p *HTTPProvider) Chat(ctx context.Context, messages []Message, tools []Too
return nil, fmt.Errorf("API base not configured") return nil, fmt.Errorf("API base not configured")
} }
// Strip provider prefix from model name (e.g., moonshot/kimi-k2.5 -> kimi-k2.5) // Strip provider prefix from model name (e.g., moonshot/kimi-k2.5 -> kimi-k2.5, groq/openai/gpt-oss-120b -> openai/gpt-oss-120b, ollama/qwen2.5:14b -> qwen2.5:14b)
if idx := strings.Index(model, "/"); idx != -1 { if idx := strings.Index(model, "/"); idx != -1 {
prefix := model[:idx] prefix := model[:idx]
if prefix == "moonshot" || prefix == "nvidia" { if prefix == "moonshot" || prefix == "nvidia" || prefix == "groq" || prefix == "ollama" {
model = model[idx+1:] model = model[idx+1:]
} }
} }
@@ -400,7 +400,15 @@ func CreateProvider(cfg *config.Config) (LLMProvider, error) {
if apiBase == "" { if apiBase == "" {
apiBase = "https://integrate.api.nvidia.com/v1" apiBase = "https://integrate.api.nvidia.com/v1"
} }
case (strings.Contains(lowerModel, "ollama") || strings.HasPrefix(model, "ollama/")) && cfg.Providers.Ollama.APIKey != "":
fmt.Println("Ollama provider selected based on model name prefix")
apiKey = cfg.Providers.Ollama.APIKey
apiBase = cfg.Providers.Ollama.APIBase
proxy = cfg.Providers.Ollama.Proxy
if apiBase == "" {
apiBase = "http://localhost:11434/v1"
}
fmt.Println("Ollama apiBase:", apiBase)
case cfg.Providers.VLLM.APIBase != "": case cfg.Providers.VLLM.APIBase != "":
apiKey = cfg.Providers.VLLM.APIKey apiKey = cfg.Providers.VLLM.APIKey
apiBase = cfg.Providers.VLLM.APIBase apiBase = cfg.Providers.VLLM.APIBase