From 759496889c0da7eb5bf29805929af016aa92952e Mon Sep 17 00:00:00 2001
From: ochsec
Date: Fri, 26 Dec 2025 22:24:10 -0700
Subject: [PATCH] fix: preserve all configured local models when loading from config (#6180)

When loading models from the config, the code looked up existing models
using the optional model.id field (if provided by the user). This could
cause models with the same api.id to be merged into a single entry.

For local Ollama models with modelIDs like 'devstral-small-2:latest'
and 'devstral-small-2:24b-cloud', the lookup would use model.id (which
is undefined in user configs), falling back to the full modelID.
However, when the code tried to reuse existing models, entries could
end up under conflicting keys, so not every configured model was kept.

Changed the lookup to always use modelID directly when checking for
existing models, since the config parsing creates one entry per
configured model ID. This ensures all user-configured models are
preserved in the final provider.

Adds a test case to verify that multiple Ollama models with similar
names are all loaded.
---
 packages/opencode/src/provider/provider.ts    |  2 +-
 .../opencode/test/provider/provider.test.ts   | 49 +++++++++++++++++++
 2 files changed, 50 insertions(+), 1 deletion(-)

diff --git a/packages/opencode/src/provider/provider.ts b/packages/opencode/src/provider/provider.ts
index 0fdf26392f6..8360783c60d 100644
--- a/packages/opencode/src/provider/provider.ts
+++ b/packages/opencode/src/provider/provider.ts
@@ -611,7 +611,7 @@ export namespace Provider {
       }
 
       for (const [modelID, model] of Object.entries(provider.models ?? {})) {
-        const existingModel = parsed.models[model.id ?? modelID]
+        const existingModel = parsed.models[modelID]
         const name = iife(() => {
           if (model.name) return model.name
           if (model.id && model.id !== modelID) return modelID
diff --git a/packages/opencode/test/provider/provider.test.ts b/packages/opencode/test/provider/provider.test.ts
index c6c6924f01f..56ee38c8f0d 100644
--- a/packages/opencode/test/provider/provider.test.ts
+++ b/packages/opencode/test/provider/provider.test.ts
@@ -447,6 +447,55 @@ test("provider with baseURL from config", async () => {
   })
 })
 
+test("multiple local ollama models with similar names are all loaded", async () => {
+  await using tmp = await tmpdir({
+    init: async (dir) => {
+      await Bun.write(
+        path.join(dir, "opencode.json"),
+        JSON.stringify({
+          $schema: "https://opencode.ai/config.json",
+          provider: {
+            ollama: {
+              npm: "@ai-sdk/openai-compatible",
+              name: "Ollama",
+              options: {
+                baseURL: "http://localhost:11434/v1",
+              },
+              models: {
+                "devstral-small-2:24b-cloud": {
+                  name: "Devstral Small 2 (Cloud)",
+                },
+                "devstral-small-2:latest": {
+                  name: "Devstral Small 2 (local)",
+                },
+                "devstral-2:123b-cloud": {
+                  name: "Devstral 2 (Cloud)",
+                },
+              },
+            },
+          },
+        }),
+      )
+    },
+  })
+  await Instance.provide({
+    directory: tmp.path,
+    fn: async () => {
+      const providers = await Provider.list()
+      expect(providers["ollama"]).toBeDefined()
+      expect(providers["ollama"].models).toBeDefined()
+
+      const models = Object.values(providers["ollama"].models)
+      expect(models.length).toBe(3)
+
+      const modelIds = models.map((m) => m.id).sort()
+      expect(modelIds).toContain("devstral-small-2:24b-cloud")
+      expect(modelIds).toContain("devstral-small-2:latest")
+      expect(modelIds).toContain("devstral-2:123b-cloud")
+    },
+  })
+})
+
 test("model cost defaults to zero when not specified", async () => {
   await using tmp = await tmpdir({
     init: async (dir) => {
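
Note (not part of the patch): a minimal TypeScript sketch of the lookup change
described in the commit message, using hypothetical names (parsedModels,
lookupBefore, lookupAfter) and a simplified shape for the parsed model map:

  // Hypothetical, simplified stand-in for the parsed provider models,
  // keyed by full model ID.
  const parsedModels: Record<string, { name: string }> = {
    "devstral-small-2:latest": { name: "Devstral Small 2 (local)" },
    "devstral-small-2:24b-cloud": { name: "Devstral Small 2 (Cloud)" },
  }

  type ConfigModel = { id?: string; name?: string }

  // Before: the lookup key depends on the optional model.id field, so two
  // configured entries can resolve to the same key and get merged.
  function lookupBefore(modelID: string, model: ConfigModel) {
    return parsedModels[model.id ?? modelID]
  }

  // After: the lookup always uses the configured modelID, so each entry in
  // the user's models map resolves to its own parsed entry.
  function lookupAfter(modelID: string) {
    return parsedModels[modelID]
  }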