Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
74 changes: 74 additions & 0 deletions core/config/ConfigHandler.openConfigProfile.vitest.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,74 @@
import { describe, expect, it, vi } from "vitest";

import { ConfigHandler } from "./ConfigHandler";
import { getConfigYamlPath } from "../util/paths.js";

// Replace only getConfigYamlPath with a spy so the test can assert that it is
// never consulted; every other export keeps its real implementation via
// importActual. (vitest hoists vi.mock calls above the imports.)
vi.mock("../util/paths.js", async () => {
  const actual =
    await vi.importActual<typeof import("../util/paths.js")>(
      "../util/paths.js",
    );
  return {
    ...actual,
    getConfigYamlPath: vi.fn(() => "file:///global/config.yaml"),
  };
});

describe("ConfigHandler.openConfigProfile", () => {
  it("opens workspace-local source files without touching the global config path", async () => {
    // Minimal IDE stub — only openFile is expected to be called here.
    const ide = {
      openFile: vi.fn(),
      openUrl: vi.fn(),
      getIdeSettings: vi.fn(),
    };

    // Build a ConfigHandler without invoking its constructor (which performs
    // real profile loading): create a bare object with the right prototype and
    // assign only the state that openConfigProfile reads (ide, currentProfile,
    // currentOrg). The intersection type below documents that shape.
    const handler = Object.assign(Object.create(ConfigHandler.prototype), {
      ide,
      currentProfile: {
        profileDescription: {
          id: "local-profile",
          profileType: "local",
          uri: "file:///global/config.yaml",
        },
      },
      currentOrg: {
        profiles: [
          {
            profileDescription: {
              id: "local-profile",
              profileType: "local",
              uri: "file:///global/config.yaml",
            },
          },
        ],
      },
    }) as Pick<ConfigHandler, "openConfigProfile"> & {
      ide: typeof ide;
      currentProfile: {
        profileDescription: {
          id: string;
          profileType: string;
          uri: string;
        };
      };
      currentOrg: {
        profiles: Array<{
          profileDescription: {
            id: string;
            profileType: string;
            uri: string;
          };
        }>;
      };
    };

    // A workspace-local source file is passed explicitly for a local profile…
    await handler.openConfigProfile("local-profile", {
      sourceFile: "file:///workspace/.continue/config.yaml",
    });

    // …so exactly that file must be opened, and the global-config-path helper
    // must not be called at all (the mocked spy stays untouched).
    expect(ide.openFile).toHaveBeenCalledWith(
      "file:///workspace/.continue/config.yaml",
    );
    expect(getConfigYamlPath).not.toHaveBeenCalled();
  });
});
6 changes: 4 additions & 2 deletions core/config/ConfigHandler.ts
Original file line number Diff line number Diff line change
Expand Up @@ -636,8 +636,10 @@ export class ConfigHandler {
}

if (profile.profileDescription.profileType === "local") {
getConfigYamlPath();
const configFile = element?.sourceFile ?? profile.profileDescription.uri;
const configFile =
element?.sourceFile ??
profile.profileDescription.uri ??
getConfigYamlPath();
await this.ide.openFile(configFile);
} else {
const env = await getControlPlaneEnv(this.ide.getIdeSettings());
Expand Down
10 changes: 9 additions & 1 deletion core/config/profile/doLoadConfig.ts
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,7 @@ import { getControlPlaneEnv } from "../../control-plane/env.js";
import { PolicySingleton } from "../../control-plane/PolicySingleton";
import { TeamAnalytics } from "../../control-plane/TeamAnalytics.js";
import ContinueProxy from "../../llm/llms/stubs/ContinueProxy";
import { guideSlashCommand } from "../../promptFiles/guidePrompt";
import { initSlashCommand } from "../../promptFiles/initPrompt";
import { getConfigDependentToolDefinitions } from "../../tools";
import { encodeMCPToolUri } from "../../tools/callTool";
Expand Down Expand Up @@ -188,7 +189,14 @@ export default async function doLoadConfig(options: {
}
}

newConfig.slashCommands.push(initSlashCommand);
const slashCommands = (newConfig.slashCommands ??= []);
for (const builtInCommand of [initSlashCommand, guideSlashCommand]) {
if (
!slashCommands.some((command) => command.name === builtInCommand.name)
) {
slashCommands.push(builtInCommand);
}
}

const proxyContextProvider = newConfig.contextProviders?.find(
(cp) => cp.description.title === "continue-proxy",
Expand Down
78 changes: 61 additions & 17 deletions core/config/profile/doLoadConfig.vitest.ts
Original file line number Diff line number Diff line change
Expand Up @@ -95,6 +95,9 @@ vi.mock("../../control-plane/TeamAnalytics", () => ({
vi.mock("../../promptFiles/initPrompt", () => ({
initSlashCommand: { name: "init", description: "init" },
}));
vi.mock("../../promptFiles/guidePrompt", () => ({
guideSlashCommand: { name: "guide", description: "guide" },
}));

// Mock fs.existsSync to simulate missing file on disk
vi.mock("fs", async (importOriginal) => {
Expand Down Expand Up @@ -132,27 +135,32 @@ const mockControlPlaneClient = {
} as any;

const mockLlmLogger = {} as any;
// Shared fixture URI for all tests in this file; mimics a WSL remote path.
const testPackageFileUri =
  "vscode-remote://wsl+Ubuntu/home/user/.continue/agents/test.yaml";

/**
 * Builds a file-based PackageIdentifier test fixture.
 *
 * @param withContent When true (the default), the identifier carries pre-read
 *   YAML content; when false, the `content` property is omitted entirely.
 * @returns A PackageIdentifier pointing at {@link testPackageFileUri}.
 */
function createPackageIdentifier(withContent = true): PackageIdentifier {
  if (withContent) {
    return {
      uriType: "file",
      fileUri: testPackageFileUri,
      content: "name: Test\nversion: 1.0.0\nschema: v1\n",
    };
  }
  return {
    uriType: "file",
    fileUri: testPackageFileUri,
  };
}

describe("doLoadConfig pre-read content bypass", () => {
it("should use YAML loading when packageIdentifier has pre-read content, even if file does not exist on disk", async () => {
mockLoadYaml.mockClear();
mockLoadJson.mockClear();

const packageIdentifier: PackageIdentifier = {
uriType: "file",
fileUri:
"vscode-remote://wsl+Ubuntu/home/user/.continue/agents/test.yaml",
content: "name: Test\nversion: 1.0.0\nschema: v1\n",
};

await doLoadConfig({
ide: mockIde,
controlPlaneClient: mockControlPlaneClient,
llmLogger: mockLlmLogger,
profileId: "test-profile",
overrideConfigYamlByPath: packageIdentifier.fileUri,
overrideConfigYamlByPath: testPackageFileUri,
orgScopeId: null,
packageIdentifier,
packageIdentifier: createPackageIdentifier(true),
});

expect(mockLoadYaml).toHaveBeenCalled();
Expand All @@ -163,23 +171,59 @@ describe("doLoadConfig pre-read content bypass", () => {
mockLoadYaml.mockClear();
mockLoadJson.mockClear();

const packageIdentifier: PackageIdentifier = {
uriType: "file",
fileUri:
"vscode-remote://wsl+Ubuntu/home/user/.continue/agents/test.yaml",
};

await doLoadConfig({
ide: mockIde,
controlPlaneClient: mockControlPlaneClient,
llmLogger: mockLlmLogger,
profileId: "test-profile",
overrideConfigYamlByPath: packageIdentifier.fileUri,
overrideConfigYamlByPath: testPackageFileUri,
orgScopeId: null,
packageIdentifier,
packageIdentifier: createPackageIdentifier(false),
});

expect(mockLoadYaml).not.toHaveBeenCalled();
expect(mockLoadJson).toHaveBeenCalled();
});

it("should always include built-in init and guide slash commands", async () => {
const result = await doLoadConfig({
ide: mockIde,
controlPlaneClient: mockControlPlaneClient,
llmLogger: mockLlmLogger,
profileId: "test-profile",
overrideConfigYamlByPath: testPackageFileUri,
orgScopeId: null,
packageIdentifier: createPackageIdentifier(true),
});

const commandNames = result.config?.slashCommands.map((cmd) => cmd.name);
expect(commandNames).toContain("init");
expect(commandNames).toContain("guide");
});

it("should not duplicate a built-in slash command when config already defines it", async () => {
mockLoadYaml.mockResolvedValueOnce({
config: {
...stubConfig,
slashCommands: [{ name: "guide", description: "custom guide" }],
},
errors: [],
configLoadInterrupted: false,
});

const result = await doLoadConfig({
ide: mockIde,
controlPlaneClient: mockControlPlaneClient,
llmLogger: mockLlmLogger,
profileId: "test-profile",
overrideConfigYamlByPath: testPackageFileUri,
orgScopeId: null,
packageIdentifier: createPackageIdentifier(true),
});

const guideCommands =
result.config?.slashCommands.filter((cmd) => cmd.name === "guide") ?? [];
expect(guideCommands).toHaveLength(1);
expect(guideCommands[0]?.description).toBe("custom guide");
});
});
57 changes: 57 additions & 0 deletions core/llm/llms/Ollama.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -127,6 +127,63 @@ describe("Ollama", () => {
});
});

describe("_streamChat tool support gating", () => {
let ollama: Ollama;

beforeEach(() => {
ollama = createOllama();
(ollama as any).modelInfoPromise = Promise.resolve();
(ollama as any).getEndpoint = jest.fn((path: string) => path);
(ollama as any)._getModel = jest.fn(() => "test-model");
});

it("does not attach tools when the template does not advertise support", async () => {
(ollama as any).templateSupportsTools = false;
(ollama.fetch as jest.Mock).mockResolvedValue({
status: 200,
json: async () => ({
message: { role: "assistant", content: "done" },
done: true,
done_reason: "stop",
total_duration: 0,
load_duration: 0,
prompt_eval_count: 0,
prompt_eval_duration: 0,
eval_count: 0,
eval_duration: 0,
context: [],
}),
});

const generator = (ollama as any)._streamChat(
[{ role: "user", content: "hello" }],
new AbortController().signal,
{
stream: false,
tools: [
{
type: "function",
function: {
name: "get_weather",
description: "Get weather",
parameters: { type: "object", properties: {} },
},
},
],
},
);

const messages = [];
for await (const message of generator) {
messages.push(message);
}

expect(messages).toHaveLength(1);
const request = (ollama.fetch as jest.Mock).mock.calls[0][1];
expect(JSON.parse(request.body)).not.toHaveProperty("tools");
});
});

describe("_reorderMessagesForToolCompat", () => {
let ollama: Ollama;

Expand Down
8 changes: 7 additions & 1 deletion core/llm/llms/Ollama.ts
Original file line number Diff line number Diff line change
Expand Up @@ -161,6 +161,7 @@ class Ollama extends BaseLLM implements ModelInstaller {
private static modelsBeingInstalledMutex = new Mutex();

private fimSupported: boolean = false;
private templateSupportsTools: boolean = true;

private modelInfoPromise: Promise<void> | undefined = undefined;
private explicitContextLength: boolean;
Expand Down Expand Up @@ -240,6 +241,7 @@ class Ollama extends BaseLLM implements ModelInstaller {
* it's a good indication the model supports FIM.
*/
this.fimSupported = !!body?.template?.includes(".Suffix");
this.templateSupportsTools = !!body?.template?.includes(".Tools");
})
.catch((e) => {
// console.warn("Error calling the Ollama /api/show endpoint: ", e);
Expand Down Expand Up @@ -511,7 +513,11 @@ class Ollama extends BaseLLM implements ModelInstaller {
stream: options.stream,
// format: options.format, // Not currently in base completion options
};
if (options.tools?.length && ollamaMessages.at(-1)?.role === "user") {
if (
options.tools?.length &&
this.templateSupportsTools &&
ollamaMessages.at(-1)?.role === "user"
) {
chatOptions.tools = options.tools.map((tool) => ({
type: "function",
function: {
Expand Down
15 changes: 15 additions & 0 deletions core/llm/llms/OpenRouter.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,7 @@
import { ChatCompletionCreateParams } from "openai/resources/index";

import { OPENROUTER_HEADERS } from "@continuedev/openai-adapters";

import { LLMOptions } from "../../index.js";
import { osModelsEditPrompt } from "../templates/edit.js";

Expand All @@ -18,6 +20,19 @@ class OpenRouter extends OpenAI {
useLegacyCompletionsEndpoint: false,
};

/**
 * Injects the default OpenRouter attribution headers into every request,
 * while letting caller-supplied headers take precedence over the defaults.
 */
constructor(options: LLMOptions) {
  const mergedHeaders = {
    ...OPENROUTER_HEADERS,
    ...options.requestOptions?.headers,
  };
  super({
    ...options,
    requestOptions: {
      ...options.requestOptions,
      headers: mergedHeaders,
    },
  });
}

private isAnthropicModel(model?: string): boolean {
if (!model) return false;
const modelLower = model.toLowerCase();
Expand Down
Loading
Loading