Skip to content
Open
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
63 changes: 36 additions & 27 deletions Sources/SwiftLM/Server.swift
Original file line number Diff line number Diff line change
Expand Up @@ -1358,33 +1358,10 @@ func handleChatCompletion(
var chatMessages: [Chat.Message] = []
var systemPromptText = ""
for msg in chatReq.messages {
let textContent = msg.textContent
let images = msg.extractImages()
let audio = msg.extractAudio()
switch msg.role {
case "system", "developer":
chatMessages.append(.system(textContent, images: images, audio: audio))
systemPromptText += textContent
case "assistant":
var formattedToolCalls: [[String: any Sendable]]? = nil
if let tc = msg.tool_calls, !tc.isEmpty {
formattedToolCalls = tc.enumerated().map { (index, call) in
[
"index": index,
"id": call.id,
"type": call.type,
"function": [
"name": call.function.name,
"arguments": call.function.arguments
] as [String: any Sendable]
] as [String: any Sendable]
}
}
chatMessages.append(.assistant(textContent, images: images, audio: audio, toolCalls: formattedToolCalls))
case "tool":
chatMessages.append(.tool(textContent, toolCallId: msg.tool_call_id))
default:
chatMessages.append(.user(textContent, images: images, audio: audio))
let chatMsg = msg.toChatMessage()
chatMessages.append(chatMsg)
if chatMsg.role == .system {
systemPromptText += chatMsg.content
}
}

Expand Down Expand Up @@ -2705,6 +2682,38 @@ struct ChatCompletionRequest: Decodable {
return nil
}
}

/// Translates this OpenAI-spec request message into the internal `Chat.Message` form.
///
/// Role mapping:
/// - `"system"` / `"developer"` → `.system`
/// - `"assistant"`              → `.assistant`, with any tool calls re-shaped
///                                into the dictionary layout the chat template expects
/// - `"tool"`                   → `.tool`, carrying the originating `tool_call_id`
/// - any other role             → `.user` (permissive fallback)
func toChatMessage() -> Chat.Message {
    let body = textContent
    let imageParts = extractImages()
    let audioParts = extractAudio()

    switch role {
    case "system", "developer":
        return .system(body, images: imageParts, audio: audioParts)

    case "assistant":
        // Re-shape the typed tool calls into loosely-typed dictionaries;
        // stays nil when tool_calls is absent OR present-but-empty, matching
        // the original behavior.
        let calls: [[String: any Sendable]]?
        if let toolCalls = tool_calls, !toolCalls.isEmpty {
            calls = toolCalls.enumerated().map { index, call in
                let function: [String: any Sendable] = [
                    "name": call.function.name,
                    "arguments": call.function.arguments
                ]
                return [
                    "index": index,
                    "id": call.id,
                    "type": call.type,
                    "function": function
                ] as [String: any Sendable]
            }
        } else {
            calls = nil
        }
        return .assistant(body, images: imageParts, audio: audioParts, toolCalls: calls)

    case "tool":
        return .tool(body, toolCallId: tool_call_id)

    default:
        return .user(body, images: imageParts, audio: audioParts)
    }
}
}

/// Message content: either a plain string or structured multipart content
Expand Down
Loading