@@ -204,7 +204,7 @@ class ChatFunctionsCallStreamProvider {
let stream = try await service.startStreamedChat(parameters: paramsForChat)
for try await result in stream {
// Extract the first choice from the stream results, if none exist, exit the loop.
- guard let choice = result.choices?.first else { return }
+ guard let choice = result.choices?.first else { continue }

/// The streamed content to display
if let newContent = choice.delta?.content {
@@ -42,7 +42,7 @@ class ChatFluidConversationProvider {
let stream = try await service.startStreamedChat(parameters: localParameters)
for try await result in stream {
// Extract the first choice from the stream results, if none exist, exit the loop.
- guard let choice = result.choices?.first else { return }
+ guard let choice = result.choices?.first else { continue }

// Store initial `role` and `functionCall` data from the first `choice.delta` for UI display.
// This information is essential for maintaining context in the conversation and for updating
@@ -38,7 +38,7 @@ class ChatVisionProvider {
let stream = try await service.startStreamedChat(parameters: parameters)
for try await result in stream {
// Extract the first choice from the stream results, if none exist, exit the loop.
- guard let choice = result.choices?.first else { return }
+ guard let choice = result.choices?.first else { continue }

// Store initial `role` and `functionCall` data from the first `choice.delta` for UI display.
// This information is essential for maintaining context in the conversation and for updating
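
The same change from return to continue appears in all three streaming providers above. A minimal standalone sketch of why it matters (the chunk type here is a stand-in, not the library's actual API): a streamed response can deliver a chunk whose choices array is empty, and returning at that point silently stops consuming every chunk that follows, while continue only skips the empty one.

    // Stand-in chunk type for illustration only.
    struct StreamChunk {
      let choices: [String]?
    }

    func consume(_ stream: AsyncStream<StreamChunk>) async {
      for await chunk in stream {
        // `return` would abandon the rest of the stream here;
        // `continue` skips only the chunk that carries no choices.
        guard let choice = chunk.choices?.first else { continue }
        print(choice)
      }
    }
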
@@ -39,8 +39,9 @@ public struct ChatCompletionParameters: Encodable {
temperature: Double? = nil,
topProbability: Double? = nil,
user: String? = nil,
- streamOptions: StreamOptions? = nil)
- {
+ streamOptions: StreamOptions? = nil,
+ thinking: Thinking? = nil
+ ) {
self.messages = messages
self.model = model.value
self.store = store
@@ -70,6 +71,7 @@ public struct ChatCompletionParameters: Encodable {
topP = topProbability
self.user = user
self.streamOptions = streamOptions
+ self.thinking = thinking
}

public struct Message: Encodable {
@@ -81,8 +83,8 @@ public struct ChatCompletionParameters: Encodable {
audio: Audio? = nil,
functionCall: FunctionCall? = nil,
toolCalls: [ToolCall]? = nil,
- toolCallID: String? = nil)
- {
+ toolCallID: String? = nil
+ ) {
self.role = role.rawValue
self.content = content
self.refusal = refusal
@@ -144,7 +146,7 @@ public struct ChatCompletionParameters: Encodable {
}
}

- public static func ==(lhs: MessageContent, rhs: MessageContent) -> Bool {
+ public static func == (lhs: MessageContent, rhs: MessageContent) -> Bool {
switch (lhs, rhs) {
case (.text(let a), .text(let b)):
a == b
@@ -205,10 +207,10 @@ public struct ChatCompletionParameters: Encodable {
}

public enum Role: String {
- case system // content, role
- case user // content, role
- case assistant // content, role, tool_calls
- case tool // content, role, tool_call_id
+ case system // content, role
+ case user // content, role
+ case assistant // content, role, tool_calls
+ case tool // content, role, tool_call_id
}

public struct Audio: Encodable {
@@ -289,8 +291,8 @@ public struct ChatCompletionParameters: Encodable {

public init(
type: String = "function",
- function: ChatFunction)
- {
+ function: ChatFunction
+ ) {
self.type = type
self.function = function
}
@@ -301,8 +303,8 @@ public struct ChatCompletionParameters: Encodable {
name: String,
strict: Bool?,
description: String?,
- parameters: JSONSchema?)
- {
+ parameters: JSONSchema?
+ ) {
self.name = name
self.strict = strict
self.description = description
@@ -355,8 +357,8 @@ public struct ChatCompletionParameters: Encodable {

public init(
voice: String,
- format: String)
- {
+ format: String
+ ) {
self.voice = voice
self.format = format
}
@@ -418,6 +420,20 @@ public struct ChatCompletionParameters: Encodable {
case low
}

+ public enum ThinkingType: String, Encodable {
+ case disabled
+ case enabled
+ case auto
+ }
+
+ public struct Thinking: Encodable {
+ public let type: ThinkingType
+
+ public init(type: ThinkingType) {
+ self.type = type
+ }
+ }
+
/// A list of messages comprising the conversation so far. [Example Python code](https://cookbook.openai.com/examples/how_to_format_inputs_to_chatgpt_models)
public var messages: [Message]
/// ID of the model to use. See the [model endpoint compatibility](https://platform.openai.com/docs/models/how-we-use-your-data) table for details on which models work with the Chat API.
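
Because ThinkingType carries a String raw value and both new types rely on synthesized Encodable conformance, the thinking field should serialize as a small nested object. A quick sketch of that behavior (the JSON shape is inferred from the declarations above, not from any documented wire format):

    import Foundation

    // Mirrors the nested types introduced in this change.
    enum ThinkingType: String, Encodable {
      case disabled, enabled, auto
    }

    struct Thinking: Encodable {
      let type: ThinkingType
    }

    let data = try! JSONEncoder().encode(Thinking(type: .auto))
    print(String(data: data, encoding: .utf8)!)   // {"type":"auto"}
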
@@ -531,11 +547,14 @@ public struct ChatCompletionParameters: Encodable {
case temperature
case topP = "top_p"
case user
+ case thinking
}

/// If set, partial message deltas will be sent, like in ChatGPT. Tokens will be sent as data-only [server-sent events](https://developer.mozilla.org/en-US/docs/Web/API/Server-sent_events/Using_server-sent_events#event_stream_format) as they become available, with the stream terminated by a data: [DONE] message. [Example Python code](https://cookbook.openai.com/examples/how_to_stream_completions ).
/// Defaults to false.
var stream: Bool? = nil
/// Options for streaming response. Only set this when you set stream: true
var streamOptions: StreamOptions?

+ var thinking: Thinking?
}
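
For completeness, a hedged sketch of a call site that opts into the new parameter. The model case and message labels are assumptions about the surrounding library surface rather than anything this diff shows; only the thinking argument is new here.

    // Illustrative only; .gpt4o and the message labels are assumed.
    let parameters = ChatCompletionParameters(
      messages: [
        .init(role: .user, content: .text("Walk me through your reasoning."))
      ],
      model: .gpt4o,                      // assumed model case
      thinking: .init(type: .enabled))    // new in this change

Since thinking is now listed in CodingKeys, the encoded request body should carry the field alongside the existing options whenever it is non-nil.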