project.pbxproj
@@ -12,6 +12,8 @@
0DF957862BB543F100DD2013 /* AIProxyIntroView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 0DF957852BB543F100DD2013 /* AIProxyIntroView.swift */; };
7B029E372C6893FD0025681A /* ChatStructuredOutputProvider.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7B029E362C6893FD0025681A /* ChatStructuredOutputProvider.swift */; };
7B029E392C68940D0025681A /* ChatStructuredOutputDemoView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7B029E382C68940D0025681A /* ChatStructuredOutputDemoView.swift */; };
7B029E3C2C69BE990025681A /* ChatStructuredOutputToolProvider.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7B029E3B2C69BE990025681A /* ChatStructuredOutputToolProvider.swift */; };
7B029E3E2C69BEA70025681A /* ChatStructureOutputToolDemoView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7B029E3D2C69BEA70025681A /* ChatStructureOutputToolDemoView.swift */; };
7B1268052B08246400400694 /* AssistantConfigurationDemoView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7B1268042B08246400400694 /* AssistantConfigurationDemoView.swift */; };
7B1268072B08247C00400694 /* AssistantConfigurationProvider.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7B1268062B08247C00400694 /* AssistantConfigurationProvider.swift */; };
7B3DDCC52BAAA722004B5C96 /* AssistantsListDemoView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7B3DDCC42BAAA722004B5C96 /* AssistantsListDemoView.swift */; };
@@ -92,6 +94,8 @@
0DF957852BB543F100DD2013 /* AIProxyIntroView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AIProxyIntroView.swift; sourceTree = "<group>"; };
7B029E362C6893FD0025681A /* ChatStructuredOutputProvider.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ChatStructuredOutputProvider.swift; sourceTree = "<group>"; };
7B029E382C68940D0025681A /* ChatStructuredOutputDemoView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ChatStructuredOutputDemoView.swift; sourceTree = "<group>"; };
7B029E3B2C69BE990025681A /* ChatStructuredOutputToolProvider.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ChatStructuredOutputToolProvider.swift; sourceTree = "<group>"; };
7B029E3D2C69BEA70025681A /* ChatStructureOutputToolDemoView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ChatStructureOutputToolDemoView.swift; sourceTree = "<group>"; };
7B1268042B08246400400694 /* AssistantConfigurationDemoView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AssistantConfigurationDemoView.swift; sourceTree = "<group>"; };
7B1268062B08247C00400694 /* AssistantConfigurationProvider.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AssistantConfigurationProvider.swift; sourceTree = "<group>"; };
7B3DDCC42BAAA722004B5C96 /* AssistantsListDemoView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AssistantsListDemoView.swift; sourceTree = "<group>"; };
@@ -189,6 +193,15 @@
path = ChatStructuredOutputs;
sourceTree = "<group>";
};
7B029E3A2C69BE750025681A /* ChatStructureOutputTool */ = {
isa = PBXGroup;
children = (
7B029E3B2C69BE990025681A /* ChatStructuredOutputToolProvider.swift */,
7B029E3D2C69BEA70025681A /* ChatStructureOutputToolDemoView.swift */,
);
path = ChatStructureOutputTool;
sourceTree = "<group>";
};
7B1268032B08241200400694 /* Assistants */ = {
isa = PBXGroup;
children = (
@@ -374,6 +387,7 @@
7B436B9F2AE2593D003CE281 /* ChatDemo */,
7B7239AC2AF9FEC300646679 /* ChatFunctionsCall */,
7B029E352C6893BF0025681A /* ChatStructuredOutputs */,
7B029E3A2C69BE750025681A /* ChatStructureOutputTool */,
7B72399E2AF625B700646679 /* ChatStreamFluidConversationDemo */,
7B436BA42AE77EF9003CE281 /* EmbeddingsDemo */,
7B436BA92AE788CA003CE281 /* FineTuningDemo */,
@@ -629,6 +643,7 @@
7B3DDCC52BAAA722004B5C96 /* AssistantsListDemoView.swift in Sources */,
7B3DDCC72BAAAD34004B5C96 /* AssistantThreadConfigurationProvider.swift in Sources */,
7B3DDCC92BAAAF96004B5C96 /* AssistantStreamDemoScreen.swift in Sources */,
7B029E3E2C69BEA70025681A /* ChatStructureOutputToolDemoView.swift in Sources */,
7B413DFE2BB6846D009684CD /* ChatMessageLoadingView.swift in Sources */,
7B436BB92AE7A2F2003CE281 /* ImagesDemoView.swift in Sources */,
7B436BB22AE79370003CE281 /* FilesProvider.swift in Sources */,
@@ … @@
7BBE7E912AFCA52A0096A693 /* ChatVisionDemoView.swift in Sources */,
7B99C2EB2C07191200E701B3 /* AttachmentView.swift in Sources */,
7B50DD2B2C2A9D2F0070A64D /* LocalChatDemoView.swift in Sources */,
7B029E3C2C69BE990025681A /* ChatStructuredOutputToolProvider.swift in Sources */,
7B436BAB2AE788F1003CE281 /* FineTuningJobProvider.swift in Sources */,
7B7239A42AF6289900646679 /* ChatStreamFluidConversationDemoView.swift in Sources */,
7BA788FC2AE23B42008825D5 /* AudioDemoView.swift in Sources */,
ChatStructureOutputToolDemoView.swift
@@ -0,0 +1,86 @@
//
// ChatStructureOutputToolDemoView.swift
// SwiftOpenAIExample
//
// Created by James Rochabrun on 8/11/24.
//

import Foundation
import SwiftOpenAI
import SwiftUI

struct ChatStructureOutputToolDemoView: View {

@State private var chatProvider: ChatStructuredOutputToolProvider
@State private var isLoading = false
@State private var prompt = ""

init(service: OpenAIService) {
chatProvider = .init(service: service)
}

var body: some View {
ScrollViewReader { proxy in
VStack {
List(chatProvider.chatDisplayMessages) { message in
ChatMessageView(message: message)
.listRowSeparator(.hidden)
}
.listStyle(.plain)
.onChange(of: chatProvider.chatDisplayMessages.last?.content) {
let lastMessage = chatProvider.chatDisplayMessages.last
if let id = lastMessage?.id {
proxy.scrollTo(id, anchor: .bottom)
}
}
textArea
}
}
}

var textArea: some View {
HStack(spacing: 0) {
VStack(alignment: .leading, spacing: 0) {
textField
.padding(.vertical, Sizes.spacingExtraSmall)
.padding(.horizontal, Sizes.spacingSmall)
}
.padding(.vertical, Sizes.spacingExtraSmall)
.padding(.horizontal, Sizes.spacingExtraSmall)
.background(
RoundedRectangle(cornerRadius: 20)
.stroke(.gray, lineWidth: 1)
)
.padding(.horizontal, Sizes.spacingMedium)
textAreaSendButton
}
.padding(.horizontal)
.disabled(isLoading)
}

var textField: some View {
TextField(
"How Can I help you today?",
text: $prompt,
axis: .vertical)
}

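// Send button: kicks off the request and clears the prompt; disabled while the prompt is empty.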
var textAreaSendButton: some View {
Button {
Task {
// Show the loading state while the request is in flight.
isLoading = true
defer { isLoading = false }
// Clears text field.
let userPrompt = prompt
prompt = ""
try await chatProvider.startChat(prompt: userPrompt)
}
} label: {
Image(systemName: "paperplane")
}
.buttonStyle(.bordered)
.tint(ThemeColor.tintColor)
.disabled(prompt.isEmpty)
}
}
ChatStructuredOutputToolProvider.swift
@@ -0,0 +1,251 @@
//
// ChatStructuredOutputToolProvider.swift
// SwiftOpenAIExample
//
// Created by James Rochabrun on 8/11/24.
//

import Foundation
import SwiftOpenAI
import SwiftUI

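// Tools exposed to the model. `strict: true` asks the API to enforce the JSON schema on the generated arguments (structured outputs for function calling).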
enum StructuredToolCall: String, CaseIterable {

case structureUI = "structured_ui"

var functionTool: ChatCompletionParameters.Tool {
switch self {
case .structureUI:
return .init(
function: .init(
name: self.rawValue,
strict: true,
description: "Dynamically generated UI",
parameters: structureUISchema))
}
}

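// Recursive schema describing a UI tree: `children` items use `ref: "#"` to point back at the root schema, so components can nest arbitrarily.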
var structureUISchema: JSONSchema {
JSONSchema(
type: .object,
properties: [
"type": JSONSchema(
type: .string,
description: "The type of the UI component",
additionalProperties: false,
enum: ["div", "button", "header", "section", "field", "form"]
),
"label": JSONSchema(
type: .string,
description: "The label of the UI component, used for buttons or form fields",
additionalProperties: false
),
"children": JSONSchema(
type: .array,
description: "Nested UI components",
items: JSONSchema(ref: "#"),
additionalProperties: false
),
"attributes": JSONSchema(
type: .array,
description: "Arbitrary attributes for the UI component, suitable for any element",
items: JSONSchema(
type: .object,
properties: [
"name": JSONSchema(
type: .string,
description: "The name of the attribute, for example onClick or className",
additionalProperties: false
),
"value": JSONSchema(
type: .string,
description: "The value of the attribute",
additionalProperties: false
)
],
required: ["name", "value"],
additionalProperties: false
),
additionalProperties: false
)
],
required: ["type", "label", "children", "attributes"],
additionalProperties: false
)
}
}

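// Drives the demo: sends the user prompt with the structured-output tool attached, resolves tool calls locally, and feeds the results back to the model.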
@Observable
final class ChatStructuredOutputToolProvider {

// MARK: - Init

init(service: OpenAIService) {
self.service = service
}

// MARK: - Public

var chatDisplayMessages: [ChatMessageDisplayModel] = []
let systemMessage = ChatCompletionParameters.Message(role: .system, content: .text("You are a helpful assistant that generates UI components."))

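// Sends the prompt with the tool attached, displays the assistant reply, and resolves any tool calls before asking the model for a final response.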
func startChat(
prompt: String)
async throws
{
await startNewUserDisplayMessage(prompt)
await startNewAssistantEmptyDisplayMessage()

let userMessage = createUserMessage(prompt)
chatMessageParameters.append(userMessage)

let parameters = ChatCompletionParameters(
messages: [systemMessage] + chatMessageParameters,
model: .gpt4o20240806,
tools: StructuredToolCall.allCases.map { $0.functionTool })

do {

let chat = try await service.startChat(parameters: parameters)
guard let assistantMessage = chat.choices.first?.message else { return }
let content = assistantMessage.content ?? ""
await updateLastAssistantMessage(.init(content: .content(.init(text: content)), origin: .received(.gpt)))
if let toolCalls = assistantMessage.toolCalls {

availableFunctions = [.structureUI: getStructureOutput(arguments:)]
// Append the `assistantMessage` to `chatMessageParameters` to extend the conversation.
let parameterAssistantMessage = ChatCompletionParameters.Message(
role: .assistant,
content: .text(content), toolCalls: assistantMessage.toolCalls)

chatMessageParameters.append(parameterAssistantMessage)

// Send each function call's result back to the model as a tool message.
for toolCall in toolCalls {
   // Resolve the local function for this tool call; skip any tool this demo does not handle.
   guard
      let name = toolCall.function.name,
      let tool = StructuredToolCall(rawValue: name),
      let functionToCall = availableFunctions[tool]
   else { continue }
   let arguments = toolCall.function.arguments
   let content = functionToCall(arguments)
   let toolMessage = ChatCompletionParameters.Message(
      role: .tool,
      content: .text(content),
      name: name,
      toolCallID: toolCall.id)
   chatMessageParameters.append(toolMessage)
}

// Ask the model for a new response now that it can see the function results.
await continueChat()
}

} catch let error as APIError {
// If an error occurs, update the UI to display the error message.
await updateLastAssistantMessage(.init(content: .error("\(error.displayDescription)"), origin: .received(.gpt)))
}
}

// MARK: - Private

private let service: OpenAIService
private var lastDisplayedMessageID: UUID?
private var chatMessageParameters: [ChatCompletionParameters.Message] = []
private var availableFunctions: [StructuredToolCall: ((String) -> String)] = [:]

// MARK: Tool functions

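// The model already emits the UI description as the tool-call arguments, so this "function" simply returns them unchanged.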
func getStructureOutput(arguments: String) -> String {
arguments
}
}

// MARK: UI related

extension ChatStructuredOutputToolProvider {

func createUserMessage(
_ prompt: String)
-> ChatCompletionParameters.Message
{
.init(role: .user, content: .text(prompt))
}

// MARK: - Private Methods

@MainActor
private func startNewUserDisplayMessage(_ prompt: String) {
let startingMessage = ChatMessageDisplayModel(
content: .content(.init(text: prompt)),
origin: .sent)
addMessage(startingMessage)
}

@MainActor
private func startNewAssistantEmptyDisplayMessage() {
let newMessage = ChatMessageDisplayModel(
content: .content(.init(text: "")),
origin: .received(.gpt))
addMessage(newMessage)
}

@MainActor
private func updateLastAssistantMessage(
_ message: ChatMessageDisplayModel)
{
guard let id = lastDisplayedMessageID, let index = chatDisplayMessages.firstIndex(where: { $0.id == id }) else { return }

var lastMessage = chatDisplayMessages[index]

switch message.content {
case .content(let newMedia):
switch lastMessage.content {
case .content(let lastMedia):
var updatedMedia = lastMedia
if let newText = newMedia.text,
var lastMediaText = lastMedia.text {
lastMediaText += newText
updatedMedia.text = lastMediaText
} else {
updatedMedia.text = ""
}
if let urls = newMedia.urls {
updatedMedia.urls = urls
}
lastMessage.content = .content(updatedMedia)
case .error:
break
}
case .error:
lastMessage.content = message.content
}

chatDisplayMessages[index] = ChatMessageDisplayModel(
id: id,
content: lastMessage.content,
origin: message.origin)
}

@MainActor
private func addMessage(_ message: ChatMessageDisplayModel) {
let newMessageId = message.id
lastDisplayedMessageID = newMessageId
withAnimation {
chatDisplayMessages.append(message)
}
}

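// Sends the extended conversation (including tool results) back to the model so it can produce a final, user-facing reply.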
func continueChat() async {

let paramsForChat = ChatCompletionParameters(
messages: chatMessageParameters,
model: .gpt4o)
do {
let chat = try await service.startChat(parameters: paramsForChat)
guard let assistantMessage = chat.choices.first?.message else { return }
await updateLastAssistantMessage(.init(content: .content(.init(text: assistantMessage.content)), origin: .received(.gpt)))
} catch {
// If an error occurs, update the UI to display the error message.
await updateLastAssistantMessage(.init(content: .error("\(error)"), origin: .received(.gpt)))
}
}
}