Skip to content

Commit

Permalink
feat: add chat api (#99)
Browse files Browse the repository at this point in the history
  • Loading branch information
aallam authored Mar 2, 2023
1 parent eeae2aa commit 0345e53
Show file tree
Hide file tree
Showing 31 changed files with 768 additions and 29 deletions.
1 change: 1 addition & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -60,6 +60,7 @@ Use your `OpenAI` instance to make API requests. [Learn more](guides/GettingStar

- [Models](guides/GettingStarted.md#models)
- [Completions](guides/GettingStarted.md#completions)
- [Chat](guides/GettingStarted.md#chat)
- [Edits](guides/GettingStarted.md#edits)
- [Images](guides/GettingStarted.md#images)
- [Embeddings](guides/GettingStarted.md#embeddings)
Expand Down
27 changes: 26 additions & 1 deletion guides/GettingStarted.md
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,9 @@ Use your `OpenAI` instance to make API requests.
- [List models](#list-models)
- [Retrieve a model](#retrieve-a-model)
- [Completions](#completions)
- [Create Completion](#create-completion)
- [Create completion](#create-completion)
- [Chat](#chat)
- [Create chat completion](#create-chat-completion)
- [Edits](#edits)
- [Create edits](#create-edits)
- [Images](#images)
Expand Down Expand Up @@ -80,6 +82,29 @@ val completion: TextCompletion = openAI.completion(completionRequest)
val completions: Flow<TextCompletion> = openAI.completions(completionRequest)
```

## Chat

Given a chat conversation, the model will return a chat completion response.

### Create chat completion `beta`

Creates a completion for the chat message.

```kotlin
val chatCompletionRequest = ChatCompletionRequest(
    model = ModelId("gpt-3.5-turbo"),
    messages = listOf(
        ChatMessage(
            role = ChatRole.User,
            content = "Hello!"
        )
    )
)
val completion: ChatCompletion = openAI.chatCompletion(chatCompletionRequest)
// or, as flow
val completions: Flow<ChatCompletionChunk> = openAI.chatCompletions(chatCompletionRequest)
```

## Edits

Given a prompt and an instruction, the model will return an edited version of the prompt.
Expand Down
1 change: 1 addition & 0 deletions openai-client/build.gradle.kts
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@ kotlin {
optIn("okio.ExperimentalFileSystem")
optIn("kotlinx.coroutines.ExperimentalCoroutinesApi")
optIn("com.aallam.openai.api.ExperimentalOpenAI")
optIn("com.aallam.openai.api.BetaOpenAI")
}
}
val commonMain by getting {
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
package com.aallam.openai.client

import com.aallam.openai.api.BetaOpenAI
import com.aallam.openai.api.chat.ChatCompletion
import com.aallam.openai.api.chat.ChatCompletionChunk
import com.aallam.openai.api.chat.ChatCompletionRequest
import kotlinx.coroutines.flow.Flow

/**
* Given a chat conversation, the model will return a chat completion response.
*/
public interface Chat {

    /**
     * Creates a completion for the given chat conversation.
     *
     * Posts the [request] to the chat completions endpoint and returns the
     * full, non-streamed [ChatCompletion] response.
     *
     * @param request the chat completion request (model plus the conversation messages).
     * @return the generated chat completion.
     */
    @BetaOpenAI
    public suspend fun chatCompletion(request: ChatCompletionRequest): ChatCompletion

    /**
     * Stream variant of [chatCompletion].
     *
     * Results are emitted incrementally as [ChatCompletionChunk]s while the
     * server produces them, rather than as a single complete response.
     *
     * @param request the chat completion request; the `stream` flag is handled by the implementation.
     * @return a cold [Flow] of completion chunks.
     */
    @BetaOpenAI
    public fun chatCompletions(request: ChatCompletionRequest): Flow<ChatCompletionChunk>
}
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
package com.aallam.openai.client

import com.aallam.openai.api.ExperimentalOpenAI
import com.aallam.openai.api.BetaOpenAI
import com.aallam.openai.api.image.*

/**
Expand All @@ -12,41 +12,41 @@ public interface Images {
* Creates an image given a prompt.
* Get images as URLs.
*/
@ExperimentalOpenAI
@BetaOpenAI
public suspend fun imageURL(creation: ImageCreation): List<ImageURL>

/**
* Creates an image given a prompt.
* Get images as base 64 JSON.
*/
@ExperimentalOpenAI
@BetaOpenAI
public suspend fun imageJSON(creation: ImageCreation): List<ImageJSON>

/**
* Creates an edited or extended image given an original image and a prompt.
* Get images as URLs.
*/
@ExperimentalOpenAI
@BetaOpenAI
public suspend fun imageURL(edit: ImageEdit): List<ImageURL>

/**
* Creates an edited or extended image given an original image and a prompt.
* Get images as base 64 JSON.
*/
@ExperimentalOpenAI
@BetaOpenAI
public suspend fun imageJSON(edit: ImageEdit): List<ImageJSON>

/**
* Creates a variation of a given image.
* Get images as URLs.
*/
@ExperimentalOpenAI
@BetaOpenAI
public suspend fun imageURL(variation: ImageVariation): List<ImageURL>

/**
* Creates a variation of a given image.
* Get images as base 64 JSON.
*/
@ExperimentalOpenAI
@BetaOpenAI
public suspend fun imageJSON(variation: ImageVariation): List<ImageJSON>
}
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ import com.aallam.openai.client.internal.http.HttpTransport
/**
* OpenAI API.
*/
public interface OpenAI : Completions, Files, Edits, Embeddings, Models, Moderations, FineTunes, Images
public interface OpenAI : Completions, Files, Edits, Embeddings, Models, Moderations, FineTunes, Images, Chat

/**
* Creates an instance of [OpenAI].
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,4 +19,5 @@ internal class OpenAIApi(
Models by ModelsApi(requester),
Moderations by ModerationsApi(requester),
FineTunes by FineTunesApi(requester),
Images by ImagesApi(requester)
Images by ImagesApi(requester),
Chat by ChatApi(requester)
Original file line number Diff line number Diff line change
@@ -0,0 +1,48 @@
package com.aallam.openai.client.internal.api

import com.aallam.openai.api.chat.ChatCompletion
import com.aallam.openai.api.chat.ChatCompletionChunk
import com.aallam.openai.api.chat.ChatCompletionRequest
import com.aallam.openai.client.Chat
import com.aallam.openai.client.internal.extension.streamEventsFrom
import com.aallam.openai.client.internal.extension.streamRequestOf
import com.aallam.openai.client.internal.http.HttpRequester
import com.aallam.openai.client.internal.http.perform
import io.ktor.client.call.*
import io.ktor.client.request.*
import io.ktor.http.*
import kotlinx.coroutines.flow.Flow
import kotlinx.coroutines.flow.flow

/** Implementation of the [Chat] endpoints backed by [HttpRequester]. */
internal class ChatApi(private val requester: HttpRequester) : Chat {

    /** Performs a single, non-streamed chat completion request. */
    override suspend fun chatCompletion(request: ChatCompletionRequest): ChatCompletion =
        requester.perform { client ->
            client.post {
                url(path = ChatCompletionsPathV1)
                contentType(ContentType.Application.Json)
                setBody(request)
            }.body()
        }

    /** Performs a streamed chat completion request, emitting chunks as server-sent events. */
    override fun chatCompletions(request: ChatCompletionRequest): Flow<ChatCompletionChunk> {
        // Build the request up-front; `streamRequestOf` injects the `stream = true` flag.
        val streamingRequest = HttpRequestBuilder().apply {
            method = HttpMethod.Post
            url(path = ChatCompletionsPathV1)
            contentType(ContentType.Application.Json)
            accept(ContentType.Text.EventStream)
            setBody(streamRequestOf(request))
            headers {
                append(HttpHeaders.CacheControl, "no-cache")
                append(HttpHeaders.Connection, "keep-alive")
            }
        }
        // Cold flow: the HTTP call is made only once the flow is collected.
        return flow {
            requester.perform(streamingRequest) { httpResponse -> streamEventsFrom(httpResponse) }
        }
    }

    companion object {
        private const val ChatCompletionsPathV1 = "v1/chat/completions"
    }
}
Original file line number Diff line number Diff line change
Expand Up @@ -7,10 +7,10 @@ import com.aallam.openai.client.internal.extension.streamEventsFrom
import com.aallam.openai.client.internal.extension.toStreamRequest
import com.aallam.openai.client.internal.http.HttpRequester
import com.aallam.openai.client.internal.http.perform
import io.ktor.client.call.body
import io.ktor.client.call.*
import io.ktor.client.request.*
import io.ktor.http.*
import io.ktor.util.InternalAPI
import io.ktor.util.*
import kotlinx.coroutines.*
import kotlinx.coroutines.flow.Flow
import kotlinx.coroutines.flow.flow
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,27 +4,27 @@ import com.aallam.openai.api.completion.CompletionRequest
import com.aallam.openai.api.file.FileSource
import com.aallam.openai.client.internal.JsonLenient
import io.ktor.client.request.forms.*
import io.ktor.http.ContentType
import io.ktor.utils.io.core.writeFully
import kotlinx.serialization.json.JsonElement
import kotlinx.serialization.json.JsonObject
import kotlinx.serialization.json.JsonPrimitive
import kotlinx.serialization.json.jsonObject
import okio.*
import io.ktor.http.*
import io.ktor.utils.io.core.*
import kotlinx.serialization.json.*
import okio.buffer
import okio.use

/**
* Adds `stream` parameter to the request.
* Tokens will be sent as data-only [server-sent events](https://developer.mozilla.org/en-US/docs/Web/API/Server-sent_events/Using_server-sent_events#Event_stream_format)
* as they become available, with the stream terminated by a data: `[DONE]` message.
*/
internal fun CompletionRequest.toStreamRequest(): JsonElement {
val json = JsonLenient.encodeToJsonElement(CompletionRequest.serializer(), this)
return streamRequestOf(json)
}

internal inline fun <reified T> streamRequestOf(serializable: T): JsonElement {
val enableStream = "stream" to JsonPrimitive(true)
val jsonElement = JsonLenient.encodeToJsonElement(CompletionRequest.serializer(), this)
val map = jsonElement.jsonObject.toMutableMap().also { it += enableStream }
val json = JsonLenient.encodeToJsonElement(serializable)
val map = json.jsonObject.toMutableMap().also { it += enableStream }
return JsonObject(map)
}


internal fun FormBuilder.appendFileSource(key: String, fileSource: FileSource) {
append(key, fileSource.name, ContentType.Application.OctetStream) {
fileSource.source.buffer().use { source ->
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,51 @@
package com.aallam.openai.client

import com.aallam.openai.api.chat.ChatCompletion
import com.aallam.openai.api.chat.ChatCompletionChunk
import com.aallam.openai.api.chat.ChatRole
import com.aallam.openai.api.chat.chatCompletionRequest
import com.aallam.openai.api.model.ModelId
import kotlinx.coroutines.flow.launchIn
import kotlinx.coroutines.flow.onEach
import kotlinx.coroutines.test.runTest
import kotlin.test.Test
import kotlin.test.assertNotEquals
import kotlin.test.assertTrue

class TestChatCompletions : TestOpenAI() {

    @Test
    fun chatCompletions() {
        runTest {
            // Multi-turn conversation fixture: system prompt followed by a short
            // user/assistant exchange ending in a follow-up question.
            val request = chatCompletionRequest {
                model = ModelId("gpt-3.5-turbo")
                messages {
                    message {
                        role = ChatRole.System
                        // Fixed garbled punctuation: was "You are a helpful assistant.!"
                        content = "You are a helpful assistant."
                    }
                    message {
                        role = ChatRole.User
                        content = "Who won the world series in 2020?"
                    }
                    message {
                        role = ChatRole.Assistant
                        content = "The Los Angeles Dodgers won the World Series in 2020."
                    }
                    message {
                        role = ChatRole.User
                        content = "Where was it played?"
                    }
                }
            }

            // Non-streamed call should yield at least one choice.
            val completion = openAI.chatCompletion(request)
            assertTrue { completion.choices.isNotEmpty() }

            // Streamed variant should emit at least one chunk before completing.
            val results = mutableListOf<ChatCompletionChunk>()
            openAI.chatCompletions(request).onEach { results += it }.launchIn(this).join()

            assertNotEquals(0, results.size)
        }
    }
}
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
package com.aallam.openai.client

import com.aallam.openai.api.ExperimentalOpenAI
import com.aallam.openai.api.file.Purpose
import com.aallam.openai.api.file.fileSource
import com.aallam.openai.api.file.fileUpload
Expand All @@ -18,7 +17,6 @@ import kotlin.test.assertEquals
import kotlin.test.assertNotNull
import kotlin.test.assertTrue

@OptIn(ExperimentalOpenAI::class)
class TestFineTunes : TestOpenAI() {

@Test
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@
package com.aallam.openai.api

import kotlin.annotation.AnnotationTarget.*

/**
 * This annotation marks a library API as beta.
 *
 * Any usage of a declaration annotated with `@BetaOpenAI` must be accepted either by annotating that
 * usage with the [OptIn] annotation, e.g. `@OptIn(BetaOpenAI::class)`, or by using the compiler
 * argument `-Xopt-in=com.aallam.openai.api.BetaOpenAI`.
 */
@Target(
    CLASS,
    ANNOTATION_CLASS,
    PROPERTY,
    FIELD,
    LOCAL_VARIABLE,
    VALUE_PARAMETER,
    CONSTRUCTOR,
    FUNCTION,
    PROPERTY_GETTER,
    PROPERTY_SETTER,
    TYPEALIAS
)
@Retention(AnnotationRetention.BINARY)
// Fixed comma splice / stray capital in the user-facing opt-in message.
@RequiresOptIn(message = "This API is marked as beta by OpenAI. It can be incompatibly changed in the future.")
public annotation class BetaOpenAI
Original file line number Diff line number Diff line change
Expand Up @@ -23,5 +23,5 @@ import kotlin.annotation.AnnotationTarget.*
TYPEALIAS
)
@Retention(AnnotationRetention.BINARY)
@RequiresOptIn(message = "This OpenAI API is experimental, It can be incompatibly changed in the future.")
@RequiresOptIn(message = "This library API is experimental, It can be incompatibly changed in the future.")
public annotation class ExperimentalOpenAI
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
// Fixed: removed stray Java-style semicolon after the package declaration.
package com.aallam.openai.api.chat

import com.aallam.openai.api.BetaOpenAI
import kotlinx.serialization.SerialName
import kotlinx.serialization.Serializable

/**
 * A completion choice generated by OpenAI.
 *
 * All properties are nullable with `null` defaults so that partial API payloads deserialize cleanly.
 *
 * [documentation](https://platform.openai.com/docs/api-reference/chat/create)
 */
@BetaOpenAI
@Serializable
public data class ChatChoice internal constructor(
    /**
     * Chat choice index.
     */
    @SerialName("index") public val index: Int? = null,

    /**
     * The generated chat message.
     */
    @SerialName("message") public val message: ChatMessage? = null,

    /**
     * The reason why OpenAI stopped generating.
     */
    @SerialName("finish_reason") public val finishReason: String? = null,
)
Loading

0 comments on commit 0345e53

Please sign in to comment.