Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
10 changes: 6 additions & 4 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -194,8 +194,9 @@ public struct AudioTranscriptionParameters: Encodable {
/// The sampling temperature, between 0 and 1. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic. If set to 0, the model will use [log probability](https://en.wikipedia.org/wiki/Log_probability) to automatically increase the temperature until certain thresholds are hit. Defaults to 0
let temperature: Double?

public enum Model: String {
case whisperOne = "whisper-1"
public enum Model {
case whisperOne
case custom(model: String)
}

public init(
Expand Down Expand Up @@ -252,8 +253,9 @@ public struct AudioTranslationParameters: Encodable {
/// The sampling temperature, between 0 and 1. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic. If set to 0, the model will use [log probability](https://en.wikipedia.org/wiki/Log_probability) to automatically increase the temperature until certain thresholds are hit. Defaults to 0
let temperature: Double?

public enum Model: String {
case whisperOne = "whisper-1"
public enum Model {
case whisperOne
case custom(model: String)
}

public init(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -28,8 +28,17 @@ public struct AudioTranscriptionParameters: Encodable {
/// The timestamp granularities to populate for this transcription. response_format must be set to verbose_json to use timestamp granularities. Either or both of these options are supported: word, or segment. Note: There is no additional latency for segment timestamps, but generating word timestamps incurs additional latency.
let timestampGranularities: [String]?

public enum Model: String {
case whisperOne = "whisper-1"
/// The speech-to-text model to use for transcription.
///
/// `.whisperOne` selects OpenAI's hosted "whisper-1" model; `.custom` passes
/// an arbitrary, caller-supplied model identifier through unchanged.
public enum Model {
   case whisperOne
   case custom(model: String)

   /// The raw model identifier string sent to the API.
   var value: String {
      switch self {
      case .whisperOne:
         return "whisper-1"
      case .custom(let model):
         return model
      }
   }
}

enum CodingKeys: String, CodingKey {
Expand All @@ -54,7 +63,7 @@ public struct AudioTranscriptionParameters: Encodable {
{
self.fileName = fileName
self.file = file
self.model = model.rawValue
self.model = model.value
self.prompt = prompt
self.responseFormat = responseFormat
self.temperature = temperature
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -23,8 +23,18 @@ public struct AudioTranslationParameters: Encodable {
/// The sampling temperature, between 0 and 1. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic. If set to 0, the model will use [log probability](https://en.wikipedia.org/wiki/Log_probability) to automatically increase the temperature until certain thresholds are hit. Defaults to 0
let temperature: Double?

public enum Model: String {
case whisperOne = "whisper-1"
/// The speech-to-text model to use for translation.
///
/// `.whisperOne` selects OpenAI's hosted "whisper-1" model; `.custom` passes
/// an arbitrary, caller-supplied model identifier through unchanged.
public enum Model {
   case whisperOne
   case custom(model: String)

   /// Resolves the case to the raw model identifier expected by the API.
   var value: String {
      switch self {
      case .whisperOne: "whisper-1"
      case let .custom(model): model
      }
   }
}

enum CodingKeys: String, CodingKey {
Expand All @@ -45,7 +55,7 @@ public struct AudioTranslationParameters: Encodable {
{
self.fileName = fileName
self.file = file
self.model = model.rawValue
self.model = model.value
self.prompt = prompt
self.responseFormat = responseFormat
self.temperature = temperature
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -148,7 +148,7 @@ public struct ChatCompletionParameters: Encodable {
public func encode(to encoder: Encoder) throws {
var container = encoder.container(keyedBy: CodingKeys.self)
try container.encode(url, forKey: .url)
try container.encode(detail, forKey: .detail)
try container.encodeIfPresent(detail, forKey: .detail)
}

public init(url: URL, detail: String? = nil) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -59,7 +59,7 @@ public struct ResponseModel: Decodable {
public let reasoning: Reasoning?

/// The status of the response generation. One of completed, failed, in_progress, or incomplete.
public let status: String
public let status: String?

/// What sampling temperature to use, between 0 and 2.
/// Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic.
Expand Down