diff --git a/images/anchor-copy-button.svg b/images/anchor-copy-button.svg new file mode 100644 index 00000000..bab9d747 --- /dev/null +++ b/images/anchor-copy-button.svg @@ -0,0 +1,4 @@ + + + + \ No newline at end of file diff --git a/images/arrow_down.svg b/images/arrow_down.svg new file mode 100644 index 00000000..c0388dee --- /dev/null +++ b/images/arrow_down.svg @@ -0,0 +1,3 @@ + + + \ No newline at end of file diff --git a/images/copy-icon.svg b/images/copy-icon.svg new file mode 100644 index 00000000..61440f0a --- /dev/null +++ b/images/copy-icon.svg @@ -0,0 +1,3 @@ + + + \ No newline at end of file diff --git a/images/copy-successful-icon.svg b/images/copy-successful-icon.svg new file mode 100644 index 00000000..1865f739 --- /dev/null +++ b/images/copy-successful-icon.svg @@ -0,0 +1,3 @@ + + + \ No newline at end of file diff --git a/images/footer-go-to-link.svg b/images/footer-go-to-link.svg new file mode 100644 index 00000000..0137e223 --- /dev/null +++ b/images/footer-go-to-link.svg @@ -0,0 +1,3 @@ + + + \ No newline at end of file diff --git a/images/go-to-top-icon.svg b/images/go-to-top-icon.svg new file mode 100644 index 00000000..d987f3ea --- /dev/null +++ b/images/go-to-top-icon.svg @@ -0,0 +1,4 @@ + + + + \ No newline at end of file diff --git a/images/logo-icon.svg b/images/logo-icon.svg new file mode 100644 index 00000000..1fea0877 --- /dev/null +++ b/images/logo-icon.svg @@ -0,0 +1,10 @@ + + + + + + + + + + \ No newline at end of file diff --git a/images/nav-icons/abstract-class-kotlin.svg b/images/nav-icons/abstract-class-kotlin.svg new file mode 100644 index 00000000..a2069b8f --- /dev/null +++ b/images/nav-icons/abstract-class-kotlin.svg @@ -0,0 +1,22 @@ + + + + + + + + + + + + + + + + + + + + + + diff --git a/images/nav-icons/abstract-class.svg b/images/nav-icons/abstract-class.svg new file mode 100644 index 00000000..60182030 --- /dev/null +++ b/images/nav-icons/abstract-class.svg @@ -0,0 +1,20 @@ + + + + + + + + + + + + + + + + + + + + diff --git a/images/nav-icons/annotation-kotlin.svg b/images/nav-icons/annotation-kotlin.svg new file mode 100644 index 00000000..932f1d3d --- /dev/null +++ b/images/nav-icons/annotation-kotlin.svg @@ -0,0 +1,9 @@ + + + + + + + + + diff --git a/images/nav-icons/annotation.svg b/images/nav-icons/annotation.svg new file mode 100644 index 00000000..b80c54b4 --- /dev/null +++ b/images/nav-icons/annotation.svg @@ -0,0 +1,7 @@ + + + + + + + diff --git a/images/nav-icons/class-kotlin.svg b/images/nav-icons/class-kotlin.svg new file mode 100644 index 00000000..46a21f65 --- /dev/null +++ b/images/nav-icons/class-kotlin.svg @@ -0,0 +1,9 @@ + + + + + + + + + diff --git a/images/nav-icons/class.svg b/images/nav-icons/class.svg new file mode 100644 index 00000000..3f1ad167 --- /dev/null +++ b/images/nav-icons/class.svg @@ -0,0 +1,7 @@ + + + + + + + diff --git a/images/nav-icons/enum-kotlin.svg b/images/nav-icons/enum-kotlin.svg new file mode 100644 index 00000000..4a854596 --- /dev/null +++ b/images/nav-icons/enum-kotlin.svg @@ -0,0 +1,9 @@ + + + + + + + + + diff --git a/images/nav-icons/enum.svg b/images/nav-icons/enum.svg new file mode 100644 index 00000000..fa7f2476 --- /dev/null +++ b/images/nav-icons/enum.svg @@ -0,0 +1,7 @@ + + + + + + + diff --git a/images/nav-icons/exception-class.svg b/images/nav-icons/exception-class.svg new file mode 100644 index 00000000..c0b2bdeb --- /dev/null +++ b/images/nav-icons/exception-class.svg @@ -0,0 +1,7 @@ + + + + + + + diff --git a/images/nav-icons/field-value.svg b/images/nav-icons/field-value.svg new 
file mode 100644 index 00000000..20449c94 --- /dev/null +++ b/images/nav-icons/field-value.svg @@ -0,0 +1,6 @@ + + + + + + diff --git a/images/nav-icons/field-variable.svg b/images/nav-icons/field-variable.svg new file mode 100644 index 00000000..3b074500 --- /dev/null +++ b/images/nav-icons/field-variable.svg @@ -0,0 +1,6 @@ + + + + + + diff --git a/images/nav-icons/function.svg b/images/nav-icons/function.svg new file mode 100644 index 00000000..f0da64a0 --- /dev/null +++ b/images/nav-icons/function.svg @@ -0,0 +1,7 @@ + + + + + + + diff --git a/images/nav-icons/interface-kotlin.svg b/images/nav-icons/interface-kotlin.svg new file mode 100644 index 00000000..bf07a148 --- /dev/null +++ b/images/nav-icons/interface-kotlin.svg @@ -0,0 +1,9 @@ + + + + + + + + + diff --git a/images/nav-icons/interface.svg b/images/nav-icons/interface.svg new file mode 100644 index 00000000..32063ba2 --- /dev/null +++ b/images/nav-icons/interface.svg @@ -0,0 +1,7 @@ + + + + + + + diff --git a/images/nav-icons/object.svg b/images/nav-icons/object.svg new file mode 100644 index 00000000..9f427de4 --- /dev/null +++ b/images/nav-icons/object.svg @@ -0,0 +1,9 @@ + + + + + + + + + diff --git a/images/theme-toggle.svg b/images/theme-toggle.svg new file mode 100644 index 00000000..2a8d750e --- /dev/null +++ b/images/theme-toggle.svg @@ -0,0 +1,4 @@ + + + \ No newline at end of file diff --git a/index.html b/index.html new file mode 100644 index 00000000..19425ac5 --- /dev/null +++ b/index.html @@ -0,0 +1,91 @@ + + + + + All modules + + + + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

All modules:

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
+
+
+
+ +
+
+ + diff --git a/navigation.html b/navigation.html new file mode 100644 index 00000000..ed961ce8 --- /dev/null +++ b/navigation.html @@ -0,0 +1,967 @@ +
+
+ +
+ +
+
+ Audio +
+
+
+
+ Chat +
+
+
+
+ Closeable +
+
+
+ +
+
+
+ Edits +
+
+
+ +
+
+
+ Files +
+
+
+
+ FineTunes +
+
+
+ +
+
+
+ Images +
+
+
+ +
+
+
+ Models +
+
+
+ +
+
+
+ OpenAI +
+
+
+
+ OpenAI() +
+
+
+ +
+
+ +
+
+ Companion +
+
+
+
+ +
+
+ Http +
+
+
+
+ Socks +
+
+
+
+ +
+
+ +
+
+ +
+ +
+ +
+ +
+ +
+
+ +
+
+
+ OpenAIDsl +
+
+
+ +
+ +
+ +
+
+
+ ChatChunk +
+
+
+ +
+ + + + + + + +
+
+ ChatDelta +
+
+
+ +
+
+ +
+ + +
+
+ ChatRole +
+
+
+ Companion +
+
+
+
+ +
+
+ +
+
+ Companion +
+
+
+
+ Default +
+
+
+
+ Named +
+
+
+ +
+ +
+
+ Companion +
+
+ +
+
+ +
+ +
+ +
+
+ +
+
+ Companion +
+
+
+
+ +
+
+ +
+
+ +
+
+
+ Status +
+
+
+ Companion +
+
+
+
+
+ Usage +
+
+
+
+ +
+
+ Edit +
+
+
+ +
+
+ +
+ +
+ + +
+ +
+
+ File +
+
+
+
+ FileId +
+
+
+ +
+
+ +
+ +
+ +
+
+ +
+
+ +
+ +
+
+ Purpose +
+
+
+
+ +
+
+ FineTune +
+
+
+ +
+
+ +
+
+ +
+ + +
+ +
+
+
+ +
+
+ ErrorInfo +
+
+
+ +
+
+ +
+ + + + +
+ +
+
+ NEpochs +
+
+
+ Companion +
+
+
+
+
+
+ Level +
+
+
+ Companion +
+
+
+
+ +
+ +
+ +
+
+ +
+ +
+
+ ImageEdit +
+
+
+ +
+ +
+
+ ImageJSON +
+
+
+
+ ImageSize +
+
+
+ Companion +
+
+
+
+
+ ImageURL +
+
+
+ +
+ + +
+ +
+ +
+
+ Logger +
+
+
+ Default +
+
+
+
+ Simple +
+
+
+
+ Empty +
+
+
+
+
+ LogLevel +
+
+
+ All +
+
+
+
+ Headers +
+
+
+
+ Body +
+
+
+
+ Info +
+
+
+
+ None +
+
+
+
+
+ +
+
+ Model +
+
+
+
+ ModelId +
+
+
+ +
+
+ +
+
diff --git a/openai-client/com.aallam.openai.client.extension/distance.html b/openai-client/com.aallam.openai.client.extension/distance.html new file mode 100644 index 00000000..159dbd5a --- /dev/null +++ b/openai-client/com.aallam.openai.client.extension/distance.html @@ -0,0 +1,66 @@ + + + + + distance + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

distance

+
+

Calculate the distance between two embeddings, corresponding to 1.0 - similarity.

+
+ +
+
+ + + diff --git a/openai-client/com.aallam.openai.client.extension/index.html b/openai-client/com.aallam.openai.client.extension/index.html new file mode 100644 index 00000000..fa2d47bf --- /dev/null +++ b/openai-client/com.aallam.openai.client.extension/index.html @@ -0,0 +1,104 @@ + + + + + com.aallam.openai.client.extension + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

Package-level declarations

+
+
+
+
+
+

Functions

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+

Calculate the distance between two embeddings, corresponding to 1.0 - similarity.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

Compute the similarity of two embeddings using cosine similarity.

+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-client/com.aallam.openai.client.extension/similarity.html b/openai-client/com.aallam.openai.client.extension/similarity.html new file mode 100644 index 00000000..5e24d903 --- /dev/null +++ b/openai-client/com.aallam.openai.client.extension/similarity.html @@ -0,0 +1,66 @@ + + + + + similarity + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

similarity

+
+

Compute the similarity of two embeddings using cosine similarity.

+
+ +
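A minimal sketch of using the two extension functions in this package, assuming they are extensions on Embedding and that EmbeddingResponse exposes the resulting vectors through an embeddings property (neither detail is shown in this diff):
// Hypothetical usage; responseA and responseB are EmbeddingResponse values returned by openAI.embeddings(...).
val a = responseA.embeddings.first()
val b = responseB.embeddings.first()
println(a.similarity(b))   // cosine similarity of the two vectors
println(a.distance(b))     // distance, i.e. 1.0 - similarity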
+
+ + + diff --git a/openai-client/com.aallam.openai.client/-audio/index.html b/openai-client/com.aallam.openai.client/-audio/index.html new file mode 100644 index 00000000..4514d97f --- /dev/null +++ b/openai-client/com.aallam.openai.client/-audio/index.html @@ -0,0 +1,105 @@ + + + + + Audio + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

Audio

+
interface Audio

Learn how to turn audio into text.

Inheritors

+
+
+
+
+
+

Functions

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
abstract suspend fun transcription(request: TranscriptionRequest): Transcription

Transcribes audio into the input language.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
abstract suspend fun translation(request: TranslationRequest): Translation

Translates audio into English.

+
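A hedged sketch of the two Audio operations above, assuming an OpenAI client instance named openAI is in scope; the request values are placeholders because the TranscriptionRequest and TranslationRequest constructor parameters are not shown in this diff:
// transcriptionRequest / translationRequest are assumed to wrap an audio file source and a Whisper model id.
val transcription: Transcription = openAI.transcription(transcriptionRequest)   // text in the input language
val translation: Translation = openAI.translation(translationRequest)           // English text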
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-client/com.aallam.openai.client/-audio/transcription.html b/openai-client/com.aallam.openai.client/-audio/transcription.html new file mode 100644 index 00000000..1275b91b --- /dev/null +++ b/openai-client/com.aallam.openai.client/-audio/transcription.html @@ -0,0 +1,66 @@ + + + + + transcription + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

transcription

+
+
abstract suspend fun transcription(request: TranscriptionRequest): Transcription

Transcribes audio into the input language.

+
+ +
+
+ + + diff --git a/openai-client/com.aallam.openai.client/-audio/translation.html b/openai-client/com.aallam.openai.client/-audio/translation.html new file mode 100644 index 00000000..7549d587 --- /dev/null +++ b/openai-client/com.aallam.openai.client/-audio/translation.html @@ -0,0 +1,66 @@ + + + + + translation + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

translation

+
+
abstract suspend fun translation(request: TranslationRequest): Translation

Translates audio into English.

+
+ +
+
+ + + diff --git a/openai-client/com.aallam.openai.client/-chat/chat-completion.html b/openai-client/com.aallam.openai.client/-chat/chat-completion.html new file mode 100644 index 00000000..62e1480a --- /dev/null +++ b/openai-client/com.aallam.openai.client/-chat/chat-completion.html @@ -0,0 +1,66 @@ + + + + + chatCompletion + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

chatCompletion

+
+
abstract suspend fun chatCompletion(request: ChatCompletionRequest): ChatCompletion

Creates a completion for the chat message.

+
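A hedged sketch of calling chatCompletion, assuming an OpenAI client named openAI is in scope; the ChatCompletionRequest fields (model, messages), ChatMessage, and the choices property are assumptions for illustration, not taken from this diff:
val request = ChatCompletionRequest(
    model = ModelId("gpt-3.5-turbo"),   // placeholder model id
    messages = listOf(ChatMessage(role = ChatRole.User, content = "Hello!")),
)
val completion: ChatCompletion = openAI.chatCompletion(request)
println(completion.choices.first())     // assumed: the first generated choice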
+ +
+
+ + + diff --git a/openai-client/com.aallam.openai.client/-chat/chat-completions.html b/openai-client/com.aallam.openai.client/-chat/chat-completions.html new file mode 100644 index 00000000..5f53216c --- /dev/null +++ b/openai-client/com.aallam.openai.client/-chat/chat-completions.html @@ -0,0 +1,66 @@ + + + + + chatCompletions + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

chatCompletions

+
+

Stream variant of chatCompletion.

+
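Since chatCompletions is the streaming variant, its result is collected as a Flow; a minimal sketch reusing the request from the chatCompletion example, with the chunk type left unnamed because it is not shown on this page:
// Each emission is a partial chunk of the final chat completion.
openAI.chatCompletions(request).collect { chunk ->
    print(chunk)
}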
+ +
+
+ + + diff --git a/openai-client/com.aallam.openai.client/-chat/index.html b/openai-client/com.aallam.openai.client/-chat/index.html new file mode 100644 index 00000000..ba9f4334 --- /dev/null +++ b/openai-client/com.aallam.openai.client/-chat/index.html @@ -0,0 +1,105 @@ + + + + + Chat + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

Chat

+
interface Chat

Given a chat conversation, the model will return a chat completion response.

Inheritors

+
+
+
+
+
+

Functions

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
abstract suspend fun chatCompletion(request: ChatCompletionRequest): ChatCompletion

Creates a completion for the chat message.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

Stream variant of chatCompletion.

+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-client/com.aallam.openai.client/-closeable/close.html b/openai-client/com.aallam.openai.client/-closeable/close.html new file mode 100644 index 00000000..8dc77545 --- /dev/null +++ b/openai-client/com.aallam.openai.client/-closeable/close.html @@ -0,0 +1,70 @@ + + + + + close + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

close

+
+
+
+
expect abstract fun close()

Closes underlying resources

actual abstract fun close()
actual abstract fun close()
+
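Because the OpenAI client implements Closeable, it should be closed once it is no longer needed; a minimal sketch (the token value is a placeholder):
val openAI = OpenAI(token = "sk-...")
try {
    // ... issue requests ...
} finally {
    openAI.close()   // releases the underlying HTTP client resources
}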
+ +
+
+ + + diff --git a/openai-client/com.aallam.openai.client/-closeable/index.html b/openai-client/com.aallam.openai.client/-closeable/index.html new file mode 100644 index 00000000..33f67925 --- /dev/null +++ b/openai-client/com.aallam.openai.client/-closeable/index.html @@ -0,0 +1,97 @@ + + + + + Closeable + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

Closeable

+
+
+
expect interface Closeable

Defines a closeable resource. This will be replaced by AutoCloseable once it becomes stable.

Inheritors

actual interface Closeable
actual typealias Closeable = AutoCloseable
actual interface Closeable
+
+
+
+
+
+

Functions

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
+
+
expect abstract fun close()

Closes underlying resources

actual abstract fun close()
actual abstract fun close()
+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-client/com.aallam.openai.client/-completions/completion.html b/openai-client/com.aallam.openai.client/-completions/completion.html new file mode 100644 index 00000000..a0943005 --- /dev/null +++ b/openai-client/com.aallam.openai.client/-completions/completion.html @@ -0,0 +1,66 @@ + + + + + completion + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

completion

+
+
abstract suspend fun completion(request: CompletionRequest): TextCompletion

This is the main endpoint of the API. Returns the predicted completion for the given prompt, and can also return the probabilities of alternative tokens at each position if requested.

+
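A hedged sketch of a text completion call, assuming an OpenAI client named openAI is in scope; the CompletionRequest parameter names (model, prompt, maxTokens) are assumptions not shown in this diff:
val completion: TextCompletion = openAI.completion(
    CompletionRequest(
        model = ModelId("gpt-3.5-turbo-instruct"),   // placeholder model id
        prompt = "Write a haiku about Kotlin.",
        maxTokens = 64,
    )
)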
+ +
+
+ + + diff --git a/openai-client/com.aallam.openai.client/-completions/completions.html b/openai-client/com.aallam.openai.client/-completions/completions.html new file mode 100644 index 00000000..0a32b67b --- /dev/null +++ b/openai-client/com.aallam.openai.client/-completions/completions.html @@ -0,0 +1,66 @@ + + + + + completions + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

completions

+
+
abstract fun completions(request: CompletionRequest): Flow<TextCompletion>

Stream variant of completion.

+
+ +
+
+ + + diff --git a/openai-client/com.aallam.openai.client/-completions/index.html b/openai-client/com.aallam.openai.client/-completions/index.html new file mode 100644 index 00000000..d067abc7 --- /dev/null +++ b/openai-client/com.aallam.openai.client/-completions/index.html @@ -0,0 +1,105 @@ + + + + + Completions + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

Completions

+
interface Completions

Given a prompt, the model will return one or more predicted completions, and can also return the probabilities of alternative tokens at each position.

Inheritors

+
+
+
+
+
+

Functions

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
abstract suspend fun completion(request: CompletionRequest): TextCompletion

This is the main endpoint of the API. Returns the predicted completion for the given prompt, and can also return the probabilities of alternative tokens at each position if requested.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
abstract fun completions(request: CompletionRequest): Flow<TextCompletion>

Stream variant of completion.

+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-client/com.aallam.openai.client/-edits/edit.html b/openai-client/com.aallam.openai.client/-edits/edit.html new file mode 100644 index 00000000..9729b954 --- /dev/null +++ b/openai-client/com.aallam.openai.client/-edits/edit.html @@ -0,0 +1,66 @@ + + + + + edit + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

edit

+
+
abstract suspend fun edit(request: EditsRequest): Edit

Deprecated

Edits is deprecated. Chat completions is the recommended replacement.

Creates a new edit for the provided input, instruction, and parameters.

+
+ +
+
+ + + diff --git a/openai-client/com.aallam.openai.client/-edits/index.html b/openai-client/com.aallam.openai.client/-edits/index.html new file mode 100644 index 00000000..0b9e7a03 --- /dev/null +++ b/openai-client/com.aallam.openai.client/-edits/index.html @@ -0,0 +1,90 @@ + + + + + Edits + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

Edits

+
interface Edits

Given a prompt and an instruction, the model will return an edited version of the prompt.

Inheritors

+
+
+
+
+
+

Functions

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
abstract suspend fun edit(request: EditsRequest): Edit

Creates a new edit for the provided input, instruction, and parameters.

+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-client/com.aallam.openai.client/-embeddings/embeddings.html b/openai-client/com.aallam.openai.client/-embeddings/embeddings.html new file mode 100644 index 00000000..04b5eb2b --- /dev/null +++ b/openai-client/com.aallam.openai.client/-embeddings/embeddings.html @@ -0,0 +1,66 @@ + + + + + embeddings + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

embeddings

+
+
abstract suspend fun embeddings(request: EmbeddingRequest): EmbeddingResponse

Creates an embedding vector representing the input text.

+
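A hedged sketch of requesting embeddings, assuming an OpenAI client named openAI is in scope; the EmbeddingRequest parameter names (model, input) are assumptions not shown in this diff:
val response: EmbeddingResponse = openAI.embeddings(
    EmbeddingRequest(
        model = ModelId("text-embedding-ada-002"),   // placeholder model id
        input = listOf("kotlin multiplatform", "openai client"),
    )
)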
+ +
+
+ + + diff --git a/openai-client/com.aallam.openai.client/-embeddings/index.html b/openai-client/com.aallam.openai.client/-embeddings/index.html new file mode 100644 index 00000000..c801b5f8 --- /dev/null +++ b/openai-client/com.aallam.openai.client/-embeddings/index.html @@ -0,0 +1,90 @@ + + + + + Embeddings + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

Embeddings

+
interface Embeddings

Get a vector representation of a given input that can be easily consumed by machine learning models and algorithms.

Inheritors

+
+
+
+
+
+

Functions

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
abstract suspend fun embeddings(request: EmbeddingRequest): EmbeddingResponse

Creates an embedding vector representing the input text.

+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-client/com.aallam.openai.client/-files/delete.html b/openai-client/com.aallam.openai.client/-files/delete.html new file mode 100644 index 00000000..d1a4d73c --- /dev/null +++ b/openai-client/com.aallam.openai.client/-files/delete.html @@ -0,0 +1,66 @@ + + + + + delete + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

delete

+
+
abstract suspend fun delete(fileId: FileId): Boolean

Delete a file. Only owners of organizations can delete files currently.

+
+ +
+
+ + + diff --git a/openai-client/com.aallam.openai.client/-files/download.html b/openai-client/com.aallam.openai.client/-files/download.html new file mode 100644 index 00000000..8ac5d71e --- /dev/null +++ b/openai-client/com.aallam.openai.client/-files/download.html @@ -0,0 +1,66 @@ + + + + + download + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

download

+
+
abstract suspend fun download(fileId: FileId): ByteArray

Returns the contents of the specified fileId.

+
+ +
+
+ + + diff --git a/openai-client/com.aallam.openai.client/-files/file.html b/openai-client/com.aallam.openai.client/-files/file.html new file mode 100644 index 00000000..0b5ca9d6 --- /dev/null +++ b/openai-client/com.aallam.openai.client/-files/file.html @@ -0,0 +1,66 @@ + + + + + file + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

file

+
+
abstract suspend fun file(request: FileUpload): File

Upload a file that contains document(s) to be used across various endpoints/features. Currently, the size of all the files uploaded by one organization can be up to 1 GB.


abstract suspend fun file(fileId: FileId): File?

Returns information about a specific file.

+
+ +
+
+ + + diff --git a/openai-client/com.aallam.openai.client/-files/files.html b/openai-client/com.aallam.openai.client/-files/files.html new file mode 100644 index 00000000..45e00ba4 --- /dev/null +++ b/openai-client/com.aallam.openai.client/-files/files.html @@ -0,0 +1,66 @@ + + + + + files + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

files

+
+
abstract suspend fun files(): List<File>

Returns a list of files that belong to the user's organization.

+
+ +
+
+ + + diff --git a/openai-client/com.aallam.openai.client/-files/index.html b/openai-client/com.aallam.openai.client/-files/index.html new file mode 100644 index 00000000..6437905e --- /dev/null +++ b/openai-client/com.aallam.openai.client/-files/index.html @@ -0,0 +1,135 @@ + + + + + Files + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

Files

+
interface Files

Files are used to upload documents that can be used across features like Answers, Searches, and Classifications.

Inheritors

+
+
+
+
+
+

Functions

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
abstract suspend fun delete(fileId: FileId): Boolean

Delete a file. Only owners of organizations can delete files currently.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
abstract suspend fun download(fileId: FileId): ByteArray

Returns the contents of the specified fileId.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
abstract suspend fun file(fileId: FileId): File?

Returns information about a specific file.

abstract suspend fun file(request: FileUpload): File

Upload a file that contains document(s) to be used across various endpoints/features. Currently, the size of all the files uploaded by one organization can be up to 1 GB.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
abstract suspend fun files(): List<File>

Returns a list of files that belong to the user's organization.

+
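A sketch combining the four Files operations listed above, assuming an OpenAI client named openAI is in scope; constructing a FileId from a raw string is an assumption:
val all: List<File> = openAI.files()                        // files owned by the organization
val info: File? = openAI.file(FileId("file-abc123"))        // metadata for a single file
val content: ByteArray = openAI.download(FileId("file-abc123"))
val deleted: Boolean = openAI.delete(FileId("file-abc123"))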
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-client/com.aallam.openai.client/-fine-tunes/cancel.html b/openai-client/com.aallam.openai.client/-fine-tunes/cancel.html new file mode 100644 index 00000000..dec718bf --- /dev/null +++ b/openai-client/com.aallam.openai.client/-fine-tunes/cancel.html @@ -0,0 +1,66 @@ + + + + + cancel + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

cancel

+
+
abstract suspend fun cancel(fineTuneId: FineTuneId): FineTune?

Deprecated

Use FineTuning instead.

Immediately cancel a fine-tune job.

+
+ +
+
+ + + diff --git a/openai-client/com.aallam.openai.client/-fine-tunes/delete.html b/openai-client/com.aallam.openai.client/-fine-tunes/delete.html new file mode 100644 index 00000000..fe10d000 --- /dev/null +++ b/openai-client/com.aallam.openai.client/-fine-tunes/delete.html @@ -0,0 +1,66 @@ + + + + + delete + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

delete

+
+
abstract suspend fun delete(fineTuneModel: ModelId): Boolean

Deprecated

Use FineTuning instead.

Delete a fine-tuned model. You must have the Owner role in your organization.

+
+ +
+
+ + + diff --git a/openai-client/com.aallam.openai.client/-fine-tunes/fine-tune-events-flow.html b/openai-client/com.aallam.openai.client/-fine-tunes/fine-tune-events-flow.html new file mode 100644 index 00000000..1b80e00d --- /dev/null +++ b/openai-client/com.aallam.openai.client/-fine-tunes/fine-tune-events-flow.html @@ -0,0 +1,66 @@ + + + + + fineTuneEventsFlow + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

fineTuneEventsFlow

+
+
abstract fun fineTuneEventsFlow(fineTuneId: FineTuneId): Flow<FineTuneEvent>

Deprecated

Use FineTuning instead.

Get fine-grained status updates for fine-tune job.

+
+ +
+
+ + + diff --git a/openai-client/com.aallam.openai.client/-fine-tunes/fine-tune-events.html b/openai-client/com.aallam.openai.client/-fine-tunes/fine-tune-events.html new file mode 100644 index 00000000..5cca529b --- /dev/null +++ b/openai-client/com.aallam.openai.client/-fine-tunes/fine-tune-events.html @@ -0,0 +1,66 @@ + + + + + fineTuneEvents + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

fineTuneEvents

+
+
abstract suspend fun fineTuneEvents(fineTuneId: FineTuneId): List<FineTuneEvent>

Deprecated

Use FineTuning instead.

Get fine-grained status updates for fine-tune job.

+
+ +
+
+ + + diff --git a/openai-client/com.aallam.openai.client/-fine-tunes/fine-tune.html b/openai-client/com.aallam.openai.client/-fine-tunes/fine-tune.html new file mode 100644 index 00000000..8e7e6f16 --- /dev/null +++ b/openai-client/com.aallam.openai.client/-fine-tunes/fine-tune.html @@ -0,0 +1,66 @@ + + + + + fineTune + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

fineTune

+
+
abstract suspend fun fineTune(request: FineTuneRequest): FineTune

Deprecated

Use FineTuning instead.

Creates a job that fine-tunes a specified model from a given dataset.

Response includes details of the enqueued job including job status and the name of the fine-tuned models once complete.


abstract suspend fun fineTune(fineTuneId: FineTuneId): FineTune?

Deprecated

Use FineTuning instead.

Gets info about a fine-tune job.

+
+ +
+
+ + + diff --git a/openai-client/com.aallam.openai.client/-fine-tunes/fine-tunes.html b/openai-client/com.aallam.openai.client/-fine-tunes/fine-tunes.html new file mode 100644 index 00000000..bbe7d4c7 --- /dev/null +++ b/openai-client/com.aallam.openai.client/-fine-tunes/fine-tunes.html @@ -0,0 +1,66 @@ + + + + + fineTunes + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

fineTunes

+
+
abstract suspend fun fineTunes(): List<FineTune>

Deprecated

Use FineTuning instead.

List your organization's fine-tuning jobs.

+
+ +
+
+ + + diff --git a/openai-client/com.aallam.openai.client/-fine-tunes/index.html b/openai-client/com.aallam.openai.client/-fine-tunes/index.html new file mode 100644 index 00000000..5c44c516 --- /dev/null +++ b/openai-client/com.aallam.openai.client/-fine-tunes/index.html @@ -0,0 +1,165 @@ + + + + + FineTunes + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

FineTunes

+
interface FineTunes

Manage fine-tuning jobs to tailor a model to your specific training data.

Inheritors

+
+
+
+
+
+

Functions

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
abstract suspend fun cancel(fineTuneId: FineTuneId): FineTune?

Immediately cancel a fine-tune job.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
abstract suspend fun delete(fineTuneModel: ModelId): Boolean

Delete a fine-tuned model. You must have the Owner role in your organization.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
abstract suspend fun fineTune(fineTuneId: FineTuneId): FineTune?

Gets info about a fine-tune job.

abstract suspend fun fineTune(request: FineTuneRequest): FineTune

Creates a job that fine-tunes a specified model from a given dataset.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
abstract suspend fun fineTuneEvents(fineTuneId: FineTuneId): List<FineTuneEvent>

Get fine-grained status updates for fine-tune job.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
abstract fun fineTuneEventsFlow(fineTuneId: FineTuneId): Flow<FineTuneEvent>

Get fine-grained status updates for fine-tune job.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
abstract suspend fun fineTunes(): List<FineTune>

List your organization's fine-tuning jobs.

+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-client/com.aallam.openai.client/-fine-tuning/cancel.html b/openai-client/com.aallam.openai.client/-fine-tuning/cancel.html new file mode 100644 index 00000000..6bd47830 --- /dev/null +++ b/openai-client/com.aallam.openai.client/-fine-tuning/cancel.html @@ -0,0 +1,66 @@ + + + + + cancel + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

cancel

+
+
abstract suspend fun cancel(id: FineTuningId): FineTuningJob?

Immediately cancel a fine-tune job.

Parameters

id

The ID of the fine-tuning job to cancel.

+
+ +
+
+ + + diff --git a/openai-client/com.aallam.openai.client/-fine-tuning/fine-tuning-events.html b/openai-client/com.aallam.openai.client/-fine-tuning/fine-tuning-events.html new file mode 100644 index 00000000..5583ad70 --- /dev/null +++ b/openai-client/com.aallam.openai.client/-fine-tuning/fine-tuning-events.html @@ -0,0 +1,66 @@ + + + + + fineTuningEvents + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

fineTuningEvents

+
+
abstract suspend fun fineTuningEvents(id: FineTuningId, after: String? = null, limit: Int? = null): PaginatedList<FineTuningJobEvent>

Get status updates for a fine-tuning job.

Parameters

id

The ID of the fine-tuning job to get events for.

after

Identifier for the last event from the previous pagination request.

limit

Number of events to retrieve.

+
+ +
+
+ + + diff --git a/openai-client/com.aallam.openai.client/-fine-tuning/fine-tuning-job.html b/openai-client/com.aallam.openai.client/-fine-tuning/fine-tuning-job.html new file mode 100644 index 00000000..8af8177e --- /dev/null +++ b/openai-client/com.aallam.openai.client/-fine-tuning/fine-tuning-job.html @@ -0,0 +1,66 @@ + + + + + fineTuningJob + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

fineTuningJob

+
+
abstract suspend fun fineTuningJob(request: FineTuningRequest): FineTuningJob

Creates a job that fine-tunes a specified model from a given dataset.

Response includes details of the enqueued job including job status and the name of the fine-tuned models once complete.


abstract suspend fun fineTuningJob(id: FineTuningId): FineTuningJob?

Get info about a fine-tuning job.

Parameters

id

The ID of the fine-tuning job.

+
+ +
+
+ + + diff --git a/openai-client/com.aallam.openai.client/-fine-tuning/fine-tuning-jobs.html b/openai-client/com.aallam.openai.client/-fine-tuning/fine-tuning-jobs.html new file mode 100644 index 00000000..2d3e48e8 --- /dev/null +++ b/openai-client/com.aallam.openai.client/-fine-tuning/fine-tuning-jobs.html @@ -0,0 +1,66 @@ + + + + + fineTuningJobs + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

fineTuningJobs

+
+
abstract suspend fun fineTuningJobs(after: String? = null, limit: Int? = null): List<FineTuningJob>

List your organization's fine-tuning jobs.

Parameters

after

Identifier for the last job from the previous pagination request.

limit

Number of fine-tuning jobs to retrieve.

+
+ +
+
+ + + diff --git a/openai-client/com.aallam.openai.client/-fine-tuning/index.html b/openai-client/com.aallam.openai.client/-fine-tuning/index.html new file mode 100644 index 00000000..d1df83f8 --- /dev/null +++ b/openai-client/com.aallam.openai.client/-fine-tuning/index.html @@ -0,0 +1,135 @@ + + + + + FineTuning + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

FineTuning

+
interface FineTuning

Manage fine-tuning jobs to tailor a model to your specific training data.

Inheritors

+
+
+
+
+
+

Functions

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
abstract suspend fun cancel(id: FineTuningId): FineTuningJob?

Immediately cancel a fine-tune job.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
abstract suspend fun fineTuningEvents(id: FineTuningId, after: String? = null, limit: Int? = null): PaginatedList<FineTuningJobEvent>

Get status updates for a fine-tuning job.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
abstract suspend fun fineTuningJob(id: FineTuningId): FineTuningJob?

Get info about a fine-tuning job.

abstract suspend fun fineTuningJob(request: FineTuningRequest): FineTuningJob

Creates a job that fine-tunes a specified model from a given dataset.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
abstract suspend fun fineTuningJobs(after: String? = null, limit: Int? = null): List<FineTuningJob>

List your organization's fine-tuning jobs.

+
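A sketch of the FineTuning operations above, assuming an OpenAI client named openAI is in scope; constructing a FineTuningId from a raw string is an assumption:
val jobs: List<FineTuningJob> = openAI.fineTuningJobs(limit = 10)
val job: FineTuningJob? = openAI.fineTuningJob(FineTuningId("ftjob-abc123"))
val events = openAI.fineTuningEvents(id = FineTuningId("ftjob-abc123"), limit = 20)
val cancelled: FineTuningJob? = openAI.cancel(FineTuningId("ftjob-abc123"))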
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-client/com.aallam.openai.client/-images/image-j-s-o-n.html b/openai-client/com.aallam.openai.client/-images/image-j-s-o-n.html new file mode 100644 index 00000000..f10cd0a1 --- /dev/null +++ b/openai-client/com.aallam.openai.client/-images/image-j-s-o-n.html @@ -0,0 +1,66 @@ + + + + + imageJSON + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

imageJSON

+
+
abstract suspend fun imageJSON(creation: ImageCreation): List<ImageJSON>

Creates an image given a prompt. Get images as base 64 JSON.


abstract suspend fun imageJSON(edit: ImageEdit): List<ImageJSON>

Creates an edited or extended image given an original image and a prompt. Get images as base 64 JSON.


abstract suspend fun imageJSON(variation: ImageVariation): List<ImageJSON>

Creates a variation of a given image. Get images as base 64 JSON.

+
+ +
+
+ + + diff --git a/openai-client/com.aallam.openai.client/-images/image-u-r-l.html b/openai-client/com.aallam.openai.client/-images/image-u-r-l.html new file mode 100644 index 00000000..418ba789 --- /dev/null +++ b/openai-client/com.aallam.openai.client/-images/image-u-r-l.html @@ -0,0 +1,66 @@ + + + + + imageURL + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

imageURL

+
+
abstract suspend fun imageURL(creation: ImageCreation): List<ImageURL>

Creates an image given a prompt. Get images as URLs.


abstract suspend fun imageURL(edit: ImageEdit): List<ImageURL>

Creates an edited or extended image given an original image and a prompt. Get images as URLs.


abstract suspend fun imageURL(variation: ImageVariation): List<ImageURL>

Creates a variation of a given image. Get images as URLs.

+
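A hedged sketch of image generation, assuming an OpenAI client named openAI is in scope; the ImageCreation fields (prompt, n, size) and the ImageSize value are assumptions not shown in this diff:
val images: List<ImageURL> = openAI.imageURL(
    ImageCreation(
        prompt = "A watercolor painting of a lighthouse",
        n = 2,
        size = ImageSize.is1024x1024,   // assumed predefined size value
    )
)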
+ +
+
+ + + diff --git a/openai-client/com.aallam.openai.client/-images/index.html b/openai-client/com.aallam.openai.client/-images/index.html new file mode 100644 index 00000000..abc9dbce --- /dev/null +++ b/openai-client/com.aallam.openai.client/-images/index.html @@ -0,0 +1,105 @@ + + + + + Images + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

Images

+
interface Images

Given a prompt and/or an input image, the model will generate a new image.

Inheritors

+
+
+
+
+
+

Functions

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
abstract suspend fun imageJSON(creation: ImageCreation): List<ImageJSON>

Creates an image given a prompt. Get images as base 64 JSON.

abstract suspend fun imageJSON(edit: ImageEdit): List<ImageJSON>

Creates an edited or extended image given an original image and a prompt. Get images as base 64 JSON.

abstract suspend fun imageJSON(variation: ImageVariation): List<ImageJSON>

Creates a variation of a given image. Get images as base 64 JSON.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
abstract suspend fun imageURL(creation: ImageCreation): List<ImageURL>

Creates an image given a prompt. Get images as URLs.

abstract suspend fun imageURL(edit: ImageEdit): List<ImageURL>

Creates an edited or extended image given an original image and a prompt. Get images as URLs.

abstract suspend fun imageURL(variation: ImageVariation): List<ImageURL>

Creates a variation of a given image. Get images as URLs.

+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-client/com.aallam.openai.client/-logging-config/-logging-config.html b/openai-client/com.aallam.openai.client/-logging-config/-logging-config.html new file mode 100644 index 00000000..1224d8aa --- /dev/null +++ b/openai-client/com.aallam.openai.client/-logging-config/-logging-config.html @@ -0,0 +1,66 @@ + + + + + LoggingConfig + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

LoggingConfig

+
+
constructor(logLevel: LogLevel = LogLevel.Headers, logger: Logger = Logger.Simple, sanitize: Boolean = true)
+
+ +
+
+ + + diff --git a/openai-client/com.aallam.openai.client/-logging-config/index.html b/openai-client/com.aallam.openai.client/-logging-config/index.html new file mode 100644 index 00000000..d748a3d7 --- /dev/null +++ b/openai-client/com.aallam.openai.client/-logging-config/index.html @@ -0,0 +1,139 @@ + + + + + LoggingConfig + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

LoggingConfig

+
class LoggingConfig(val logLevel: LogLevel = LogLevel.Headers, val logger: Logger = Logger.Simple, val sanitize: Boolean = true)

Defines the configuration parameters for logging.

+
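A minimal sketch grounded in the constructor shown above; it logs everything while keeping the authorization header sanitized:
val logging = LoggingConfig(
    logLevel = LogLevel.All,
    logger = Logger.Simple,
    sanitize = true,
)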
+
+
+
+
+

Constructors

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
constructor(logLevel: LogLevel = LogLevel.Headers, logger: Logger = Logger.Simple, sanitize: Boolean = true)
+
+
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+

the logger instance to be used by the HTTP client.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

the level of logging to be used by the HTTP client.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
val sanitize: Boolean = true

Flag indicating whether to sanitize sensitive information (i.e., the authorization header) in the logs.

+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-client/com.aallam.openai.client/-logging-config/log-level.html b/openai-client/com.aallam.openai.client/-logging-config/log-level.html new file mode 100644 index 00000000..814649bd --- /dev/null +++ b/openai-client/com.aallam.openai.client/-logging-config/log-level.html @@ -0,0 +1,66 @@ + + + + + logLevel + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

logLevel

+
+ +
+ +
+
+ + + diff --git a/openai-client/com.aallam.openai.client/-logging-config/logger.html b/openai-client/com.aallam.openai.client/-logging-config/logger.html new file mode 100644 index 00000000..f7e1104e --- /dev/null +++ b/openai-client/com.aallam.openai.client/-logging-config/logger.html @@ -0,0 +1,66 @@ + + + + + logger + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

logger

+
+ +
+ +
+
+ + + diff --git a/openai-client/com.aallam.openai.client/-logging-config/sanitize.html b/openai-client/com.aallam.openai.client/-logging-config/sanitize.html new file mode 100644 index 00000000..5f629bc9 --- /dev/null +++ b/openai-client/com.aallam.openai.client/-logging-config/sanitize.html @@ -0,0 +1,66 @@ + + + + + sanitize + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

sanitize

+
+
val sanitize: Boolean = true
+
+ +
+
+ + + diff --git a/openai-client/com.aallam.openai.client/-models/index.html b/openai-client/com.aallam.openai.client/-models/index.html new file mode 100644 index 00000000..4c10330a --- /dev/null +++ b/openai-client/com.aallam.openai.client/-models/index.html @@ -0,0 +1,105 @@ + + + + + Models + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

Models

+
interface Models

List and describe the various models available in the API. You can refer to the Models documentation to understand what models are available and the differences between them.

Inheritors

+
+
+
+
+
+

Functions

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
abstract suspend fun model(modelId: ModelId): Model

Retrieves a model instance, providing basic information about the model such as the owner and permissioning.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
abstract suspend fun models(): List<Model>

Lists the currently available models, and provides basic information about each one such as the owner and availability.

+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-client/com.aallam.openai.client/-models/model.html b/openai-client/com.aallam.openai.client/-models/model.html new file mode 100644 index 00000000..1168af51 --- /dev/null +++ b/openai-client/com.aallam.openai.client/-models/model.html @@ -0,0 +1,66 @@ + + + + + model + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

model

+
+
abstract suspend fun model(modelId: ModelId): Model

Retrieves a model instance, providing basic information about the model such as the owner and permissioning.

+
+ +
+
+ + + diff --git a/openai-client/com.aallam.openai.client/-models/models.html b/openai-client/com.aallam.openai.client/-models/models.html new file mode 100644 index 00000000..76575b2c --- /dev/null +++ b/openai-client/com.aallam.openai.client/-models/models.html @@ -0,0 +1,66 @@ + + + + + models + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

models

+
+
abstract suspend fun models(): List<Model>

Lists the currently available models, and provides basic information about each one such as the owner and availability.

+
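A sketch using only the signatures documented above, assuming an OpenAI client named openAI is in scope; the ModelId string and the id property read in the loop are placeholders/assumptions:
val models: List<Model> = openAI.models()
models.forEach { println(it.id) }                         // assumed: Model exposes an id
val model: Model = openAI.model(ModelId("gpt-3.5-turbo"))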
+ +
+
+ + + diff --git a/openai-client/com.aallam.openai.client/-moderations/index.html b/openai-client/com.aallam.openai.client/-moderations/index.html new file mode 100644 index 00000000..5316f5ed --- /dev/null +++ b/openai-client/com.aallam.openai.client/-moderations/index.html @@ -0,0 +1,90 @@ + + + + + Moderations + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

Moderations

+
interface Moderations

Given an input text, outputs if the model classifies it as violating OpenAI's content policy.

Inheritors

+
+
+
+
+
+

Functions

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
abstract suspend fun moderations(request: ModerationRequest): TextModeration

Classifies if text violates OpenAI's Content Policy.

+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-client/com.aallam.openai.client/-moderations/moderations.html b/openai-client/com.aallam.openai.client/-moderations/moderations.html new file mode 100644 index 00000000..153bddc4 --- /dev/null +++ b/openai-client/com.aallam.openai.client/-moderations/moderations.html @@ -0,0 +1,66 @@ + + + + + moderations + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

moderations

+
+
abstract suspend fun moderations(request: ModerationRequest): TextModeration

Classifies if text violates OpenAI's Content Policy.

+
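A hedged sketch of a moderation call, assuming an OpenAI client named openAI is in scope; the ModerationRequest parameter name (input) is an assumption not shown in this diff:
val result: TextModeration = openAI.moderations(
    ModerationRequest(input = listOf("Some user-generated text to check"))
)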
+ +
+
+ + + diff --git a/openai-client/com.aallam.openai.client/-open-a-i-config/-open-a-i-config.html b/openai-client/com.aallam.openai.client/-open-a-i-config/-open-a-i-config.html new file mode 100644 index 00000000..66634ac4 --- /dev/null +++ b/openai-client/com.aallam.openai.client/-open-a-i-config/-open-a-i-config.html @@ -0,0 +1,66 @@ + + + + + OpenAIConfig + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

OpenAIConfig

+
+
constructor(token: String, logging: LoggingConfig = LoggingConfig(), timeout: Timeout = Timeout(socket = 30.seconds), organization: String? = null, headers: Map<String, String> = emptyMap(), host: OpenAIHost = OpenAIHost.OpenAI, proxy: ProxyConfig? = null, retry: RetryStrategy = RetryStrategy(), httpClientConfig: HttpClientConfig<*>.() -> Unit = {})

Parameters

token

OpenAI Token

logging

client logging configuration

timeout

http client timeout

headers

extra http headers

organization

OpenAI organization ID

host

OpenAI host configuration.

proxy

HTTP proxy url

retry

rate limit retry configuration

httpClientConfig

additional custom client configuration


constructor(token: String, logLevel: LogLevel = LogLevel.Headers, logger: Logger = Logger.Simple, timeout: Timeout = Timeout(socket = 30.seconds), organization: String? = null, headers: Map<String, String> = emptyMap(), host: OpenAIHost = OpenAIHost.OpenAI, proxy: ProxyConfig? = null, retry: RetryStrategy = RetryStrategy(), httpClientConfig: HttpClientConfig<*>.() -> Unit = {})

Deprecated

Use primary constructor with LoggingConfig instead.

Replace with

import com.aallam.openai.api.logging.Logger
import com.aallam.openai.client.LoggingConfig
OpenAIConfig(token, LoggingConfig(logLevel, logger), timeout, organization, headers, host, proxy, retry)
+
+ +
+
+ + + diff --git a/openai-client/com.aallam.openai.client/-open-a-i-config/headers.html b/openai-client/com.aallam.openai.client/-open-a-i-config/headers.html new file mode 100644 index 00000000..0550c45c --- /dev/null +++ b/openai-client/com.aallam.openai.client/-open-a-i-config/headers.html @@ -0,0 +1,66 @@ + + + + + headers + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

headers

+
+

Parameters

headers

extra http headers

+
+ +
+
+ + + diff --git a/openai-client/com.aallam.openai.client/-open-a-i-config/host.html b/openai-client/com.aallam.openai.client/-open-a-i-config/host.html new file mode 100644 index 00000000..69dedd85 --- /dev/null +++ b/openai-client/com.aallam.openai.client/-open-a-i-config/host.html @@ -0,0 +1,66 @@ + + + + + host + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

host

+
+

Parameters

host

OpenAI host configuration

+
+ +
+
+ + + diff --git a/openai-client/com.aallam.openai.client/-open-a-i-config/http-client-config.html b/openai-client/com.aallam.openai.client/-open-a-i-config/http-client-config.html new file mode 100644 index 00000000..51992538 --- /dev/null +++ b/openai-client/com.aallam.openai.client/-open-a-i-config/http-client-config.html @@ -0,0 +1,66 @@ + + + + + httpClientConfig + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

httpClientConfig

+
+
val httpClientConfig: HttpClientConfig<*>.() -> Unit

Parameters

httpClientConfig

additional custom client configuration

+
+ +
+
+ + + diff --git a/openai-client/com.aallam.openai.client/-open-a-i-config/index.html b/openai-client/com.aallam.openai.client/-open-a-i-config/index.html new file mode 100644 index 00000000..11ddd078 --- /dev/null +++ b/openai-client/com.aallam.openai.client/-open-a-i-config/index.html @@ -0,0 +1,229 @@ + + + + + OpenAIConfig + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

OpenAIConfig

+
class OpenAIConfig(val token: String, val logging: LoggingConfig = LoggingConfig(), val timeout: Timeout = Timeout(socket = 30.seconds), val organization: String? = null, val headers: Map<String, String> = emptyMap(), val host: OpenAIHost = OpenAIHost.OpenAI, val proxy: ProxyConfig? = null, val retry: RetryStrategy = RetryStrategy(), val httpClientConfig: HttpClientConfig<*>.() -> Unit = {})

OpenAI client configuration.

Parameters

token

OpenAI Token

logging

client logging configuration

timeout

http client timeout

headers

extra http headers

organization

OpenAI organization ID

host

OpenAI host configuration.

proxy

HTTP proxy url

retry

rate limit retry configuration

httpClientConfig

additional custom client configuration

+
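A minimal sketch grounded in the OpenAIConfig signature above; only the token value is a placeholder:
import kotlin.time.Duration.Companion.seconds

val config = OpenAIConfig(
    token = "sk-...",
    logging = LoggingConfig(logLevel = LogLevel.Headers),
    timeout = Timeout(socket = 60.seconds),
)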
+
+
+
+
+

Constructors

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
constructor(token: String, logLevel: LogLevel = LogLevel.Headers, logger: Logger = Logger.Simple, timeout: Timeout = Timeout(socket = 30.seconds), organization: String? = null, headers: Map<String, String> = emptyMap(), host: OpenAIHost = OpenAIHost.OpenAI, proxy: ProxyConfig? = null, retry: RetryStrategy = RetryStrategy(), httpClientConfig: HttpClientConfig<*>.() -> Unit = {})
constructor(token: String, logging: LoggingConfig = LoggingConfig(), timeout: Timeout = Timeout(socket = 30.seconds), organization: String? = null, headers: Map<String, String> = emptyMap(), host: OpenAIHost = OpenAIHost.OpenAI, proxy: ProxyConfig? = null, retry: RetryStrategy = RetryStrategy(), httpClientConfig: HttpClientConfig<*>.() -> Unit = {})
+
+
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+ +
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+ +
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
val httpClientConfig: HttpClientConfig<*>.() -> Unit
+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+ +
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
val organization: String? = null
+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
val proxy: ProxyConfig? = null
+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+ +
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+ +
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+ +
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-client/com.aallam.openai.client/-open-a-i-config/logging.html b/openai-client/com.aallam.openai.client/-open-a-i-config/logging.html new file mode 100644 index 00000000..827c07ee --- /dev/null +++ b/openai-client/com.aallam.openai.client/-open-a-i-config/logging.html @@ -0,0 +1,66 @@ + + + + + logging + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

logging

+
+

Parameters

logging

client logging configuration

+
+ +
+
+ + + diff --git a/openai-client/com.aallam.openai.client/-open-a-i-config/organization.html b/openai-client/com.aallam.openai.client/-open-a-i-config/organization.html new file mode 100644 index 00000000..4231e2fb --- /dev/null +++ b/openai-client/com.aallam.openai.client/-open-a-i-config/organization.html @@ -0,0 +1,66 @@ + + + + + organization + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

organization

+
+
val organization: String? = null

Parameters

organization

OpenAI organization ID

+
+ +
+
+ + + diff --git a/openai-client/com.aallam.openai.client/-open-a-i-config/proxy.html b/openai-client/com.aallam.openai.client/-open-a-i-config/proxy.html new file mode 100644 index 00000000..0d470b40 --- /dev/null +++ b/openai-client/com.aallam.openai.client/-open-a-i-config/proxy.html @@ -0,0 +1,66 @@ + + + + + proxy + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

proxy

+
+
val proxy: ProxyConfig? = null

Parameters

proxy

HTTP proxy url

+
+ +
+
+ + + diff --git a/openai-client/com.aallam.openai.client/-open-a-i-config/retry.html b/openai-client/com.aallam.openai.client/-open-a-i-config/retry.html new file mode 100644 index 00000000..14707dce --- /dev/null +++ b/openai-client/com.aallam.openai.client/-open-a-i-config/retry.html @@ -0,0 +1,66 @@ + + + + + retry + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

retry

+
+

Parameters

retry

rate limit retry configuration

+
+ +
+
+ + + diff --git a/openai-client/com.aallam.openai.client/-open-a-i-config/timeout.html b/openai-client/com.aallam.openai.client/-open-a-i-config/timeout.html new file mode 100644 index 00000000..8ea4d617 --- /dev/null +++ b/openai-client/com.aallam.openai.client/-open-a-i-config/timeout.html @@ -0,0 +1,66 @@ + + + + + timeout + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

timeout

+
+

Parameters

timeout

http client timeout

+
+ +
+
+ + + diff --git a/openai-client/com.aallam.openai.client/-open-a-i-config/token.html b/openai-client/com.aallam.openai.client/-open-a-i-config/token.html new file mode 100644 index 00000000..fafbe73b --- /dev/null +++ b/openai-client/com.aallam.openai.client/-open-a-i-config/token.html @@ -0,0 +1,66 @@ + + + + + token + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

token

+
+

Parameters

token

OpenAI Token

+
+ +
+
+ + + diff --git a/openai-client/com.aallam.openai.client/-open-a-i-host/-companion/-open-a-i.html b/openai-client/com.aallam.openai.client/-open-a-i-host/-companion/-open-a-i.html new file mode 100644 index 00000000..0aa5d366 --- /dev/null +++ b/openai-client/com.aallam.openai.client/-open-a-i-host/-companion/-open-a-i.html @@ -0,0 +1,66 @@ + + + + + OpenAI + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

OpenAI

+
+

A pre-configured instance of OpenAIHost with the base URL set as https://api.openai.com/v1/.

+
+ +
+
+ + + diff --git a/openai-client/com.aallam.openai.client/-open-a-i-host/-companion/azure.html b/openai-client/com.aallam.openai.client/-open-a-i-host/-companion/azure.html new file mode 100644 index 00000000..d45a6bd6 --- /dev/null +++ b/openai-client/com.aallam.openai.client/-open-a-i-host/-companion/azure.html @@ -0,0 +1,66 @@ + + + + + azure + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

azure

+
+
fun azure(resourceName: String, deploymentId: String, apiVersion: String): OpenAIHost

Creates an instance of OpenAIHost configured for Azure hosting with the given resource name, deployment ID, and API version.

Parameters

resourceName

The name of your Azure OpenAI Resource.

deploymentId

The name of your model deployment.

apiVersion

The API version to use for this operation. This parameter should follow the YYYY-MM-DD format.

+
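A minimal sketch of the Azure helper grounded in the signature above; the argument values are placeholders:
val azureHost: OpenAIHost = OpenAIHost.azure(
    resourceName = "my-resource",
    deploymentId = "my-gpt-35-deployment",
    apiVersion = "2023-05-15",
)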
+ +
+
+ + + diff --git a/openai-client/com.aallam.openai.client/-open-a-i-host/-companion/index.html b/openai-client/com.aallam.openai.client/-open-a-i-host/-companion/index.html new file mode 100644 index 00000000..5695311a --- /dev/null +++ b/openai-client/com.aallam.openai.client/-open-a-i-host/-companion/index.html @@ -0,0 +1,109 @@ + + + + + Companion + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

Companion

+
object Companion
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+

A pre-configured instance of OpenAIHost with the base URL set as https://api.openai.com/v1/.

+
+
+
+
+
+
+
+

Functions

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
fun azure(resourceName: String, deploymentId: String, apiVersion: String): OpenAIHost

Creates an instance of OpenAIHost configured for Azure hosting with the given resource name, deployment ID, and API version.

+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-client/com.aallam.openai.client/-open-a-i-host/-open-a-i-host.html b/openai-client/com.aallam.openai.client/-open-a-i-host/-open-a-i-host.html new file mode 100644 index 00000000..61c1ef4d --- /dev/null +++ b/openai-client/com.aallam.openai.client/-open-a-i-host/-open-a-i-host.html @@ -0,0 +1,66 @@ + + + + + OpenAIHost + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

OpenAIHost

+
+
constructor(baseUrl: String, queryParams: Map<String, String> = emptyMap())
+
+ +
+
+ + + diff --git a/openai-client/com.aallam.openai.client/-open-a-i-host/base-url.html b/openai-client/com.aallam.openai.client/-open-a-i-host/base-url.html new file mode 100644 index 00000000..28e437bb --- /dev/null +++ b/openai-client/com.aallam.openai.client/-open-a-i-host/base-url.html @@ -0,0 +1,66 @@ + + + + + baseUrl + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

baseUrl

+
+

Base URL configuration. This is the root URL that will be used for all API requests to OpenAI. The URL can include a base path, but in that case, the base path should always end with a /. For example, a valid base URL would be "https://api.openai.com/v1/"

+
+ +
+
+ + + diff --git a/openai-client/com.aallam.openai.client/-open-a-i-host/index.html b/openai-client/com.aallam.openai.client/-open-a-i-host/index.html new file mode 100644 index 00000000..02e644d9 --- /dev/null +++ b/openai-client/com.aallam.openai.client/-open-a-i-host/index.html @@ -0,0 +1,143 @@ + + + + + OpenAIHost + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

OpenAIHost

+
class OpenAIHost(val baseUrl: String, val queryParams: Map<String, String> = emptyMap())

A class to configure the OpenAI host. It provides a mechanism to customize the base URL and additional query parameters used in OpenAI API requests.

+
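A minimal sketch grounded in the constructor shown above; the query parameter is only an illustrative placeholder:
val host = OpenAIHost(
    baseUrl = "https://api.openai.com/v1/",              // base path must end with a trailing slash
    queryParams = mapOf("api-version" to "2023-05-15"),
)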
+
+
+
+
+

Constructors

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
constructor(baseUrl: String, queryParams: Map<String, String> = emptyMap())
+
+
+
+
+
+
+
+

Types

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
object Companion
+
+
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+

Base URL configuration. This is the root URL that will be used for all API requests to OpenAI. The URL can include a base path, but in that case, the base path should always end with a /. For example, a valid base URL would be "https://api.openai.com/v1/"

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

Additional query parameters to be appended to all API requests to OpenAI. These can be used to provide additional configuration or context for the API requests.

+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-client/com.aallam.openai.client/-open-a-i-host/query-params.html b/openai-client/com.aallam.openai.client/-open-a-i-host/query-params.html new file mode 100644 index 00000000..b4af6e29 --- /dev/null +++ b/openai-client/com.aallam.openai.client/-open-a-i-host/query-params.html @@ -0,0 +1,66 @@ + + + + + queryParams + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

queryParams

+
+

Additional query parameters to be appended to all API requests to OpenAI. These can be used to provide additional configuration or context for the API requests.

+
+ +
+
+ + + diff --git a/openai-client/com.aallam.openai.client/-open-a-i.html b/openai-client/com.aallam.openai.client/-open-a-i.html new file mode 100644 index 00000000..413ee573 --- /dev/null +++ b/openai-client/com.aallam.openai.client/-open-a-i.html @@ -0,0 +1,66 @@ + + + + + OpenAI + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

OpenAI

+
+
fun OpenAI(token: String, logging: LoggingConfig = LoggingConfig(), timeout: Timeout = Timeout(socket = 30.seconds), organization: String? = null, headers: Map<String, String> = emptyMap(), host: OpenAIHost = OpenAIHost.OpenAI, proxy: ProxyConfig? = null, retry: RetryStrategy = RetryStrategy(), httpClientConfig: HttpClientConfig<*>.() -> Unit = {}): OpenAI

Creates an instance of OpenAI.

Parameters

token

secret API key

logging

client logging configuration

timeout

http client timeout

headers

extra http headers

organization

OpenAI organization ID

host

OpenAI host configuration.

proxy

HTTP proxy url

retry

rate limit retry configuration

httpClientConfig

additional custom client configuration


fun OpenAI(config: OpenAIConfig): OpenAI

Creates an instance of OpenAI.

Parameters

config

client config

+
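A minimal sketch of both factory functions documented above; the token values are placeholders:
val client: OpenAI = OpenAI(token = "sk-...")
val clientFromConfig: OpenAI = OpenAI(OpenAIConfig(token = "sk-..."))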
+ +
+
+ + + diff --git a/openai-client/com.aallam.openai.client/-open-a-i/index.html b/openai-client/com.aallam.openai.client/-open-a-i/index.html new file mode 100644 index 00000000..0cae57a2 --- /dev/null +++ b/openai-client/com.aallam.openai.client/-open-a-i/index.html @@ -0,0 +1,465 @@ + + + + + OpenAI + + + + + + + + + + + + + + + +
+
+
+
+
+
+ + +
+
+
+
+

Functions

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
abstract suspend fun cancel(fineTuneId: FineTuneId): FineTune?
abstract suspend fun cancel(id: FineTuningId): FineTuningJob?

Immediately cancel a fine-tune job.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
abstract suspend fun chatCompletion(request: ChatCompletionRequest): ChatCompletion

Creates a completion for the chat message.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

Stream variant of chatCompletion.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
expect abstract fun close()

Closes underlying resources

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
abstract suspend fun completion(request: CompletionRequest): TextCompletion

This is the main endpoint of the API. Returns the predicted completion for the given prompt, and can also return the probabilities of alternative tokens at each position if requested.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
abstract fun completions(request: CompletionRequest): Flow<TextCompletion>

Stream variant of completion.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
abstract suspend fun delete(fileId: FileId): Boolean

Delete a file. Currently, only organization owners can delete files.

abstract suspend fun delete(fineTuneModel: ModelId): Boolean

Delete a fine-tuned model. You must have the Owner role in your organization.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
abstract suspend fun download(fileId: FileId): ByteArray

Returns the contents of the specified fileId.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
abstract suspend fun edit(request: EditsRequest): Edit

Creates a new edit for the provided input, instruction, and parameters.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
abstract suspend fun embeddings(request: EmbeddingRequest): EmbeddingResponse

Creates an embedding vector representing the input text.
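A sketch of an embeddings call; EmbeddingRequest's parameter names (model, input) are assumed and the model id is only an example.

// Sketch: request parameter names are assumed, not documented on this page.
suspend fun embed(openAI: OpenAI, texts: List<String>): EmbeddingResponse =
    openAI.embeddings(
        EmbeddingRequest(
            model = ModelId("text-embedding-ada-002"), // example model id
            input = texts,
        )
    )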

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
abstract suspend fun file(fileId: FileId): File?

Returns information about a specific file.

abstract suspend fun file(request: FileUpload): File

Upload a file that contains document(s) to be used across various endpoints/features. Currently, the size of all the files uploaded by one organization can be up to 1 GB.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
abstract suspend fun files(): List<File>

Returns a list of files that belong to the user's organization.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
abstract suspend fun fineTune(fineTuneId: FineTuneId): FineTune?

Gets info about a fine-tune job.

abstract suspend fun fineTune(request: FineTuneRequest): FineTune

Creates a job that fine-tunes a specified model from a given dataset.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
abstract suspend fun fineTuneEvents(fineTuneId: FineTuneId): List<FineTuneEvent>

Get fine-grained status updates for a fine-tune job.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
abstract fun fineTuneEventsFlow(fineTuneId: FineTuneId): Flow<FineTuneEvent>

Get fine-grained status updates for a fine-tune job.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
abstract suspend fun fineTunes(): List<FineTune>

List your organization's fine-tuning jobs.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
abstract suspend fun fineTuningEvents(id: FineTuningId, after: String? = null, limit: Int? = null): PaginatedList<FineTuningJobEvent>

Get status updates for a fine-tuning job.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
abstract suspend fun fineTuningJob(id: FineTuningId): FineTuningJob?

Get info about a fine-tuning job.

abstract suspend fun fineTuningJob(request: FineTuningRequest): FineTuningJob

Creates a job that fine-tunes a specified model from a given dataset.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
abstract suspend fun fineTuningJobs(after: String? = null, limit: Int? = null): List<FineTuningJob>

List your organization's fine-tuning jobs.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
abstract suspend fun imageJSON(creation: ImageCreation): List<ImageJSON>

Creates an image given a prompt. Get images as Base64-encoded JSON.

abstract suspend fun imageJSON(edit: ImageEdit): List<ImageJSON>

Creates an edited or extended image given an original image and a prompt. Get images as Base64-encoded JSON.

abstract suspend fun imageJSON(variation: ImageVariation): List<ImageJSON>

Creates a variation of a given image. Get images as Base64-encoded JSON.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
abstract suspend fun imageURL(creation: ImageCreation): List<ImageURL>

Creates an image given a prompt. Get images as URLs.

abstract suspend fun imageURL(edit: ImageEdit): List<ImageURL>

Creates an edited or extended image given an original image and a prompt. Get images as URLs.

abstract suspend fun imageURL(variation: ImageVariation): List<ImageURL>

Creates a variation of a given image. Get images as URLs.
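A sketch of requesting generated image URLs; ImageCreation's parameter names (prompt, n, size) and the ImageSize companion value are assumptions based on the image types referenced above.

// Sketch: ImageCreation parameter names and ImageSize.is1024x1024 are assumed.
suspend fun generateImages(openAI: OpenAI): List<ImageURL> =
    openAI.imageURL(
        ImageCreation(
            prompt = "A watercolor painting of a lighthouse at dusk",
            n = 2,
            size = ImageSize.is1024x1024,
        )
    )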

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
abstract suspend fun model(modelId: ModelId): Model

Retrieves a model instance, providing basic information about the model such as the owner and permissioning.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
abstract suspend fun models(): List<Model>

Lists the currently available models, and provides basic information about each one such as the owner and availability.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
abstract suspend fun moderations(request: ModerationRequest): TextModeration

Classifies if text violates OpenAI's Content Policy.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
abstract suspend fun transcription(request: TranscriptionRequest): Transcription

Transcribes audio into the input language.
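A sketch of a transcription call; building the FileSource from a name and an okio Source is an assumption about its constructor, and whisper-1 is simply the model id named elsewhere in this module.

// Sketch: FileSource construction is assumed; audioSource is any okio.Source over the audio file.
suspend fun transcribe(openAI: OpenAI, audioSource: okio.Source): Transcription =
    openAI.transcription(
        TranscriptionRequest(
            audio = FileSource(name = "meeting.wav", source = audioSource),
            model = ModelId("whisper-1"),
        )
    )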

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
abstract suspend fun translation(request: TranslationRequest): Translation

Translates audio into English.

+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-client/com.aallam.openai.client/-proxy-config/-http/-http.html b/openai-client/com.aallam.openai.client/-proxy-config/-http/-http.html new file mode 100644 index 00000000..2ef80c24 --- /dev/null +++ b/openai-client/com.aallam.openai.client/-proxy-config/-http/-http.html @@ -0,0 +1,66 @@ + + + + + Http + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

Http

+
+
constructor(url: String)
+
+ +
+
+ + + diff --git a/openai-client/com.aallam.openai.client/-proxy-config/-http/index.html b/openai-client/com.aallam.openai.client/-proxy-config/-http/index.html new file mode 100644 index 00000000..14fa6c0c --- /dev/null +++ b/openai-client/com.aallam.openai.client/-proxy-config/-http/index.html @@ -0,0 +1,109 @@ + + + + + Http + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

Http

+
class Http(val url: String) : ProxyConfig

Creates an HTTP proxy from url.

+
+
+
+
+
+

Constructors

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
constructor(url: String)
+
+
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
val url: String
+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-client/com.aallam.openai.client/-proxy-config/-http/url.html b/openai-client/com.aallam.openai.client/-proxy-config/-http/url.html new file mode 100644 index 00000000..4b7e2c19 --- /dev/null +++ b/openai-client/com.aallam.openai.client/-proxy-config/-http/url.html @@ -0,0 +1,66 @@ + + + + + url + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

url

+
+
val url: String
+
+ +
+
+ + + diff --git a/openai-client/com.aallam.openai.client/-proxy-config/-socks/-socks.html b/openai-client/com.aallam.openai.client/-proxy-config/-socks/-socks.html new file mode 100644 index 00000000..bae8b4a2 --- /dev/null +++ b/openai-client/com.aallam.openai.client/-proxy-config/-socks/-socks.html @@ -0,0 +1,66 @@ + + + + + Socks + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

Socks

+
+
constructor(host: String, port: Int)
+
+ +
+
+ + + diff --git a/openai-client/com.aallam.openai.client/-proxy-config/-socks/host.html b/openai-client/com.aallam.openai.client/-proxy-config/-socks/host.html new file mode 100644 index 00000000..7449e134 --- /dev/null +++ b/openai-client/com.aallam.openai.client/-proxy-config/-socks/host.html @@ -0,0 +1,66 @@ + + + + + host + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

host

+
+ +
+ +
+
+ + + diff --git a/openai-client/com.aallam.openai.client/-proxy-config/-socks/index.html b/openai-client/com.aallam.openai.client/-proxy-config/-socks/index.html new file mode 100644 index 00000000..89179f4e --- /dev/null +++ b/openai-client/com.aallam.openai.client/-proxy-config/-socks/index.html @@ -0,0 +1,124 @@ + + + + + Socks + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

Socks

+
class Socks(val host: String, val port: Int) : ProxyConfig

Creates a SOCKS proxy from host and port.

+
+
+
+
+
+

Constructors

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
constructor(host: String, port: Int)
+
+
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+ +
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
val port: Int
+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-client/com.aallam.openai.client/-proxy-config/-socks/port.html b/openai-client/com.aallam.openai.client/-proxy-config/-socks/port.html new file mode 100644 index 00000000..d7ee2bce --- /dev/null +++ b/openai-client/com.aallam.openai.client/-proxy-config/-socks/port.html @@ -0,0 +1,66 @@ + + + + + port + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

port

+
+
val port: Int
+
+ +
+
+ + + diff --git a/openai-client/com.aallam.openai.client/-proxy-config/index.html b/openai-client/com.aallam.openai.client/-proxy-config/index.html new file mode 100644 index 00000000..8b066c7c --- /dev/null +++ b/openai-client/com.aallam.openai.client/-proxy-config/index.html @@ -0,0 +1,105 @@ + + + + + ProxyConfig + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

ProxyConfig

+
interface ProxyConfig

Proxy configuration.

Inheritors

+
+
+
+
+
+

Types

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
class Http(val url: String) : ProxyConfig

Creates an HTTP proxy from url.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
class Socks(val host: String, val port: Int) : ProxyConfig

Creates a SOCKS proxy from host and port.
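For illustration, either variant can be passed as the proxy parameter of the client configuration; the addresses below are placeholders.

// Placeholders only: substitute your proxy's address and port.
val httpProxy: ProxyConfig = ProxyConfig.Http(url = "http://127.0.0.1:8080")
val socksProxy: ProxyConfig = ProxyConfig.Socks(host = "127.0.0.1", port = 1080)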

+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-client/com.aallam.openai.client/-retry-strategy/-retry-strategy.html b/openai-client/com.aallam.openai.client/-retry-strategy/-retry-strategy.html new file mode 100644 index 00000000..103a5490 --- /dev/null +++ b/openai-client/com.aallam.openai.client/-retry-strategy/-retry-strategy.html @@ -0,0 +1,66 @@ + + + + + RetryStrategy + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

RetryStrategy

+
+
constructor(maxRetries: Int = 3, base: Double = 2.0, maxDelay: Duration = 60.seconds)

Parameters

maxRetries

the maximum number of retries to perform for a request

base

base value used to compute the delay between retries

maxDelay

max retry delay

+
+ +
+
+ + + diff --git a/openai-client/com.aallam.openai.client/-retry-strategy/base.html b/openai-client/com.aallam.openai.client/-retry-strategy/base.html new file mode 100644 index 00000000..923ab6a2 --- /dev/null +++ b/openai-client/com.aallam.openai.client/-retry-strategy/base.html @@ -0,0 +1,66 @@ + + + + + base + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

base

+
+
val base: Double = 2.0

Parameters

base

base value used to compute the delay between retries

+
+ +
+
+ + + diff --git a/openai-client/com.aallam.openai.client/-retry-strategy/index.html b/openai-client/com.aallam.openai.client/-retry-strategy/index.html new file mode 100644 index 00000000..eaa914f4 --- /dev/null +++ b/openai-client/com.aallam.openai.client/-retry-strategy/index.html @@ -0,0 +1,139 @@ + + + + + RetryStrategy + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

RetryStrategy

+
class RetryStrategy(val maxRetries: Int = 3, val base: Double = 2.0, val maxDelay: Duration = 60.seconds)

Specifies the retry strategy.

Parameters

maxRetries

the maximum number of retries to perform for a request

base

base value used to compute the delay between retries

maxDelay

max retry delay
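A sketch of a more patient strategy than the defaults; the values are arbitrary examples.

import kotlin.time.Duration.Companion.seconds

// Arbitrary example: up to 5 retries, base 2.0, delays capped at 90 seconds.
val retry = RetryStrategy(maxRetries = 5, base = 2.0, maxDelay = 90.seconds)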

+
+
+
+
+
+

Constructors

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
constructor(maxRetries: Int = 3, base: Double = 2.0, maxDelay: Duration = 60.seconds)
+
+
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
val base: Double = 2.0
+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+ +
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
val maxRetries: Int = 3
+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-client/com.aallam.openai.client/-retry-strategy/max-delay.html b/openai-client/com.aallam.openai.client/-retry-strategy/max-delay.html new file mode 100644 index 00000000..593c6312 --- /dev/null +++ b/openai-client/com.aallam.openai.client/-retry-strategy/max-delay.html @@ -0,0 +1,66 @@ + + + + + maxDelay + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

maxDelay

+
+

Parameters

maxDelay

max retry delay

+
+ +
+
+ + + diff --git a/openai-client/com.aallam.openai.client/-retry-strategy/max-retries.html b/openai-client/com.aallam.openai.client/-retry-strategy/max-retries.html new file mode 100644 index 00000000..9bf2d4a4 --- /dev/null +++ b/openai-client/com.aallam.openai.client/-retry-strategy/max-retries.html @@ -0,0 +1,66 @@ + + + + + maxRetries + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

maxRetries

+
+
val maxRetries: Int = 3

Parameters

maxRetries

the maximum number of retries to perform for a request

+
+ +
+
+ + + diff --git a/openai-client/com.aallam.openai.client/index.html b/openai-client/com.aallam.openai.client/index.html new file mode 100644 index 00000000..e04e4cc7 --- /dev/null +++ b/openai-client/com.aallam.openai.client/index.html @@ -0,0 +1,368 @@ + + + + + com.aallam.openai.client + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

Package-level declarations

+
+
+
+
+
+

Types

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
interface Audio

Learn how to turn audio into text.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
interface Chat

Given a chat conversation, the model will return a chat completion response.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
+
+
expect interface Closeable

Defines a closeable resource. This will be replaced by AutoCloseable once it becomes stable.

actual interface Closeable
actual typealias Closeable = AutoCloseable
actual interface Closeable
+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
interface Completions

Given a prompt, the model will return one or more predicted completions, and can also return the probabilities of alternative tokens at each position.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
interface Edits

Given a prompt and an instruction, the model will return an edited version of the prompt.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
interface Embeddings

Get a vector representation of a given input that can be easily consumed by machine learning models and algorithms.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
interface Files

Files are used to upload documents that can be used across features like Answers, Searches, and Classifications.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
interface FineTunes

Manage fine-tuning jobs to tailor a model to your specific training data.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
interface FineTuning

Manage fine-tuning jobs to tailor a model to your specific training data.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
interface Images

Given a prompt and/or an input image, the model will generate a new image.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
class LoggingConfig(val logLevel: LogLevel = LogLevel.Headers, val logger: Logger = Logger.Simple, val sanitize: Boolean = true)

Defines the configuration parameters for logging.
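A sketch using values documented in this module; the exact effect of sanitize is assumed to be masking sensitive header values in the logs.

// Sketch: verbose logging, with header sanitization left at its default of true.
val logging = LoggingConfig(logLevel = LogLevel.All, logger = Logger.Simple, sanitize = true)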

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
interface Models

List and describe the various models available in the API. You can refer to the Models documentation to understand what models are available and the differences between them.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
interface Moderations

Given an input text, outputs if the model classifies it as violating OpenAI's content policy.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+ +
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
class OpenAIConfig(val token: String, val logging: LoggingConfig = LoggingConfig(), val timeout: Timeout = Timeout(socket = 30.seconds), val organization: String? = null, val headers: Map<String, String> = emptyMap(), val host: OpenAIHost = OpenAIHost.OpenAI, val proxy: ProxyConfig? = null, val retry: RetryStrategy = RetryStrategy(), val httpClientConfig: HttpClientConfig<*>.() -> Unit = {})

OpenAI client configuration.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
class OpenAIHost(val baseUrl: String, val queryParams: Map<String, String> = emptyMap())

A class to configure the OpenAI host. It provides a mechanism to customize the base URL and additional query parameters used in OpenAI API requests.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
interface ProxyConfig

Proxy configuration.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
class RetryStrategy(val maxRetries: Int = 3, val base: Double = 2.0, val maxDelay: Duration = 60.seconds)

Specifies the retry strategy.

+
+
+
+
+
+
+
+

Functions

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
fun OpenAI(config: OpenAIConfig): OpenAI
fun OpenAI(token: String, logging: LoggingConfig = LoggingConfig(), timeout: Timeout = Timeout(socket = 30.seconds), organization: String? = null, headers: Map<String, String> = emptyMap(), host: OpenAIHost = OpenAIHost.OpenAI, proxy: ProxyConfig? = null, retry: RetryStrategy = RetryStrategy(), httpClientConfig: HttpClientConfig<*>.() -> Unit = {}): OpenAI

Creates an instance of OpenAI.

+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-client/index.html b/openai-client/index.html new file mode 100644 index 00000000..4cd0af13 --- /dev/null +++ b/openai-client/index.html @@ -0,0 +1,109 @@ + + + + + openai-client + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

openai-client

+
+

Packages

+
+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
common
+
js
+ +
native
+
+
+
+
+
+
+ +
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
common
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-client/navigation.html b/openai-client/navigation.html new file mode 100644 index 00000000..65cf8e7d --- /dev/null +++ b/openai-client/navigation.html @@ -0,0 +1,967 @@ +
+
+ +
+ +
+
+ Audio +
+
+
+
+ Chat +
+
+
+
+ Closeable +
+
+
+ +
+
+
+ Edits +
+
+
+ +
+
+
+ Files +
+
+
+
+ FineTunes +
+
+
+ +
+
+
+ Images +
+
+
+ +
+
+
+ Models +
+
+
+ +
+
+
+ OpenAI +
+
+
+
+ OpenAI() +
+
+
+ +
+
+ +
+
+ Companion +
+
+
+
+ +
+
+ Http +
+
+
+
+ Socks +
+
+
+
+ +
+
+ +
+
+ +
+ +
+ +
+ +
+ +
+
+ +
+
+
+ OpenAIDsl +
+
+
+ +
+ +
+ +
+
+
+ ChatChunk +
+
+
+ +
+ + + + + + + +
+
+ ChatDelta +
+
+
+ +
+
+ +
+ + +
+
+ ChatRole +
+
+
+ Companion +
+
+
+
+ +
+
+ +
+
+ Companion +
+
+
+
+ Default +
+
+
+
+ Named +
+
+
+ +
+ +
+
+ Companion +
+
+ +
+
+ +
+ +
+ +
+
+ +
+
+ Companion +
+
+
+
+ +
+
+ +
+
+ +
+
+
+ Status +
+
+
+ Companion +
+
+
+
+
+ Usage +
+
+
+
+ +
+
+ Edit +
+
+
+ +
+
+ +
+ +
+ + +
+ +
+
+ File +
+
+
+
+ FileId +
+
+
+ +
+
+ +
+ +
+ +
+
+ +
+
+ +
+ +
+
+ Purpose +
+
+
+
+ +
+
+ FineTune +
+
+
+ +
+
+ +
+
+ +
+ + +
+ +
+
+
+ +
+
+ ErrorInfo +
+
+
+ +
+
+ +
+ + + + +
+ +
+
+ NEpochs +
+
+
+ Companion +
+
+
+
+
+
+ Level +
+
+
+ Companion +
+
+
+
+ +
+ +
+ +
+
+ +
+ +
+
+ ImageEdit +
+
+
+ +
+ +
+
+ ImageJSON +
+
+
+
+ ImageSize +
+
+
+ Companion +
+
+
+
+
+ ImageURL +
+
+
+ +
+ + +
+ +
+ +
+
+ Logger +
+
+
+ Default +
+
+
+
+ Simple +
+
+
+
+ Empty +
+
+
+
+
+ LogLevel +
+
+
+ All +
+
+
+
+ Headers +
+
+
+
+ Body +
+
+
+
+ Info +
+
+
+
+ None +
+
+
+
+
+ +
+
+ Model +
+
+
+
+ ModelId +
+
+
+ +
+
+ +
+
diff --git a/openai-core/com.aallam.openai.api.audio/-audio-response-format/-audio-response-format.html b/openai-core/com.aallam.openai.api.audio/-audio-response-format/-audio-response-format.html new file mode 100644 index 00000000..8c0cf966 --- /dev/null +++ b/openai-core/com.aallam.openai.api.audio/-audio-response-format/-audio-response-format.html @@ -0,0 +1,66 @@ + + + + + AudioResponseFormat + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

AudioResponseFormat

+
+
constructor(value: String)
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.audio/-audio-response-format/-companion/-json.html b/openai-core/com.aallam.openai.api.audio/-audio-response-format/-companion/-json.html new file mode 100644 index 00000000..e79f3c64 --- /dev/null +++ b/openai-core/com.aallam.openai.api.audio/-audio-response-format/-companion/-json.html @@ -0,0 +1,66 @@ + + + + + Json + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

Json

+
+ +
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.audio/-audio-response-format/-companion/-srt.html b/openai-core/com.aallam.openai.api.audio/-audio-response-format/-companion/-srt.html new file mode 100644 index 00000000..c7aa0389 --- /dev/null +++ b/openai-core/com.aallam.openai.api.audio/-audio-response-format/-companion/-srt.html @@ -0,0 +1,66 @@ + + + + + Srt + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

Srt

+
+ +
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.audio/-audio-response-format/-companion/-text.html b/openai-core/com.aallam.openai.api.audio/-audio-response-format/-companion/-text.html new file mode 100644 index 00000000..a2b04a8d --- /dev/null +++ b/openai-core/com.aallam.openai.api.audio/-audio-response-format/-companion/-text.html @@ -0,0 +1,66 @@ + + + + + Text + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

Text

+
+ +
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.audio/-audio-response-format/-companion/-verbose-json.html b/openai-core/com.aallam.openai.api.audio/-audio-response-format/-companion/-verbose-json.html new file mode 100644 index 00000000..2ea45bc1 --- /dev/null +++ b/openai-core/com.aallam.openai.api.audio/-audio-response-format/-companion/-verbose-json.html @@ -0,0 +1,66 @@ + + + + + VerboseJson + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

VerboseJson

+
+ +
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.audio/-audio-response-format/-companion/-vtt.html b/openai-core/com.aallam.openai.api.audio/-audio-response-format/-companion/-vtt.html new file mode 100644 index 00000000..919bdb72 --- /dev/null +++ b/openai-core/com.aallam.openai.api.audio/-audio-response-format/-companion/-vtt.html @@ -0,0 +1,66 @@ + + + + + Vtt + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

Vtt

+
+ +
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.audio/-audio-response-format/-companion/index.html b/openai-core/com.aallam.openai.api.audio/-audio-response-format/-companion/index.html new file mode 100644 index 00000000..1475b3a6 --- /dev/null +++ b/openai-core/com.aallam.openai.api.audio/-audio-response-format/-companion/index.html @@ -0,0 +1,150 @@ + + + + + Companion + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

Companion

+
object Companion
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+ +
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+ +
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+ +
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+ +
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+ +
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.audio/-audio-response-format/index.html b/openai-core/com.aallam.openai.api.audio/-audio-response-format/index.html new file mode 100644 index 00000000..c4a47f49 --- /dev/null +++ b/openai-core/com.aallam.openai.api.audio/-audio-response-format/index.html @@ -0,0 +1,128 @@ + + + + + AudioResponseFormat + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

AudioResponseFormat

+
@Serializable
value class AudioResponseFormat(val value: String)
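A brief sketch: a predefined value from the Companion can be used, or a raw string can be wrapped directly (the raw string shown is assumed to match the predefined constant).

val verbose = AudioResponseFormat.VerboseJson // predefined value from the Companion
val custom = AudioResponseFormat("vtt")       // presumably equivalent to AudioResponseFormat.Vtt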
+
+
+
+
+
+

Constructors

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
constructor(value: String)
+
+
+
+
+
+
+
+

Types

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
object Companion
+
+
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+ +
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.audio/-audio-response-format/value.html b/openai-core/com.aallam.openai.api.audio/-audio-response-format/value.html new file mode 100644 index 00000000..c0f8e2ca --- /dev/null +++ b/openai-core/com.aallam.openai.api.audio/-audio-response-format/value.html @@ -0,0 +1,66 @@ + + + + + value + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

value

+
+ +
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.audio/-segment/-segment.html b/openai-core/com.aallam.openai.api.audio/-segment/-segment.html new file mode 100644 index 00000000..ada6a42c --- /dev/null +++ b/openai-core/com.aallam.openai.api.audio/-segment/-segment.html @@ -0,0 +1,66 @@ + + + + + Segment + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

Segment

+
+
constructor(id: Int, seek: Int, start: Double, end: Double, text: String, tokens: List<Int>, temperature: Double, avgLogprob: Double, compressionRatio: Double, noSpeechProb: Double, transient: Boolean? = null)
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.audio/-segment/avg-logprob.html b/openai-core/com.aallam.openai.api.audio/-segment/avg-logprob.html new file mode 100644 index 00000000..bbed131e --- /dev/null +++ b/openai-core/com.aallam.openai.api.audio/-segment/avg-logprob.html @@ -0,0 +1,66 @@ + + + + + avgLogprob + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

avgLogprob

+
+ +
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.audio/-segment/compression-ratio.html b/openai-core/com.aallam.openai.api.audio/-segment/compression-ratio.html new file mode 100644 index 00000000..79ee41eb --- /dev/null +++ b/openai-core/com.aallam.openai.api.audio/-segment/compression-ratio.html @@ -0,0 +1,66 @@ + + + + + compressionRatio + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

compressionRatio

+
+ +
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.audio/-segment/end.html b/openai-core/com.aallam.openai.api.audio/-segment/end.html new file mode 100644 index 00000000..d9898699 --- /dev/null +++ b/openai-core/com.aallam.openai.api.audio/-segment/end.html @@ -0,0 +1,66 @@ + + + + + end + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

end

+
+
val end: Double
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.audio/-segment/id.html b/openai-core/com.aallam.openai.api.audio/-segment/id.html new file mode 100644 index 00000000..8be3b562 --- /dev/null +++ b/openai-core/com.aallam.openai.api.audio/-segment/id.html @@ -0,0 +1,66 @@ + + + + + id + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

id

+
+
val id: Int
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.audio/-segment/index.html b/openai-core/com.aallam.openai.api.audio/-segment/index.html new file mode 100644 index 00000000..2ac0e025 --- /dev/null +++ b/openai-core/com.aallam.openai.api.audio/-segment/index.html @@ -0,0 +1,259 @@ + + + + + Segment + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

Segment

+
@Serializable
data class Segment(val id: Int, val seek: Int, val start: Double, val end: Double, val text: String, val tokens: List<Int>, val temperature: Double, val avgLogprob: Double, val compressionRatio: Double, val noSpeechProb: Double, val transient: Boolean? = null)
+
+
+
+
+
+

Constructors

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
constructor(id: Int, seek: Int, start: Double, end: Double, text: String, tokens: List<Int>, temperature: Double, avgLogprob: Double, compressionRatio: Double, noSpeechProb: Double, transient: Boolean? = null)
+
+
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+ +
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+ +
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
val end: Double
+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
val id: Int
+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+ +
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
val seek: Int
+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+ +
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+ +
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+ +
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+ +
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
val transient: Boolean? = null
+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.audio/-segment/no-speech-prob.html b/openai-core/com.aallam.openai.api.audio/-segment/no-speech-prob.html new file mode 100644 index 00000000..5956d212 --- /dev/null +++ b/openai-core/com.aallam.openai.api.audio/-segment/no-speech-prob.html @@ -0,0 +1,66 @@ + + + + + noSpeechProb + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

noSpeechProb

+
+ +
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.audio/-segment/seek.html b/openai-core/com.aallam.openai.api.audio/-segment/seek.html new file mode 100644 index 00000000..c916e2b3 --- /dev/null +++ b/openai-core/com.aallam.openai.api.audio/-segment/seek.html @@ -0,0 +1,66 @@ + + + + + seek + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

seek

+
+
val seek: Int
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.audio/-segment/start.html b/openai-core/com.aallam.openai.api.audio/-segment/start.html new file mode 100644 index 00000000..d163f538 --- /dev/null +++ b/openai-core/com.aallam.openai.api.audio/-segment/start.html @@ -0,0 +1,66 @@ + + + + + start + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

start

+
+ +
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.audio/-segment/temperature.html b/openai-core/com.aallam.openai.api.audio/-segment/temperature.html new file mode 100644 index 00000000..1de4d8e5 --- /dev/null +++ b/openai-core/com.aallam.openai.api.audio/-segment/temperature.html @@ -0,0 +1,66 @@ + + + + + temperature + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

temperature

+
+ +
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.audio/-segment/text.html b/openai-core/com.aallam.openai.api.audio/-segment/text.html new file mode 100644 index 00000000..61664717 --- /dev/null +++ b/openai-core/com.aallam.openai.api.audio/-segment/text.html @@ -0,0 +1,66 @@ + + + + + text + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

text

+
+ +
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.audio/-segment/tokens.html b/openai-core/com.aallam.openai.api.audio/-segment/tokens.html new file mode 100644 index 00000000..65d84318 --- /dev/null +++ b/openai-core/com.aallam.openai.api.audio/-segment/tokens.html @@ -0,0 +1,66 @@ + + + + + tokens + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

tokens

+
+ +
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.audio/-segment/transient.html b/openai-core/com.aallam.openai.api.audio/-segment/transient.html new file mode 100644 index 00000000..322f64d2 --- /dev/null +++ b/openai-core/com.aallam.openai.api.audio/-segment/transient.html @@ -0,0 +1,66 @@ + + + + + transient + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

transient

+
+
val transient: Boolean? = null
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.audio/-transcription-request-builder/-transcription-request-builder.html b/openai-core/com.aallam.openai.api.audio/-transcription-request-builder/-transcription-request-builder.html new file mode 100644 index 00000000..b41fffa7 --- /dev/null +++ b/openai-core/com.aallam.openai.api.audio/-transcription-request-builder/-transcription-request-builder.html @@ -0,0 +1,66 @@ + + + + + TranscriptionRequestBuilder + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

TranscriptionRequestBuilder

+
+
constructor()
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.audio/-transcription-request-builder/audio.html b/openai-core/com.aallam.openai.api.audio/-transcription-request-builder/audio.html new file mode 100644 index 00000000..4a3bb0fc --- /dev/null +++ b/openai-core/com.aallam.openai.api.audio/-transcription-request-builder/audio.html @@ -0,0 +1,66 @@ + + + + + audio + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

audio

+
+

The audio file to transcribe, in one of these formats: mp3, mp4, mpeg, mpga, m4a, wav, or webm.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.audio/-transcription-request-builder/build.html b/openai-core/com.aallam.openai.api.audio/-transcription-request-builder/build.html new file mode 100644 index 00000000..7adbfb4f --- /dev/null +++ b/openai-core/com.aallam.openai.api.audio/-transcription-request-builder/build.html @@ -0,0 +1,66 @@ + + + + + build + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

build

+
+

Builder of TranscriptionRequest instances.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.audio/-transcription-request-builder/index.html b/openai-core/com.aallam.openai.api.audio/-transcription-request-builder/index.html new file mode 100644 index 00000000..a385f602 --- /dev/null +++ b/openai-core/com.aallam.openai.api.audio/-transcription-request-builder/index.html @@ -0,0 +1,203 @@ + + + + + TranscriptionRequestBuilder + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

TranscriptionRequestBuilder

+ +
+
+
+
+
+

Constructors

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
constructor()
+
+
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+

The audio file to transcribe, in one of these formats: mp3, mp4, mpeg, mpga, m4a, wav, or webm.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

The language of the input audio. Supplying the input language in ISO-639-1 format will improve accuracy and latency.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

ID of the model to use. Only whisper-1 is currently available.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

An optional text to guide the model's style or continue a previous audio segment. The prompt should match the audio language.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

The format of the transcript output, in one of these options: json, text, srt, verbose_json, or vtt.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

The sampling temperature, between 0 and 1. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic. If set to 0, the model will use log probability to automatically increase the temperature until certain thresholds are hit.

+
+
+
+
+
+
+
+

Functions

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+

Builder of TranscriptionRequest instances.
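A sketch of using the builder directly; the property types (FileSource, ModelId, AudioResponseFormat) are assumed to mirror the corresponding TranscriptionRequest fields, and audioSource stands for any okio.Source over the audio file.

// Sketch: property assignments mirror TranscriptionRequest's fields; values are examples.
val request = TranscriptionRequestBuilder().apply {
    audio = FileSource(name = "interview.mp3", source = audioSource)
    model = ModelId("whisper-1")
    responseFormat = AudioResponseFormat.VerboseJson
    language = "en"
}.build()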

+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.audio/-transcription-request-builder/language.html b/openai-core/com.aallam.openai.api.audio/-transcription-request-builder/language.html new file mode 100644 index 00000000..12fefe44 --- /dev/null +++ b/openai-core/com.aallam.openai.api.audio/-transcription-request-builder/language.html @@ -0,0 +1,66 @@ + + + + + language + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

language

+
+

The language of the input audio. Supplying the input language in ISO-639-1 format will improve accuracy and latency.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.audio/-transcription-request-builder/model.html b/openai-core/com.aallam.openai.api.audio/-transcription-request-builder/model.html new file mode 100644 index 00000000..61046c98 --- /dev/null +++ b/openai-core/com.aallam.openai.api.audio/-transcription-request-builder/model.html @@ -0,0 +1,66 @@ + + + + + model + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

model

+
+

ID of the model to use. Only whisper-1 is currently available.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.audio/-transcription-request-builder/prompt.html b/openai-core/com.aallam.openai.api.audio/-transcription-request-builder/prompt.html new file mode 100644 index 00000000..373b763b --- /dev/null +++ b/openai-core/com.aallam.openai.api.audio/-transcription-request-builder/prompt.html @@ -0,0 +1,66 @@ + + + + + prompt + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

prompt

+
+

An optional text to guide the model's style or continue a previous audio segment. The prompt should match the audio language.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.audio/-transcription-request-builder/response-format.html b/openai-core/com.aallam.openai.api.audio/-transcription-request-builder/response-format.html new file mode 100644 index 00000000..b8b1d197 --- /dev/null +++ b/openai-core/com.aallam.openai.api.audio/-transcription-request-builder/response-format.html @@ -0,0 +1,66 @@ + + + + + responseFormat + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

responseFormat

+
+

The format of the transcript output, in one of these options: json, text, srt, verbose_json, or vtt.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.audio/-transcription-request-builder/temperature.html b/openai-core/com.aallam.openai.api.audio/-transcription-request-builder/temperature.html new file mode 100644 index 00000000..026100e8 --- /dev/null +++ b/openai-core/com.aallam.openai.api.audio/-transcription-request-builder/temperature.html @@ -0,0 +1,66 @@ + + + + + temperature + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

temperature

+
+

The sampling temperature, between 0 and 1. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic. If set to 0, the model will use log probability to automatically increase the temperature until certain thresholds are hit.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.audio/-transcription-request/-transcription-request.html b/openai-core/com.aallam.openai.api.audio/-transcription-request/-transcription-request.html new file mode 100644 index 00000000..3cbeda7c --- /dev/null +++ b/openai-core/com.aallam.openai.api.audio/-transcription-request/-transcription-request.html @@ -0,0 +1,66 @@ + + + + + TranscriptionRequest + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

TranscriptionRequest

+
+
constructor(audio: FileSource, model: ModelId, prompt: String? = null, responseFormat: AudioResponseFormat? = null, temperature: Double? = null, language: String? = null)
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.audio/-transcription-request/audio.html b/openai-core/com.aallam.openai.api.audio/-transcription-request/audio.html new file mode 100644 index 00000000..c6d2d7a4 --- /dev/null +++ b/openai-core/com.aallam.openai.api.audio/-transcription-request/audio.html @@ -0,0 +1,66 @@ + + + + + audio + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

audio

+
+

The audio file to transcribe, in one of these formats: mp3, mp4, mpeg, mpga, m4a, wav, or webm.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.audio/-transcription-request/index.html b/openai-core/com.aallam.openai.api.audio/-transcription-request/index.html new file mode 100644 index 00000000..48be7ddb --- /dev/null +++ b/openai-core/com.aallam.openai.api.audio/-transcription-request/index.html @@ -0,0 +1,184 @@ + + + + + TranscriptionRequest + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

TranscriptionRequest

+
class TranscriptionRequest(val audio: FileSource, val model: ModelId, val prompt: String? = null, val responseFormat: AudioResponseFormat? = null, val temperature: Double? = null, val language: String? = null)

Request to transcribe audio into the input language.

+
+
+
+
+
+

Constructors

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
constructor(audio: FileSource, model: ModelId, prompt: String? = null, responseFormat: AudioResponseFormat? = null, temperature: Double? = null, language: String? = null)
+
+
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+

The audio file to transcribe, in one of these formats: mp3, mp4, mpeg, mpga, m4a, wav, or webm.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
val language: String? = null

The language of the input audio. Supplying the input language in ISO-639-1 format will improve accuracy and latency.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

ID of the model to use. Only whisper-1 is currently available.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
val prompt: String? = null

An optional text to guide the model's style or continue a previous audio segment. The prompt should match the audio language.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

The format of the transcript output, in one of these options: json, text, srt, verbose_json, or vtt.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
val temperature: Double? = null

The sampling temperature, between 0 and 1. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic. If set to 0, the model will use log probability to automatically increase the temperature until certain thresholds are hit.

+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.audio/-transcription-request/language.html b/openai-core/com.aallam.openai.api.audio/-transcription-request/language.html new file mode 100644 index 00000000..ddebcf39 --- /dev/null +++ b/openai-core/com.aallam.openai.api.audio/-transcription-request/language.html @@ -0,0 +1,66 @@ + + + + + language + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

language

+
+
val language: String? = null

The language of the input audio. Supplying the input language in ISO-639-1 format will improve accuracy and latency.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.audio/-transcription-request/model.html b/openai-core/com.aallam.openai.api.audio/-transcription-request/model.html new file mode 100644 index 00000000..d49067f9 --- /dev/null +++ b/openai-core/com.aallam.openai.api.audio/-transcription-request/model.html @@ -0,0 +1,66 @@ + + + + + model + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

model

+
+

ID of the model to use. Only whisper-1 is currently available.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.audio/-transcription-request/prompt.html b/openai-core/com.aallam.openai.api.audio/-transcription-request/prompt.html new file mode 100644 index 00000000..f4c53692 --- /dev/null +++ b/openai-core/com.aallam.openai.api.audio/-transcription-request/prompt.html @@ -0,0 +1,66 @@ + + + + + prompt + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

prompt

+
+
val prompt: String? = null

An optional text to guide the model's style or continue a previous audio segment. The prompt should match the audio language.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.audio/-transcription-request/response-format.html b/openai-core/com.aallam.openai.api.audio/-transcription-request/response-format.html new file mode 100644 index 00000000..5f2dfdaa --- /dev/null +++ b/openai-core/com.aallam.openai.api.audio/-transcription-request/response-format.html @@ -0,0 +1,66 @@ + + + + + responseFormat + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

responseFormat

+
+

The format of the transcript output, in one of these options: json, text, srt, verbose_json, or vtt.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.audio/-transcription-request/temperature.html b/openai-core/com.aallam.openai.api.audio/-transcription-request/temperature.html new file mode 100644 index 00000000..da14b901 --- /dev/null +++ b/openai-core/com.aallam.openai.api.audio/-transcription-request/temperature.html @@ -0,0 +1,66 @@ + + + + + temperature + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

temperature

+
+
val temperature: Double? = null

The sampling temperature, between 0 and 1. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic. If set to 0, the model will use log probability to automatically increase the temperature until certain thresholds are hit.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.audio/-transcription/-transcription.html b/openai-core/com.aallam.openai.api.audio/-transcription/-transcription.html new file mode 100644 index 00000000..1f0c1d51 --- /dev/null +++ b/openai-core/com.aallam.openai.api.audio/-transcription/-transcription.html @@ -0,0 +1,66 @@ + + + + + Transcription + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

Transcription

+
+
constructor(text: String, language: String? = null, duration: Double? = null, segments: List<Segment>? = null)
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.audio/-transcription/duration.html b/openai-core/com.aallam.openai.api.audio/-transcription/duration.html new file mode 100644 index 00000000..324711d3 --- /dev/null +++ b/openai-core/com.aallam.openai.api.audio/-transcription/duration.html @@ -0,0 +1,66 @@ + + + + + duration + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

duration

+
+
val duration: Double? = null
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.audio/-transcription/index.html b/openai-core/com.aallam.openai.api.audio/-transcription/index.html new file mode 100644 index 00000000..a18cee24 --- /dev/null +++ b/openai-core/com.aallam.openai.api.audio/-transcription/index.html @@ -0,0 +1,154 @@ + + + + + Transcription + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

Transcription

+
@Serializable
data class Transcription(val text: String, val language: String? = null, val duration: Double? = null, val segments: List<Segment>? = null)

Create transcription response.

The text format depends on TranscriptionRequest's responseFormat. The remaining fields are provided only when the response format is verbose_json.

+
+
+
+
+
+

Constructors

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
constructor(text: String, language: String? = null, duration: Double? = null, segments: List<Segment>? = null)
+
+
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
val duration: Double? = null
+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
val language: String? = null
+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
val segments: List<Segment>? = null
+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

Transcription text. The format depends on TranscriptionRequest's responseFormat.

+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.audio/-transcription/language.html b/openai-core/com.aallam.openai.api.audio/-transcription/language.html new file mode 100644 index 00000000..6592a06e --- /dev/null +++ b/openai-core/com.aallam.openai.api.audio/-transcription/language.html @@ -0,0 +1,66 @@ + + + + + language + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

language

+
+
val language: String? = null
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.audio/-transcription/segments.html b/openai-core/com.aallam.openai.api.audio/-transcription/segments.html new file mode 100644 index 00000000..2d7d3ffe --- /dev/null +++ b/openai-core/com.aallam.openai.api.audio/-transcription/segments.html @@ -0,0 +1,66 @@ + + + + + segments + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

segments

+
+
val segments: List<Segment>? = null
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.audio/-transcription/text.html b/openai-core/com.aallam.openai.api.audio/-transcription/text.html new file mode 100644 index 00000000..952eaa63 --- /dev/null +++ b/openai-core/com.aallam.openai.api.audio/-transcription/text.html @@ -0,0 +1,66 @@ + + + + + text + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

text

+
+

Transcription text. The format depends on TranscriptionRequest's responseFormat.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.audio/-translation-request-builder/-translation-request-builder.html b/openai-core/com.aallam.openai.api.audio/-translation-request-builder/-translation-request-builder.html new file mode 100644 index 00000000..9060e206 --- /dev/null +++ b/openai-core/com.aallam.openai.api.audio/-translation-request-builder/-translation-request-builder.html @@ -0,0 +1,66 @@ + + + + + TranslationRequestBuilder + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

TranslationRequestBuilder

+
+
constructor()
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.audio/-translation-request-builder/audio.html b/openai-core/com.aallam.openai.api.audio/-translation-request-builder/audio.html new file mode 100644 index 00000000..02a0f5e8 --- /dev/null +++ b/openai-core/com.aallam.openai.api.audio/-translation-request-builder/audio.html @@ -0,0 +1,66 @@ + + + + + audio + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

audio

+
+

The audio file to translate, in one of these formats: mp3, mp4, mpeg, mpga, m4a, wav, or webm.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.audio/-translation-request-builder/build.html b/openai-core/com.aallam.openai.api.audio/-translation-request-builder/build.html new file mode 100644 index 00000000..f2e4276a --- /dev/null +++ b/openai-core/com.aallam.openai.api.audio/-translation-request-builder/build.html @@ -0,0 +1,66 @@ + + + + + build + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

build

+
+

Builder of TranslationRequest instances.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.audio/-translation-request-builder/index.html b/openai-core/com.aallam.openai.api.audio/-translation-request-builder/index.html new file mode 100644 index 00000000..dd092ffc --- /dev/null +++ b/openai-core/com.aallam.openai.api.audio/-translation-request-builder/index.html @@ -0,0 +1,188 @@ + + + + + TranslationRequestBuilder + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

TranslationRequestBuilder

+ +
+
+
+
+
+

Constructors

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
constructor()
+
+
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+

The audio file to translate, in one of these formats: mp3, mp4, mpeg, mpga, m4a, wav, or webm.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

ID of the model to use. Only whisper-1 is currently available.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

An optional text to guide the model's style or continue a previous audio segment. The prompt should match the audio language.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

The format of the transcript output, in one of these options: json, text, srt, verbose_json, or vtt.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

The sampling temperature, between 0 and 1. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic. If set to 0, the model will use log probability to automatically increase the temperature until certain thresholds are hit.

+
+
+
+
+
+
+
+

Functions

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+

Builder of TranslationRequest instances.

+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.audio/-translation-request-builder/model.html b/openai-core/com.aallam.openai.api.audio/-translation-request-builder/model.html new file mode 100644 index 00000000..12320f0e --- /dev/null +++ b/openai-core/com.aallam.openai.api.audio/-translation-request-builder/model.html @@ -0,0 +1,66 @@ + + + + + model + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

model

+
+

ID of the model to use. Only whisper-1 is currently available.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.audio/-translation-request-builder/prompt.html b/openai-core/com.aallam.openai.api.audio/-translation-request-builder/prompt.html new file mode 100644 index 00000000..992a50e4 --- /dev/null +++ b/openai-core/com.aallam.openai.api.audio/-translation-request-builder/prompt.html @@ -0,0 +1,66 @@ + + + + + prompt + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

prompt

+
+

An optional text to guide the model's style or continue a previous audio segment. The prompt should match the audio language.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.audio/-translation-request-builder/response-format.html b/openai-core/com.aallam.openai.api.audio/-translation-request-builder/response-format.html new file mode 100644 index 00000000..e81346d7 --- /dev/null +++ b/openai-core/com.aallam.openai.api.audio/-translation-request-builder/response-format.html @@ -0,0 +1,66 @@ + + + + + responseFormat + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

responseFormat

+
+

The format of the transcript output, in one of these options: json, text, srt, verbose_json, or vtt.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.audio/-translation-request-builder/temperature.html b/openai-core/com.aallam.openai.api.audio/-translation-request-builder/temperature.html new file mode 100644 index 00000000..dc9ba7bb --- /dev/null +++ b/openai-core/com.aallam.openai.api.audio/-translation-request-builder/temperature.html @@ -0,0 +1,66 @@ + + + + + temperature + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

temperature

+
+

The sampling temperature, between 0 and 1. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic. If set to 0, the model will use log probability to automatically increase the temperature until certain thresholds are hit.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.audio/-translation-request/-translation-request.html b/openai-core/com.aallam.openai.api.audio/-translation-request/-translation-request.html new file mode 100644 index 00000000..16007e29 --- /dev/null +++ b/openai-core/com.aallam.openai.api.audio/-translation-request/-translation-request.html @@ -0,0 +1,66 @@ + + + + + TranslationRequest + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

TranslationRequest

+
+
constructor(audio: FileSource, model: ModelId, prompt: String? = null, responseFormat: String? = null, temperature: Double? = null)
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.audio/-translation-request/audio.html b/openai-core/com.aallam.openai.api.audio/-translation-request/audio.html new file mode 100644 index 00000000..65bdb2e0 --- /dev/null +++ b/openai-core/com.aallam.openai.api.audio/-translation-request/audio.html @@ -0,0 +1,66 @@ + + + + + audio + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

audio

+
+

The audio file to translate, in one of these formats: mp3, mp4, mpeg, mpga, m4a, wav, or webm.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.audio/-translation-request/index.html b/openai-core/com.aallam.openai.api.audio/-translation-request/index.html new file mode 100644 index 00000000..d68cceb1 --- /dev/null +++ b/openai-core/com.aallam.openai.api.audio/-translation-request/index.html @@ -0,0 +1,169 @@ + + + + + TranslationRequest + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

TranslationRequest

+
class TranslationRequest(val audio: FileSource, val model: ModelId, val prompt: String? = null, val responseFormat: String? = null, val temperature: Double? = null)

Request to translate audio into English.

+
+
+
+
+
+

Constructors

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
constructor(audio: FileSource, model: ModelId, prompt: String? = null, responseFormat: String? = null, temperature: Double? = null)
+
+
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+

The audio file to translate, in one of these formats: mp3, mp4, mpeg, mpga, m4a, wav, or webm.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

ID of the model to use. Only whisper-1 is currently available.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
val prompt: String? = null

An optional text to guide the model's style or continue a previous audio segment. The prompt should be in English.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
val responseFormat: String? = null

The format of the transcript output, in one of these options: json, text, srt, verbose_json, or vtt.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
val temperature: Double? = null

The sampling temperature, between 0 and 1. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic. If set to 0, the model will use log probability to automatically increase the temperature until certain thresholds are hit.

+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.audio/-translation-request/model.html b/openai-core/com.aallam.openai.api.audio/-translation-request/model.html new file mode 100644 index 00000000..76e315dc --- /dev/null +++ b/openai-core/com.aallam.openai.api.audio/-translation-request/model.html @@ -0,0 +1,66 @@ + + + + + model + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

model

+
+

ID of the model to use. Only whisper-1 is currently available.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.audio/-translation-request/prompt.html b/openai-core/com.aallam.openai.api.audio/-translation-request/prompt.html new file mode 100644 index 00000000..c387fb97 --- /dev/null +++ b/openai-core/com.aallam.openai.api.audio/-translation-request/prompt.html @@ -0,0 +1,66 @@ + + + + + prompt + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

prompt

+
+
val prompt: String? = null

An optional text to guide the model's style or continue a previous audio segment. The prompt should be in English.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.audio/-translation-request/response-format.html b/openai-core/com.aallam.openai.api.audio/-translation-request/response-format.html new file mode 100644 index 00000000..37e8b105 --- /dev/null +++ b/openai-core/com.aallam.openai.api.audio/-translation-request/response-format.html @@ -0,0 +1,66 @@ + + + + + responseFormat + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

responseFormat

+
+
val responseFormat: String? = null

The format of the transcript output, in one of these options: json, text, srt, verbose_json, or vtt.

Default: json

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.audio/-translation-request/temperature.html b/openai-core/com.aallam.openai.api.audio/-translation-request/temperature.html new file mode 100644 index 00000000..1e8a832b --- /dev/null +++ b/openai-core/com.aallam.openai.api.audio/-translation-request/temperature.html @@ -0,0 +1,66 @@ + + + + + temperature + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

temperature

+
+
val temperature: Double? = null

The sampling temperature, between 0 and 1. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic. If set to 0, the model will use log probability to automatically increase the temperature until certain thresholds are hit.

Default: 0

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.audio/-translation/-translation.html b/openai-core/com.aallam.openai.api.audio/-translation/-translation.html new file mode 100644 index 00000000..195e3e58 --- /dev/null +++ b/openai-core/com.aallam.openai.api.audio/-translation/-translation.html @@ -0,0 +1,66 @@ + + + + + Translation + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

Translation

+
+
constructor(text: String, language: String? = null, duration: Double? = null, segments: List<Segment>? = null)
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.audio/-translation/duration.html b/openai-core/com.aallam.openai.api.audio/-translation/duration.html new file mode 100644 index 00000000..4edda36f --- /dev/null +++ b/openai-core/com.aallam.openai.api.audio/-translation/duration.html @@ -0,0 +1,66 @@ + + + + + duration + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

duration

+
+
val duration: Double? = null
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.audio/-translation/index.html b/openai-core/com.aallam.openai.api.audio/-translation/index.html new file mode 100644 index 00000000..b9ddaa4e --- /dev/null +++ b/openai-core/com.aallam.openai.api.audio/-translation/index.html @@ -0,0 +1,154 @@ + + + + + Translation + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

Translation

+
@Serializable
data class Translation(val text: String, val language: String? = null, val duration: Double? = null, val segments: List<Segment>? = null)

A translation response.

text format depends on TranslationRequest's responseFormat. The remaining fields are provided only when the response format is verbose_json.
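A small reading sketch: only text is always populated; the other properties stay null unless verbose_json was requested.
// Sketch: summarise a Translation, using the optional verbose_json fields when present.
fun describe(translation: Translation): String {
    val language = translation.language?.let { " [$it]" } ?: ""
    val duration = translation.duration?.let { " (${it}s)" } ?: ""
    return translation.text + language + duration
}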

+
+
+
+
+
+

Constructors

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
constructor(text: String, language: String? = null, duration: Double? = null, segments: List<Segment>? = null)
+
+
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
val duration: Double? = null
+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
val language: String? = null
+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
val segments: List<Segment>? = null
+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

Translation text.

+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.audio/-translation/language.html b/openai-core/com.aallam.openai.api.audio/-translation/language.html new file mode 100644 index 00000000..01e388be --- /dev/null +++ b/openai-core/com.aallam.openai.api.audio/-translation/language.html @@ -0,0 +1,66 @@ + + + + + language + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

language

+
+
val language: String? = null
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.audio/-translation/segments.html b/openai-core/com.aallam.openai.api.audio/-translation/segments.html new file mode 100644 index 00000000..003547e0 --- /dev/null +++ b/openai-core/com.aallam.openai.api.audio/-translation/segments.html @@ -0,0 +1,66 @@ + + + + + segments + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

segments

+
+
val segments: List<Segment>? = null
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.audio/-translation/text.html b/openai-core/com.aallam.openai.api.audio/-translation/text.html new file mode 100644 index 00000000..66660e87 --- /dev/null +++ b/openai-core/com.aallam.openai.api.audio/-translation/text.html @@ -0,0 +1,66 @@ + + + + + text + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

text

+
+

Translation text.

The format depends on TranslationRequest's responseFormat.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.audio/index.html b/openai-core/com.aallam.openai.api.audio/index.html new file mode 100644 index 00000000..eb1edc76 --- /dev/null +++ b/openai-core/com.aallam.openai.api.audio/index.html @@ -0,0 +1,228 @@ + + + + + com.aallam.openai.api.audio + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

Package-level declarations

+
+
+
+
+
+

Types

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
@Serializable
value class AudioResponseFormat(val value: String)
+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
@Serializable
data class Segment(val id: Int, val seek: Int, val start: Double, val end: Double, val text: String, val tokens: List<Int>, val temperature: Double, val avgLogprob: Double, val compressionRatio: Double, val noSpeechProb: Double, val transient: Boolean? = null)
+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
@Serializable
data class Transcription(val text: String, val language: String? = null, val duration: Double? = null, val segments: List<Segment>? = null)

A transcription response.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
class TranscriptionRequest(val audio: FileSource, val model: ModelId, val prompt: String? = null, val responseFormat: AudioResponseFormat? = null, val temperature: Double? = null, val language: String? = null)

Request to transcribe audio into the input language.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+ +
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
@Serializable
data class Translation(val text: String, val language: String? = null, val duration: Double? = null, val segments: List<Segment>? = null)

A translation response.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
class TranslationRequest(val audio: FileSource, val model: ModelId, val prompt: String? = null, val responseFormat: String? = null, val temperature: Double? = null)

Request to translate audio into English.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+ +
+
+
+
+
+

Functions

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+

Creates a transcription request.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

Creates a translation request.

+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.audio/transcription-request.html b/openai-core/com.aallam.openai.api.audio/transcription-request.html new file mode 100644 index 00000000..43df7888 --- /dev/null +++ b/openai-core/com.aallam.openai.api.audio/transcription-request.html @@ -0,0 +1,66 @@ + + + + + transcriptionRequest + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

transcriptionRequest

+
+

Creates a transcription request.
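A hedged usage sketch; the builder lambda and its property names are assumed here to mirror TranscriptionRequest (audio, model, language, ...) and may differ from the actual builder.
// Sketch only: property names assumed from TranscriptionRequest.
val request = transcriptionRequest {
    audio = audioSource               // an existing FileSource
    model = ModelId("whisper-1")
    language = "en"
}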

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.audio/translation-request.html b/openai-core/com.aallam.openai.api.audio/translation-request.html new file mode 100644 index 00000000..60e55cca --- /dev/null +++ b/openai-core/com.aallam.openai.api.audio/translation-request.html @@ -0,0 +1,66 @@ + + + + + translationRequest + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

translationRequest

+
+

Creates a translation request.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-choice/--index--.html b/openai-core/com.aallam.openai.api.chat/-chat-choice/--index--.html new file mode 100644 index 00000000..c2f2bef7 --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-choice/--index--.html @@ -0,0 +1,66 @@ + + + + + index + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

index

+
+
val index: Int

Chat choice index.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-choice/-chat-choice.html b/openai-core/com.aallam.openai.api.chat/-chat-choice/-chat-choice.html new file mode 100644 index 00000000..da1373e9 --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-choice/-chat-choice.html @@ -0,0 +1,66 @@ + + + + + ChatChoice + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

ChatChoice

+
+
constructor(index: Int, message: ChatMessage, finishReason: FinishReason)
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-choice/finish-reason.html b/openai-core/com.aallam.openai.api.chat/-chat-choice/finish-reason.html new file mode 100644 index 00000000..5a56ed7b --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-choice/finish-reason.html @@ -0,0 +1,66 @@ + + + + + finishReason + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

finishReason

+
+

The reason why OpenAI stopped generating.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-choice/index.html b/openai-core/com.aallam.openai.api.chat/-chat-choice/index.html new file mode 100644 index 00000000..71b0a828 --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-choice/index.html @@ -0,0 +1,139 @@ + + + + + ChatChoice + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

ChatChoice

+
@Serializable
data class ChatChoice(val index: Int, val message: ChatMessage, val finishReason: FinishReason)

A completion generated by OpenAI.

documentation

+
+
+
+
+
+

Constructors

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
constructor(index: Int, message: ChatMessage, finishReason: FinishReason)
+
+
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+

The reason why OpenAI stopped generating.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
val index: Int

Chat choice index.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

The generated chat message.

+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-choice/message.html b/openai-core/com.aallam.openai.api.chat/-chat-choice/message.html new file mode 100644 index 00000000..81bc4239 --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-choice/message.html @@ -0,0 +1,66 @@ + + + + + message + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

message

+
+

The generated chat message.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-chunk/--index--.html b/openai-core/com.aallam.openai.api.chat/-chat-chunk/--index--.html new file mode 100644 index 00000000..6cf8a890 --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-chunk/--index--.html @@ -0,0 +1,66 @@ + + + + + index + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

index

+
+
val index: Int

Chat choice index.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-chunk/-chat-chunk.html b/openai-core/com.aallam.openai.api.chat/-chat-chunk/-chat-chunk.html new file mode 100644 index 00000000..da5d7125 --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-chunk/-chat-chunk.html @@ -0,0 +1,66 @@ + + + + + ChatChunk + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

ChatChunk

+
+
constructor(index: Int, delta: ChatDelta, finishReason: FinishReason?)
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-chunk/delta.html b/openai-core/com.aallam.openai.api.chat/-chat-chunk/delta.html new file mode 100644 index 00000000..7af4ce7b --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-chunk/delta.html @@ -0,0 +1,66 @@ + + + + + delta + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

delta

+
+

The generated chat message.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-chunk/finish-reason.html b/openai-core/com.aallam.openai.api.chat/-chat-chunk/finish-reason.html new file mode 100644 index 00000000..6cdecb4f --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-chunk/finish-reason.html @@ -0,0 +1,66 @@ + + + + + finishReason + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

finishReason

+
+

The reason why OpenAI stopped generating.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-chunk/index.html b/openai-core/com.aallam.openai.api.chat/-chat-chunk/index.html new file mode 100644 index 00000000..005a4e04 --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-chunk/index.html @@ -0,0 +1,139 @@ + + + + + ChatChunk + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

ChatChunk

+
@Serializable
data class ChatChunk(val index: Int, val delta: ChatDelta, val finishReason: FinishReason?)

A completion generated by OpenAI.

documentation

+
+
+
+
+
+

Constructors

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
constructor(index: Int, delta: ChatDelta, finishReason: FinishReason?)
+
+
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+

The generated chat message.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

The reason why OpenAI stopped generating.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
val index: Int

Chat choice index.

+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-completion-chunk/-chat-completion-chunk.html b/openai-core/com.aallam.openai.api.chat/-chat-completion-chunk/-chat-completion-chunk.html new file mode 100644 index 00000000..ed683e88 --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-completion-chunk/-chat-completion-chunk.html @@ -0,0 +1,66 @@ + + + + + ChatCompletionChunk + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

ChatCompletionChunk

+
+
constructor(id: String, created: Int, model: ModelId, choices: List<ChatChunk>, usage: Usage? = null)
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-completion-chunk/choices.html b/openai-core/com.aallam.openai.api.chat/-chat-completion-chunk/choices.html new file mode 100644 index 00000000..a9580fa6 --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-completion-chunk/choices.html @@ -0,0 +1,66 @@ + + + + + choices + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

choices

+
+

A list of generated completions.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-completion-chunk/created.html b/openai-core/com.aallam.openai.api.chat/-chat-completion-chunk/created.html new file mode 100644 index 00000000..b8e92830 --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-completion-chunk/created.html @@ -0,0 +1,66 @@ + + + + + created + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

created

+
+

The creation time in epoch milliseconds.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-completion-chunk/id.html b/openai-core/com.aallam.openai.api.chat/-chat-completion-chunk/id.html new file mode 100644 index 00000000..31ac5c65 --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-completion-chunk/id.html @@ -0,0 +1,66 @@ + + + + + id + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

id

+
+
val id: String

A unique ID assigned to this completion.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-completion-chunk/index.html b/openai-core/com.aallam.openai.api.chat/-chat-completion-chunk/index.html new file mode 100644 index 00000000..3ca0904d --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-completion-chunk/index.html @@ -0,0 +1,169 @@ + + + + + ChatCompletionChunk + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

ChatCompletionChunk

+
@Serializable
data class ChatCompletionChunk(val id: String, val created: Int, val model: ModelId, val choices: List<ChatChunk>, val usage: Usage? = null)

An object containing a response from the chat stream completion API.

documentation
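For illustration, a tiny helper that extracts the incremental text carried by a streamed chunk (types as documented on this page, ChatChunk, and ChatDelta):
// Sketch: the text fragment delivered by this chunk, or null if the delta carries none.
fun ChatCompletionChunk.deltaText(): String? =
    choices.firstOrNull()?.delta?.content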

+
+
+
+
+
+

Constructors

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
constructor(id: String, created: Int, model: ModelId, choices: List<ChatChunk>, usage: Usage? = null)
+
+
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+

A list of generated completions.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

The creation time in epoch milliseconds.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
val id: String

A unique ID assigned to this completion.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

The model used.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
val usage: Usage? = null

Text completion usage data.

+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-completion-chunk/model.html b/openai-core/com.aallam.openai.api.chat/-chat-completion-chunk/model.html new file mode 100644 index 00000000..7c3a3e44 --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-completion-chunk/model.html @@ -0,0 +1,66 @@ + + + + + model + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

model

+
+

The model used.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-completion-chunk/usage.html b/openai-core/com.aallam.openai.api.chat/-chat-completion-chunk/usage.html new file mode 100644 index 00000000..30a872b8 --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-completion-chunk/usage.html @@ -0,0 +1,66 @@ + + + + + usage + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

usage

+
+
val usage: Usage? = null

Text completion usage data.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-completion-function-builder/-chat-completion-function-builder.html b/openai-core/com.aallam.openai.api.chat/-chat-completion-function-builder/-chat-completion-function-builder.html new file mode 100644 index 00000000..600268e0 --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-completion-function-builder/-chat-completion-function-builder.html @@ -0,0 +1,66 @@ + + + + + ChatCompletionFunctionBuilder + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

ChatCompletionFunctionBuilder

+
+
constructor()
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-completion-function-builder/build.html b/openai-core/com.aallam.openai.api.chat/-chat-completion-function-builder/build.html new file mode 100644 index 00000000..ff348866 --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-completion-function-builder/build.html @@ -0,0 +1,66 @@ + + + + + build + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

build

+
+ +
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-completion-function-builder/description.html b/openai-core/com.aallam.openai.api.chat/-chat-completion-function-builder/description.html new file mode 100644 index 00000000..d0c3cda8 --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-completion-function-builder/description.html @@ -0,0 +1,66 @@ + + + + + description + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

description

+
+

The description of what the function does.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-completion-function-builder/index.html b/openai-core/com.aallam.openai.api.chat/-chat-completion-function-builder/index.html new file mode 100644 index 00000000..73761e00 --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-completion-function-builder/index.html @@ -0,0 +1,158 @@ + + + + + ChatCompletionFunctionBuilder + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

ChatCompletionFunctionBuilder

+ +
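A hedged usage sketch; build() is assumed to return the assembled ChatCompletionFunction, and Parameters.Empty is the documented way to declare a parameterless function.
// Sketch: declare a function that takes no arguments.
val timeFunction = ChatCompletionFunctionBuilder().apply {
    name = "current_time"                          // a-z, A-Z, 0-9, underscores and dashes; max 64 chars
    description = "Returns the current UTC time."
    parameters = Parameters.Empty                  // type assumed to be Parameters
}.build()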
+
+
+
+
+

Constructors

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
constructor()
+
+
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+

The description of what the function does.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
var name: String?

The name of the function to be called.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

The parameters the function accepts.

+
+
+
+
+
+
+
+

Functions

+
+
+
+
+ + +
Link copied to clipboard
+
+
+ +
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-completion-function-builder/name.html b/openai-core/com.aallam.openai.api.chat/-chat-completion-function-builder/name.html new file mode 100644 index 00000000..f09592e6 --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-completion-function-builder/name.html @@ -0,0 +1,66 @@ + + + + + name + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

name

+
+
var name: String?

The name of the function to be called.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-completion-function-builder/parameters.html b/openai-core/com.aallam.openai.api.chat/-chat-completion-function-builder/parameters.html new file mode 100644 index 00000000..7164a5e2 --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-completion-function-builder/parameters.html @@ -0,0 +1,66 @@ + + + + + parameters + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

parameters

+
+

The parameters the function accepts.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-completion-function/-chat-completion-function.html b/openai-core/com.aallam.openai.api.chat/-chat-completion-function/-chat-completion-function.html new file mode 100644 index 00000000..a31a2603 --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-completion-function/-chat-completion-function.html @@ -0,0 +1,66 @@ + + + + + ChatCompletionFunction + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

ChatCompletionFunction

+
+
constructor(name: String, description: String? = null, parameters: Parameters)
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-completion-function/description.html b/openai-core/com.aallam.openai.api.chat/-chat-completion-function/description.html new file mode 100644 index 00000000..d7a92ad3 --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-completion-function/description.html @@ -0,0 +1,66 @@ + + + + + description + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

description

+
+
val description: String? = null

The description of what the function does.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-completion-function/index.html b/openai-core/com.aallam.openai.api.chat/-chat-completion-function/index.html new file mode 100644 index 00000000..e5f60b50 --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-completion-function/index.html @@ -0,0 +1,139 @@ + + + + + ChatCompletionFunction + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

ChatCompletionFunction

+
@Serializable
data class ChatCompletionFunction(val name: String, val description: String? = null, val parameters: Parameters)
+
+
+
+
+
+

Constructors

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
constructor(name: String, description: String? = null, parameters: Parameters)
+
+
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
val description: String? = null

The description of what the function does.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

The name of the function to be called. Must be a-z, A-Z, 0-9, or contain underscores and dashes, with a maximum length of 64.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

The parameters the functions accepts, described as a JSON Schema object. See the guide for examples, and the JSON Schema reference for documentation about the format.

+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-completion-function/name.html b/openai-core/com.aallam.openai.api.chat/-chat-completion-function/name.html new file mode 100644 index 00000000..c39bb83f --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-completion-function/name.html @@ -0,0 +1,66 @@ + + + + + name + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

name

+
+

The name of the function to be called. Must be a-z, A-Z, 0-9, or contain underscores and dashes, with a maximum length of 64.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-completion-function/parameters.html b/openai-core/com.aallam.openai.api.chat/-chat-completion-function/parameters.html new file mode 100644 index 00000000..e9e70d7d --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-completion-function/parameters.html @@ -0,0 +1,66 @@ + + + + + parameters + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

parameters

+
+

The parameters the functions accepts, described as a JSON Schema object. See the guide for examples, and the JSON Schema reference for documentation about the format.

To describe a function that accepts no parameters, provide Parameters.Empty.
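For example, a parameterless function can be declared directly with the documented constructor:
// Sketch: no parameters, so Parameters.Empty is passed as noted above.
val currentTime = ChatCompletionFunction(
    name = "current_time",
    description = "Returns the current UTC time.",
    parameters = Parameters.Empty
)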

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-completion-request-builder/-chat-completion-request-builder.html b/openai-core/com.aallam.openai.api.chat/-chat-completion-request-builder/-chat-completion-request-builder.html new file mode 100644 index 00000000..549b31fc --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-completion-request-builder/-chat-completion-request-builder.html @@ -0,0 +1,66 @@ + + + + + ChatCompletionRequestBuilder + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

ChatCompletionRequestBuilder

+
+
constructor()
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-completion-request-builder/build.html b/openai-core/com.aallam.openai.api.chat/-chat-completion-request-builder/build.html new file mode 100644 index 00000000..95734b63 --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-completion-request-builder/build.html @@ -0,0 +1,66 @@ + + + + + build + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

build

+
+ +
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-completion-request-builder/frequency-penalty.html b/openai-core/com.aallam.openai.api.chat/-chat-completion-request-builder/frequency-penalty.html new file mode 100644 index 00000000..ec750647 --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-completion-request-builder/frequency-penalty.html @@ -0,0 +1,66 @@ + + + + + frequencyPenalty + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

frequencyPenalty

+
+

Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim.

Read more

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-completion-request-builder/function-call.html b/openai-core/com.aallam.openai.api.chat/-chat-completion-request-builder/function-call.html new file mode 100644 index 00000000..093bee14 --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-completion-request-builder/function-call.html @@ -0,0 +1,66 @@ + + + + + functionCall + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

functionCall

+
+

Controls how the model responds to function calls. FunctionMode.None means the model does not call a function, and responds to the end-user. FunctionMode.Auto means the model can pick between an end-user or calling a function. Specifying a particular function via FunctionMode.Named forces the model to call that function. FunctionMode.None is the default when no functions are present. FunctionMode.Auto is the default if functions are present.
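A hedged sketch of setting this property in the builder; FunctionMode.Auto is documented above, while the exact shape of FunctionMode.Named and the ChatMessage/ChatRole construction are assumed from the surrounding package.
// Sketch: let the model decide whether to call one of the declared functions.
val request = ChatCompletionRequestBuilder().apply {
    model = ModelId("gpt-3.5-turbo")              // placeholder model id
    messages = listOf(ChatMessage(role = ChatRole.User, content = "What time is it?"))
    functionCall = FunctionMode.Auto
    // functionCall = FunctionMode.Named("current_time")  // force a specific function (shape assumed)
}.build()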

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-completion-request-builder/functions.html b/openai-core/com.aallam.openai.api.chat/-chat-completion-request-builder/functions.html new file mode 100644 index 00000000..e86360ca --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-completion-request-builder/functions.html @@ -0,0 +1,66 @@ + + + + + functions + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

functions

+
+
fun functions(block: FunctionsBuilder.() -> Unit)

A list of functions the model may generate JSON inputs for.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-completion-request-builder/index.html b/openai-core/com.aallam.openai.api.chat/-chat-completion-request-builder/index.html new file mode 100644 index 00000000..9a5015d7 --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-completion-request-builder/index.html @@ -0,0 +1,338 @@ + + + + + ChatCompletionRequestBuilder + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

ChatCompletionRequestBuilder

+

Creates a completion for the chat message.
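A hedged usage sketch using the properties listed below (ChatMessage and ChatRole construction is assumed from the rest of this package; build() returns the finished request):
// Sketch: assemble a request through the builder.
val request = ChatCompletionRequestBuilder().apply {
    model = ModelId("gpt-3.5-turbo")              // placeholder model id
    temperature = 0.7
    messages = listOf(
        ChatMessage(role = ChatRole.System, content = "You are a helpful assistant."),
        ChatMessage(role = ChatRole.User, content = "Hello!")
    )
}.build()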

+
+
+
+
+
+

Constructors

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
constructor()
+
+
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+

Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

Controls how the model responds to function calls. FunctionMode.None means the model does not call a function, and responds to the end-user. FunctionMode.Auto means the model can pick between an end-user or calling a function. Specifying a particular function via FunctionMode.Named forces the model to call that function. FunctionMode.None is the default when no functions are present. FunctionMode.Auto is the default if functions are present.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

A list of functions the model may generate JSON inputs for.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

Modify the likelihood of specified tokens appearing in the completion.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

The maximum number of tokens allowed for the generated answer. By default, the number of tokens the model can return will be (4096 - prompt tokens).

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

The messages to generate chat completions for.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

ID of the model to use.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
var n: Int?

How many chat completion choices to generate for each input message.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

Number between -2.0 and 2.0. Positive values penalize new tokens based on whether they appear in the text so far, increasing the model's likelihood to talk about new topics.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

Up to 4 sequences where the API will stop generating further tokens.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
var topP: Double?

An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
var user: String?

A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse.

+
+
+
+
+
+
+
+

Functions

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+ +
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
fun functions(block: FunctionsBuilder.() -> Unit)

A list of functions the model may generate JSON inputs for.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

The messages to generate chat completions for.

+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-completion-request-builder/logit-bias.html b/openai-core/com.aallam.openai.api.chat/-chat-completion-request-builder/logit-bias.html new file mode 100644 index 00000000..3402024f --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-completion-request-builder/logit-bias.html @@ -0,0 +1,66 @@ + + + + + logitBias + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

logitBias

+
+

Modify the likelihood of specified tokens appearing in the completion.

Accepts a JSON object that maps tokens (specified by their token ID in the tokenizer) to an associated bias value from -100 to 100. Mathematically, the bias is added to the logits generated by the model prior to sampling. The exact effect will vary per model, but values between -1 and 1 should decrease or increase likelihood of selection; values like -100 or 100 should result in a ban or exclusive selection of the relevant token.
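A small sketch of a bias map; keys are token IDs rendered as strings, and the IDs below are placeholders rather than real tokenizer output.
// Sketch: ban one token outright and mildly favour another, then assign the map to logitBias.
val bias: Map<String, Int> = mapOf(
    "50256" to -100,   // effectively bans this token
    "15496" to 5       // slightly increases its likelihood
)
// inside the request builder:  logitBias = bias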

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-completion-request-builder/max-tokens.html b/openai-core/com.aallam.openai.api.chat/-chat-completion-request-builder/max-tokens.html new file mode 100644 index 00000000..f9128945 --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-completion-request-builder/max-tokens.html @@ -0,0 +1,66 @@ + + + + + maxTokens + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

maxTokens

+
+

The maximum number of tokens allowed for the generated answer. By default, the number of tokens the model can return will be (4096 - prompt tokens).

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-completion-request-builder/messages.html b/openai-core/com.aallam.openai.api.chat/-chat-completion-request-builder/messages.html new file mode 100644 index 00000000..56dad06f --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-completion-request-builder/messages.html @@ -0,0 +1,66 @@ + + + + + messages + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

messages

+
+

The messages to generate chat completions for.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-completion-request-builder/model.html b/openai-core/com.aallam.openai.api.chat/-chat-completion-request-builder/model.html new file mode 100644 index 00000000..8e7ab2e8 --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-completion-request-builder/model.html @@ -0,0 +1,66 @@ + + + + + model + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

model

+
+

ID of the model to use.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-completion-request-builder/n.html b/openai-core/com.aallam.openai.api.chat/-chat-completion-request-builder/n.html new file mode 100644 index 00000000..cec481e1 --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-completion-request-builder/n.html @@ -0,0 +1,66 @@ + + + + + n + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

n

+
+
var n: Int?

How many chat completion choices to generate for each input message.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-completion-request-builder/presence-penalty.html b/openai-core/com.aallam.openai.api.chat/-chat-completion-request-builder/presence-penalty.html new file mode 100644 index 00000000..c6a79969 --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-completion-request-builder/presence-penalty.html @@ -0,0 +1,66 @@ + + + + + presencePenalty + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

presencePenalty

+
+

Number between -2.0 and 2.0. Positive values penalize new tokens based on whether they appear in the text so far, increasing the model's likelihood to talk about new topics.

Read more

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-completion-request-builder/stop.html b/openai-core/com.aallam.openai.api.chat/-chat-completion-request-builder/stop.html new file mode 100644 index 00000000..ce75a967 --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-completion-request-builder/stop.html @@ -0,0 +1,66 @@ + + + + + stop + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

stop

+
+

Up to 4 sequences where the API will stop generating further tokens.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-completion-request-builder/temperature.html b/openai-core/com.aallam.openai.api.chat/-chat-completion-request-builder/temperature.html new file mode 100644 index 00000000..0bfe29a4 --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-completion-request-builder/temperature.html @@ -0,0 +1,66 @@ + + + + + temperature + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

temperature

+
+

What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic.

We generally recommend altering this or topP but not both.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-completion-request-builder/top-p.html b/openai-core/com.aallam.openai.api.chat/-chat-completion-request-builder/top-p.html new file mode 100644 index 00000000..a0080778 --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-completion-request-builder/top-p.html @@ -0,0 +1,66 @@ + + + + + topP + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

topP

+
+
var topP: Double?

An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered.

We generally recommend altering this or temperature but not both.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-completion-request-builder/user.html b/openai-core/com.aallam.openai.api.chat/-chat-completion-request-builder/user.html new file mode 100644 index 00000000..3c182904 --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-completion-request-builder/user.html @@ -0,0 +1,66 @@ + + + + + user + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

user

+
+
var user: String?

A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-completion-request/-chat-completion-request.html b/openai-core/com.aallam.openai.api.chat/-chat-completion-request/-chat-completion-request.html new file mode 100644 index 00000000..35208f22 --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-completion-request/-chat-completion-request.html @@ -0,0 +1,66 @@ + + + + + ChatCompletionRequest + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

ChatCompletionRequest

+
+
constructor(model: ModelId, messages: List<ChatMessage>, temperature: Double? = null, topP: Double? = null, n: Int? = null, stop: List<String>? = null, maxTokens: Int? = null, presencePenalty: Double? = null, frequencyPenalty: Double? = null, logitBias: Map<String, Int>? = null, user: String? = null, functions: List<ChatCompletionFunction>? = null, functionCall: FunctionMode? = null)
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-completion-request/frequency-penalty.html b/openai-core/com.aallam.openai.api.chat/-chat-completion-request/frequency-penalty.html new file mode 100644 index 00000000..89c0ecf4 --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-completion-request/frequency-penalty.html @@ -0,0 +1,66 @@ + + + + + frequencyPenalty + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

frequencyPenalty

+
+

Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim.

Read more

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-completion-request/function-call.html b/openai-core/com.aallam.openai.api.chat/-chat-completion-request/function-call.html new file mode 100644 index 00000000..962fd82d --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-completion-request/function-call.html @@ -0,0 +1,66 @@ + + + + + functionCall + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

functionCall

+
+

Controls how the model responds to function calls. FunctionMode.None means the model does not call a function, and responds to the end-user. FunctionMode.Auto means the model can pick between an end-user or calling a function. Specifying a particular function via FunctionMode.Named forces the model to call that function. FunctionMode.None is the default when no functions are present. FunctionMode.Auto is the default if functions are present.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-completion-request/functions.html b/openai-core/com.aallam.openai.api.chat/-chat-completion-request/functions.html new file mode 100644 index 00000000..44233476 --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-completion-request/functions.html @@ -0,0 +1,66 @@ + + + + + functions + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

functions

+
+

A list of functions the model may generate JSON inputs for.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-completion-request/index.html b/openai-core/com.aallam.openai.api.chat/-chat-completion-request/index.html new file mode 100644 index 00000000..27ff7df4 --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-completion-request/index.html @@ -0,0 +1,289 @@ + + + + + ChatCompletionRequest + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

ChatCompletionRequest

+
@Serializable
class ChatCompletionRequest(val model: ModelId, val messages: List<ChatMessage>, val temperature: Double? = null, val topP: Double? = null, val n: Int? = null, val stop: List<String>? = null, val maxTokens: Int? = null, val presencePenalty: Double? = null, val frequencyPenalty: Double? = null, val logitBias: Map<String, Int>? = null, val user: String? = null, val functions: List<ChatCompletionFunction>? = null, val functionCall: FunctionMode? = null)

Creates a completion for the chat message.
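A minimal construction sketch (ChatMessage and ChatRole construction is assumed from the rest of this package; all other parameters default to null):
// Sketch: only model and messages are required.
val request = ChatCompletionRequest(
    model = ModelId("gpt-3.5-turbo"),              // placeholder model id
    messages = listOf(
        ChatMessage(role = ChatRole.System, content = "You are a helpful assistant."),
        ChatMessage(role = ChatRole.User, content = "Hello!")
    ),
    maxTokens = 256
)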

+
+
+
+
+
+

Constructors

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
constructor(model: ModelId, messages: List<ChatMessage>, temperature: Double? = null, topP: Double? = null, n: Int? = null, stop: List<String>? = null, maxTokens: Int? = null, presencePenalty: Double? = null, frequencyPenalty: Double? = null, logitBias: Map<String, Int>? = null, user: String? = null, functions: List<ChatCompletionFunction>? = null, functionCall: FunctionMode? = null)
+
+
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+

Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

Controls how the model responds to function calls. FunctionMode.None means the model does not call a function, and responds to the end-user. FunctionMode.Auto means the model can pick between an end-user or calling a function. Specifying a particular function via FunctionMode.Named forces the model to call that function. FunctionMode.None is the default when no functions are present. FunctionMode.Auto is the default if functions are present.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

A list of functions the model may generate JSON inputs for.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
val logitBias: Map<String, Int>? = null

Modify the likelihood of specified tokens appearing in the completion.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
val maxTokens: Int? = null

The maximum number of tokens allowed for the generated answer. By default, the number of tokens the model can return will be (4096 - prompt tokens).

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

The messages to generate chat completions for.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

ID of the model to use.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
val n: Int? = null

How many chat completion choices to generate for each input message.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
val presencePenalty: Double? = null

Number between -2.0 and 2.0. Positive values penalize new tokens based on whether they appear in the text so far, increasing the model's likelihood to talk about new topics.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
val stop: List<String>? = null

Up to 4 sequences where the API will stop generating further tokens.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
val temperature: Double? = null

What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
val topP: Double? = null

An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
val user: String? = null

A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse.

+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-completion-request/logit-bias.html b/openai-core/com.aallam.openai.api.chat/-chat-completion-request/logit-bias.html new file mode 100644 index 00000000..ef530187 --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-completion-request/logit-bias.html @@ -0,0 +1,66 @@ + + + + + logitBias + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

logitBias

+
+
val logitBias: Map<String, Int>? = null

Modify the likelihood of specified tokens appearing in the completion.

Accepts a JSON object that maps tokens (specified by their token ID in the tokenizer) to an associated bias value from -100 to 100. Mathematically, the bias is added to the logits generated by the model prior to sampling. The exact effect will vary per model, but values between -1 and 1 should decrease or increase likelihood of selection; values like -100 or 100 should result in a ban or exclusive selection of the relevant token.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-completion-request/max-tokens.html b/openai-core/com.aallam.openai.api.chat/-chat-completion-request/max-tokens.html new file mode 100644 index 00000000..c1fbb15d --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-completion-request/max-tokens.html @@ -0,0 +1,66 @@ + + + + + maxTokens + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

maxTokens

+
+
val maxTokens: Int? = null

The maximum number of tokens allowed for the generated answer. By default, the number of tokens the model can return will be (4096 - prompt tokens).

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-completion-request/messages.html b/openai-core/com.aallam.openai.api.chat/-chat-completion-request/messages.html new file mode 100644 index 00000000..9c82ad36 --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-completion-request/messages.html @@ -0,0 +1,66 @@ + + + + + messages + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

messages

+
+

The messages to generate chat completions for.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-completion-request/model.html b/openai-core/com.aallam.openai.api.chat/-chat-completion-request/model.html new file mode 100644 index 00000000..df361791 --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-completion-request/model.html @@ -0,0 +1,66 @@ + + + + + model + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

model

+
+

ID of the model to use.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-completion-request/n.html b/openai-core/com.aallam.openai.api.chat/-chat-completion-request/n.html new file mode 100644 index 00000000..0f7ffb78 --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-completion-request/n.html @@ -0,0 +1,66 @@ + + + + + n + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

n

+
+
val n: Int? = null

How many chat completion choices to generate for each input message.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-completion-request/presence-penalty.html b/openai-core/com.aallam.openai.api.chat/-chat-completion-request/presence-penalty.html new file mode 100644 index 00000000..eb1f9c7c --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-completion-request/presence-penalty.html @@ -0,0 +1,66 @@ + + + + + presencePenalty + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

presencePenalty

+
+
val presencePenalty: Double? = null

Number between -2.0 and 2.0. Positive values penalize new tokens based on whether they appear in the text so far, increasing the model's likelihood to talk about new topics.

Read more

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-completion-request/stop.html b/openai-core/com.aallam.openai.api.chat/-chat-completion-request/stop.html new file mode 100644 index 00000000..4a4cd4bd --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-completion-request/stop.html @@ -0,0 +1,66 @@ + + + + + stop + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

stop

+
+
val stop: List<String>? = null

Up to 4 sequences where the API will stop generating further tokens.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-completion-request/temperature.html b/openai-core/com.aallam.openai.api.chat/-chat-completion-request/temperature.html new file mode 100644 index 00000000..7476b592 --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-completion-request/temperature.html @@ -0,0 +1,66 @@ + + + + + temperature + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

temperature

+
+
val temperature: Double? = null

What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic.

We generally recommend altering this or topP but not both.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-completion-request/top-p.html b/openai-core/com.aallam.openai.api.chat/-chat-completion-request/top-p.html new file mode 100644 index 00000000..37490971 --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-completion-request/top-p.html @@ -0,0 +1,66 @@ + + + + + topP + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

topP

+
+
val topP: Double? = null

An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered.

We generally recommend altering this or temperature but not both.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-completion-request/user.html b/openai-core/com.aallam.openai.api.chat/-chat-completion-request/user.html new file mode 100644 index 00000000..b56b9fa7 --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-completion-request/user.html @@ -0,0 +1,66 @@ + + + + + user + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

user

+
+
val user: String? = null

A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-completion/-chat-completion.html b/openai-core/com.aallam.openai.api.chat/-chat-completion/-chat-completion.html new file mode 100644 index 00000000..389a93d0 --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-completion/-chat-completion.html @@ -0,0 +1,66 @@ + + + + + ChatCompletion + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

ChatCompletion

+
+
constructor(id: String, created: Int, model: ModelId, choices: List<ChatChoice>, usage: Usage? = null)
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-completion/choices.html b/openai-core/com.aallam.openai.api.chat/-chat-completion/choices.html new file mode 100644 index 00000000..6441f913 --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-completion/choices.html @@ -0,0 +1,66 @@ + + + + + choices + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

choices

+
+

A list of generated completions.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-completion/created.html b/openai-core/com.aallam.openai.api.chat/-chat-completion/created.html new file mode 100644 index 00000000..193a47c5 --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-completion/created.html @@ -0,0 +1,66 @@ + + + + + created + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

created

+
+

The creation time in epoch milliseconds.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-completion/id.html b/openai-core/com.aallam.openai.api.chat/-chat-completion/id.html new file mode 100644 index 00000000..984dc25a --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-completion/id.html @@ -0,0 +1,66 @@ + + + + + id + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

id

+
+
val id: String

A unique ID assigned to this completion.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-completion/index.html b/openai-core/com.aallam.openai.api.chat/-chat-completion/index.html new file mode 100644 index 00000000..6d1666a5 --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-completion/index.html @@ -0,0 +1,169 @@ + + + + + ChatCompletion + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

ChatCompletion

+
@Serializable
data class ChatCompletion(val id: String, val created: Int, val model: ModelId, val choices: List<ChatChoice>, val usage: Usage? = null)

An object containing a response from the chat completion API.

documentation
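For illustration, a small helper that reads the first generated choice (properties as documented on this page and on ChatChoice):
// Sketch: the first generated message, if any; choice.finishReason explains why generation stopped.
fun ChatCompletion.firstMessage(): ChatMessage? =
    choices.firstOrNull()?.message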

+
+
+
+
+
+

Constructors

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
constructor(id: String, created: Int, model: ModelId, choices: List<ChatChoice>, usage: Usage? = null)
+
+
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+

A list of generated completions.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

The creation time in epoch milliseconds.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
val id: String

A unique ID assigned to this completion.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

The model used.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
val usage: Usage? = null

Text completion usage data.

+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-completion/model.html b/openai-core/com.aallam.openai.api.chat/-chat-completion/model.html new file mode 100644 index 00000000..2806f075 --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-completion/model.html @@ -0,0 +1,66 @@ + + + + + model + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

model

+
+

The model used.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-completion/usage.html b/openai-core/com.aallam.openai.api.chat/-chat-completion/usage.html new file mode 100644 index 00000000..57bf1177 --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-completion/usage.html @@ -0,0 +1,66 @@ + + + + + usage + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

usage

+
+
val usage: Usage? = null

Text completion usage data.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-delta/-chat-delta.html b/openai-core/com.aallam.openai.api.chat/-chat-delta/-chat-delta.html new file mode 100644 index 00000000..61ee9f0a --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-delta/-chat-delta.html @@ -0,0 +1,66 @@ + + + + + ChatDelta + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

ChatDelta

+
+
constructor(role: ChatRole? = null, content: String? = null, functionCall: FunctionCall? = null)
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-delta/content.html b/openai-core/com.aallam.openai.api.chat/-chat-delta/content.html new file mode 100644 index 00000000..91ebdf7a --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-delta/content.html @@ -0,0 +1,66 @@ + + + + + content + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

content

+
+
val content: String? = null

The contents of the message.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-delta/function-call.html b/openai-core/com.aallam.openai.api.chat/-chat-delta/function-call.html new file mode 100644 index 00000000..16f625ed --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-delta/function-call.html @@ -0,0 +1,66 @@ + + + + + functionCall + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

functionCall

+
+

The name and arguments of a function that should be called, as generated by the model.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-delta/index.html b/openai-core/com.aallam.openai.api.chat/-chat-delta/index.html new file mode 100644 index 00000000..fdfac50c --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-delta/index.html @@ -0,0 +1,139 @@ + + + + + ChatDelta + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

ChatDelta

+
@Serializable
data class ChatDelta(val role: ChatRole? = null, val content: String? = null, val functionCall: FunctionCall? = null)

Generated chat message.
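A sketch of how streamed deltas are typically folded into the final message text; chunks is assumed to be a collection of ChatCompletionChunk values received from the streaming API.

// Sketch: concatenate streamed delta contents (`chunks` is an assumed List<ChatCompletionChunk>).
val text = buildString {
    chunks.forEach { chunk ->
        chunk.choices.forEach { choice -> choice.delta.content?.let { append(it) } }
    }
}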

+
+
+
+
+
+

Constructors

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
constructor(role: ChatRole? = null, content: String? = null, functionCall: FunctionCall? = null)
+
+
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
val content: String? = null

The contents of the message.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

The name and arguments of a function that should be called, as generated by the model.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
val role: ChatRole? = null

The role of the author of this message.

+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-delta/role.html b/openai-core/com.aallam.openai.api.chat/-chat-delta/role.html new file mode 100644 index 00000000..75d1e22a --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-delta/role.html @@ -0,0 +1,66 @@ + + + + + role + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

role

+
+
val role: ChatRole? = null

The role of the author of this message.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-message-builder/-chat-message-builder.html b/openai-core/com.aallam.openai.api.chat/-chat-message-builder/-chat-message-builder.html new file mode 100644 index 00000000..c15af78f --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-message-builder/-chat-message-builder.html @@ -0,0 +1,66 @@ + + + + + ChatMessageBuilder + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

ChatMessageBuilder

+
+
constructor()
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-message-builder/build.html b/openai-core/com.aallam.openai.api.chat/-chat-message-builder/build.html new file mode 100644 index 00000000..fd6e0da0 --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-message-builder/build.html @@ -0,0 +1,66 @@ + + + + + build + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

build

+
+

Creates a ChatMessage instance.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-message-builder/content.html b/openai-core/com.aallam.openai.api.chat/-chat-message-builder/content.html new file mode 100644 index 00000000..3277152f --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-message-builder/content.html @@ -0,0 +1,66 @@ + + + + + content + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

content

+
+

The contents of the message.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-message-builder/function-call.html b/openai-core/com.aallam.openai.api.chat/-chat-message-builder/function-call.html new file mode 100644 index 00000000..76f97bf5 --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-message-builder/function-call.html @@ -0,0 +1,66 @@ + + + + + functionCall + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

functionCall

+
+

The name and arguments of a function that should be called, as generated by the model.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-message-builder/index.html b/openai-core/com.aallam.openai.api.chat/-chat-message-builder/index.html new file mode 100644 index 00000000..87e308b4 --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-message-builder/index.html @@ -0,0 +1,173 @@ + + + + + ChatMessageBuilder + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

ChatMessageBuilder

+

Builder of ChatMessage instances.
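A sketch of the builder, assuming build() returns the assembled ChatMessage.

// Sketch: build a ChatMessage step by step.
val message: ChatMessage = ChatMessageBuilder().apply {
    role = ChatRole.User
    content = "Translate 'hello' into French."
}.build()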

+
+
+
+
+
+

Constructors

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
constructor()
+
+
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+

The contents of the message.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

The name and arguments of a function that should be called, as generated by the model.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
var name: String?

The name of the author of this message. name is required if role is ChatRole.Function, and it should be the name of the function whose response is in the content. May contain a-z, A-Z, 0-9, and underscores, with a maximum length of 64 characters.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

The role of the author of this message.

+
+
+
+
+
+
+
+

Functions

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+

Creates a ChatMessage instance.

+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-message-builder/name.html b/openai-core/com.aallam.openai.api.chat/-chat-message-builder/name.html new file mode 100644 index 00000000..93dc2fc4 --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-message-builder/name.html @@ -0,0 +1,66 @@ + + + + + name + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

name

+
+
var name: String?

The name of the author of this message. name is required if role is ChatRole.Function, and it should be the name of the function whose response is in the content. May contain a-z, A-Z, 0-9, and underscores, with a maximum length of 64 characters.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-message-builder/role.html b/openai-core/com.aallam.openai.api.chat/-chat-message-builder/role.html new file mode 100644 index 00000000..c8f2e478 --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-message-builder/role.html @@ -0,0 +1,66 @@ + + + + + role + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

role

+
+

The role of the author of this message.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-message/-chat-message.html b/openai-core/com.aallam.openai.api.chat/-chat-message/-chat-message.html new file mode 100644 index 00000000..31be7d4b --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-message/-chat-message.html @@ -0,0 +1,66 @@ + + + + + ChatMessage + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

ChatMessage

+
+
constructor(role: ChatRole, content: String? = null, name: String? = null, functionCall: FunctionCall? = null)
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-message/content.html b/openai-core/com.aallam.openai.api.chat/-chat-message/content.html new file mode 100644 index 00000000..16dc58c4 --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-message/content.html @@ -0,0 +1,66 @@ + + + + + content + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

content

+
+
val content: String? = null

The contents of the message. This is required for requests, and optional for responses.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-message/function-call.html b/openai-core/com.aallam.openai.api.chat/-chat-message/function-call.html new file mode 100644 index 00000000..1aa4b2bc --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-message/function-call.html @@ -0,0 +1,66 @@ + + + + + functionCall + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

functionCall

+
+

The name and arguments of a function that should be called, as generated by the model.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-message/index.html b/openai-core/com.aallam.openai.api.chat/-chat-message/index.html new file mode 100644 index 00000000..98ab5590 --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-message/index.html @@ -0,0 +1,154 @@ + + + + + ChatMessage + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

ChatMessage

+
@Serializable
data class ChatMessage(val role: ChatRole, val content: String? = null, val name: String? = null, val functionCall: FunctionCall? = null)

The messages to generate chat completions for.
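A sketch of constructing messages directly with the constructor shown above.

// Sketch: a two-message conversation.
val conversation = listOf(
    ChatMessage(role = ChatRole.System, content = "You are a helpful assistant."),
    ChatMessage(role = ChatRole.User, content = "What is the capital of France?"),
)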

+
+
+
+
+
+

Constructors

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
constructor(role: ChatRole, content: String? = null, name: String? = null, functionCall: FunctionCall? = null)
+
+
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
val content: String? = null

The contents of the message. This is required for requests, and optional for responses.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

The name and arguments of a function that should be called, as generated by the model.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
val name: String? = null

The name of the author of this message. name is required if role is ChatRole.Function, and it should be the name of the function whose response is in the content. May contain a-z, A-Z, 0-9, and underscores, with a maximum length of 64 characters.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

The role of the author of this message.

+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-message/name.html b/openai-core/com.aallam.openai.api.chat/-chat-message/name.html new file mode 100644 index 00000000..90887473 --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-message/name.html @@ -0,0 +1,66 @@ + + + + + name + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

name

+
+
val name: String? = null

The name of the author of this message. name is required if role is ChatRole.Function, and it should be the name of the function whose response is in the content. May contain a-z, A-Z, 0-9, and underscores, with a maximum length of 64 characters.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-message/role.html b/openai-core/com.aallam.openai.api.chat/-chat-message/role.html new file mode 100644 index 00000000..bdd639df --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-message/role.html @@ -0,0 +1,66 @@ + + + + + role + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

role

+
+

The role of the author of this message.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-messages-builder/-chat-messages-builder.html b/openai-core/com.aallam.openai.api.chat/-chat-messages-builder/-chat-messages-builder.html new file mode 100644 index 00000000..ea23134f --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-messages-builder/-chat-messages-builder.html @@ -0,0 +1,66 @@ + + + + + ChatMessagesBuilder + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

ChatMessagesBuilder

+
+
constructor()
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-messages-builder/index.html b/openai-core/com.aallam.openai.api.chat/-chat-messages-builder/index.html new file mode 100644 index 00000000..97a8d565 --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-messages-builder/index.html @@ -0,0 +1,109 @@ + + + + + ChatMessagesBuilder + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

ChatMessagesBuilder

+

Creates a list of ChatMessage.
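A sketch of the message DSL; how the final list is retrieved from the builder is an internal detail and not shown here.

// Sketch: declare messages with the message {} DSL.
val builder = ChatMessagesBuilder()
builder.message {
    role = ChatRole.User
    content = "Hello!"
}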

+
+
+
+
+
+

Constructors

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
constructor()
+
+
+
+
+
+
+
+

Functions

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
fun message(block: ChatMessageBuilder.() -> Unit)

Creates a ChatMessage instance.

+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-messages-builder/message.html b/openai-core/com.aallam.openai.api.chat/-chat-messages-builder/message.html new file mode 100644 index 00000000..efb48004 --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-messages-builder/message.html @@ -0,0 +1,66 @@ + + + + + message + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

message

+
+
fun message(block: ChatMessageBuilder.() -> Unit)

Creates a ChatMessage instance.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-role/-chat-role.html b/openai-core/com.aallam.openai.api.chat/-chat-role/-chat-role.html new file mode 100644 index 00000000..80cd3561 --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-role/-chat-role.html @@ -0,0 +1,66 @@ + + + + + ChatRole + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

ChatRole

+
+
constructor(role: String)
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-role/-companion/-assistant.html b/openai-core/com.aallam.openai.api.chat/-chat-role/-companion/-assistant.html new file mode 100644 index 00000000..a5c2b260 --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-role/-companion/-assistant.html @@ -0,0 +1,66 @@ + + + + + Assistant + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

Assistant

+
+ +
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-role/-companion/-function.html b/openai-core/com.aallam.openai.api.chat/-chat-role/-companion/-function.html new file mode 100644 index 00000000..aab8e11f --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-role/-companion/-function.html @@ -0,0 +1,66 @@ + + + + + Function + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

Function

+
+ +
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-role/-companion/-system.html b/openai-core/com.aallam.openai.api.chat/-chat-role/-companion/-system.html new file mode 100644 index 00000000..27fc21cf --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-role/-companion/-system.html @@ -0,0 +1,66 @@ + + + + + System + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

System

+
+ +
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-role/-companion/-user.html b/openai-core/com.aallam.openai.api.chat/-chat-role/-companion/-user.html new file mode 100644 index 00000000..72139a6d --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-role/-companion/-user.html @@ -0,0 +1,66 @@ + + + + + User + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

User

+
+ +
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-role/-companion/index.html b/openai-core/com.aallam.openai.api.chat/-chat-role/-companion/index.html new file mode 100644 index 00000000..38f6637b --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-role/-companion/index.html @@ -0,0 +1,135 @@ + + + + + Companion + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

Companion

+
object Companion
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+ +
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+ +
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+ +
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+ +
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-role/index.html b/openai-core/com.aallam.openai.api.chat/-chat-role/index.html new file mode 100644 index 00000000..2ccfe36f --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-role/index.html @@ -0,0 +1,128 @@ + + + + + ChatRole + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

ChatRole

+
@Serializable
value class ChatRole(val role: String)

The role of the author of this message.
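ChatRole wraps a plain role string; a sketch using the companion values and the constructor.

// Sketch: predefined roles vs. an arbitrary role string.
val user: ChatRole = ChatRole.User           // companion-provided value
val custom: ChatRole = ChatRole("reviewer")  // any string accepted by the constructor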

+
+
+
+
+
+

Constructors

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
constructor(role: String)
+
+
+
+
+
+
+
+

Types

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
object Companion
+
+
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+ +
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-chat-role/role.html b/openai-core/com.aallam.openai.api.chat/-chat-role/role.html new file mode 100644 index 00000000..3231a8bc --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-chat-role/role.html @@ -0,0 +1,66 @@ + + + + + role + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

role

+
+ +
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-function-call/-function-call.html b/openai-core/com.aallam.openai.api.chat/-function-call/-function-call.html new file mode 100644 index 00000000..3d3c5c68 --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-function-call/-function-call.html @@ -0,0 +1,66 @@ + + + + + FunctionCall + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

FunctionCall

+
+
constructor(nameOrNull: String? = null, argumentsOrNull: String? = null)
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-function-call/arguments-as-json-or-null.html b/openai-core/com.aallam.openai.api.chat/-function-call/arguments-as-json-or-null.html new file mode 100644 index 00000000..28ed4a57 --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-function-call/arguments-as-json-or-null.html @@ -0,0 +1,66 @@ + + + + + argumentsAsJsonOrNull + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

argumentsAsJsonOrNull

+
+
fun argumentsAsJsonOrNull(json: Json = Json): JsonObject?

Decodes the arguments JSON string into a JsonObject. If arguments is null, the function will return null.

Parameters

json

The Json object to be used for decoding, defaults to a default Json instance

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-function-call/arguments-as-json.html b/openai-core/com.aallam.openai.api.chat/-function-call/arguments-as-json.html new file mode 100644 index 00000000..d086b22b --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-function-call/arguments-as-json.html @@ -0,0 +1,66 @@ + + + + + argumentsAsJson + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

argumentsAsJson

+
+
fun argumentsAsJson(json: Json = Json): JsonObject

Decodes the arguments JSON string into a JsonObject. For a nullable variant that returns null when arguments is null, see argumentsAsJsonOrNull.

Parameters

json

The Json object to be used for decoding, defaults to a default Json instance

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-function-call/arguments-or-null.html b/openai-core/com.aallam.openai.api.chat/-function-call/arguments-or-null.html new file mode 100644 index 00000000..433c3f5e --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-function-call/arguments-or-null.html @@ -0,0 +1,66 @@ + + + + + argumentsOrNull + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

argumentsOrNull

+
+
val argumentsOrNull: String? = null

The arguments to call the function with, as generated by the model in JSON format. Note that the model does not always generate valid JSON, and may hallucinate parameters not defined by your function schema. Validate the arguments in your code before calling your function.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-function-call/arguments.html b/openai-core/com.aallam.openai.api.chat/-function-call/arguments.html new file mode 100644 index 00000000..153c3b34 --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-function-call/arguments.html @@ -0,0 +1,66 @@ + + + + + arguments + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

arguments

+
+

The arguments to call the function with, as generated by the model in JSON format. Note that the model does not always generate valid JSON, and may hallucinate parameters not defined by your function schema. Validate the arguments in your code before calling your function.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-function-call/index.html b/openai-core/com.aallam.openai.api.chat/-function-call/index.html new file mode 100644 index 00000000..77164a86 --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-function-call/index.html @@ -0,0 +1,188 @@ + + + + + FunctionCall + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

FunctionCall

+
@Serializable
data class FunctionCall(val nameOrNull: String? = null, val argumentsOrNull: String? = null)

Represents a function invocation with its name and serialized arguments as generated by the model.

In scenarios such as a streaming variant of the chat API, both nameOrNull and argumentsOrNull can be null.
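A sketch of decoding the generated arguments; the function name and JSON payload are examples, and jsonPrimitive comes from kotlinx.serialization.json.

// Sketch: read a single argument out of a FunctionCall (example data).
val call = FunctionCall(nameOrNull = "currentWeather", argumentsOrNull = """{"location": "Paris"}""")
val args = call.argumentsAsJson()                       // JsonObject; argumentsAsJsonOrNull is the nullable variant
val location = args["location"]?.jsonPrimitive?.content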

+
+
+
+
+
+

Constructors

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
constructor(nameOrNull: String? = null, argumentsOrNull: String? = null)
+
+
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+

The arguments to call the function with, as generated by the model in JSON format. Note that the model does not always generate valid JSON, and may hallucinate parameters not defined by your function schema. Validate the arguments in your code before calling your function.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
val argumentsOrNull: String? = null

The arguments to call the function with, as generated by the model in JSON format. Note that the model does not always generate valid JSON, and may hallucinate parameters not defined by your function schema. Validate the arguments in your code before calling your function.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

The name of the function to call.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
val nameOrNull: String? = null

The name of the function to call.

+
+
+
+
+
+
+
+

Functions

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
fun argumentsAsJson(json: Json = Json): JsonObject

Decodes the arguments JSON string into a JsonObject. For a nullable variant that returns null when arguments is null, see argumentsAsJsonOrNull.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
fun argumentsAsJsonOrNull(json: Json = Json): JsonObject?

Decodes the arguments JSON string into a JsonObject. If arguments is null, the function will return null.

+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-function-call/name-or-null.html b/openai-core/com.aallam.openai.api.chat/-function-call/name-or-null.html new file mode 100644 index 00000000..51270a7e --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-function-call/name-or-null.html @@ -0,0 +1,66 @@ + + + + + nameOrNull + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

nameOrNull

+
+
val nameOrNull: String? = null

The name of the function to call.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-function-call/name.html b/openai-core/com.aallam.openai.api.chat/-function-call/name.html new file mode 100644 index 00000000..3a5e8c29 --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-function-call/name.html @@ -0,0 +1,66 @@ + + + + + name + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

name

+
+

The name of the function to call.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-function-mode/-companion/-auto.html b/openai-core/com.aallam.openai.api.chat/-function-mode/-companion/-auto.html new file mode 100644 index 00000000..d50c383e --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-function-mode/-companion/-auto.html @@ -0,0 +1,66 @@ + + + + + Auto + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

Auto

+
+

Represents the auto mode.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-function-mode/-companion/-none.html b/openai-core/com.aallam.openai.api.chat/-function-mode/-companion/-none.html new file mode 100644 index 00000000..2b4e7355 --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-function-mode/-companion/-none.html @@ -0,0 +1,66 @@ + + + + + None + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

None

+
+

Represents the none mode.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-function-mode/-companion/index.html b/openai-core/com.aallam.openai.api.chat/-function-mode/-companion/index.html new file mode 100644 index 00000000..161531f0 --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-function-mode/-companion/index.html @@ -0,0 +1,105 @@ + + + + + Companion + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

Companion

+
object Companion

Provides default function call modes.

+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+

Represents the auto mode.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

Represents the none mode.

+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-function-mode/-default/-default.html b/openai-core/com.aallam.openai.api.chat/-function-mode/-default/-default.html new file mode 100644 index 00000000..8c7db1e1 --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-function-mode/-default/-default.html @@ -0,0 +1,66 @@ + + + + + Default + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

Default

+
+
constructor(value: String)
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-function-mode/-default/index.html b/openai-core/com.aallam.openai.api.chat/-function-mode/-default/index.html new file mode 100644 index 00000000..46c00054 --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-function-mode/-default/index.html @@ -0,0 +1,109 @@ + + + + + Default + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

Default

+
value class Default(val value: String) : FunctionMode

Represents a function call mode. The value can be any string representing a specific function call mode.

+
+
+
+
+
+

Constructors

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
constructor(value: String)
+
+
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+ +
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-function-mode/-default/value.html b/openai-core/com.aallam.openai.api.chat/-function-mode/-default/value.html new file mode 100644 index 00000000..63f4f823 --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-function-mode/-default/value.html @@ -0,0 +1,66 @@ + + + + + value + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

value

+
+ +
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-function-mode/-named/-named.html b/openai-core/com.aallam.openai.api.chat/-function-mode/-named/-named.html new file mode 100644 index 00000000..14f035f5 --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-function-mode/-named/-named.html @@ -0,0 +1,66 @@ + + + + + Named + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

Named

+
+
constructor(name: String)
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-function-mode/-named/index.html b/openai-core/com.aallam.openai.api.chat/-function-mode/-named/index.html new file mode 100644 index 00000000..91b8b4c2 --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-function-mode/-named/index.html @@ -0,0 +1,109 @@ + + + + + Named + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

Named

+
@Serializable
data class Named(val name: String) : FunctionMode

Represents a named function call mode. The name indicates a specific function that the model will call.

+
+
+
+
+
+

Constructors

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
constructor(name: String)
+
+
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+

The name of the function to call.

+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-function-mode/-named/name.html b/openai-core/com.aallam.openai.api.chat/-function-mode/-named/name.html new file mode 100644 index 00000000..0b5df619 --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-function-mode/-named/name.html @@ -0,0 +1,66 @@ + + + + + name + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

name

+
+ +
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-function-mode/index.html b/openai-core/com.aallam.openai.api.chat/-function-mode/index.html new file mode 100644 index 00000000..1fc07f35 --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-function-mode/index.html @@ -0,0 +1,120 @@ + + + + + FunctionMode + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

FunctionMode

+
@Serializable(with = FunctionModeSerializer::class)
interface FunctionMode

This interface determines how the model handles function calls.

There are several modes of operation:

  • Default: In this mode, the model either does not invoke any function (None) or decides on its own (Auto) whether to call a function or respond directly to the end-user. Auto becomes the default mode when functions are specified.

  • Named: In this mode, the model will call a specific function, denoted by the name attribute.
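A sketch of the three modes as request values; the function name is an example.

// Sketch: choosing a function call mode.
val auto: FunctionMode = FunctionMode.Auto                        // model decides
val none: FunctionMode = FunctionMode.None                        // never call a function
val named: FunctionMode = FunctionMode.Named("currentWeather")    // always call this function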

Inheritors

+
+
+
+
+
+

Types

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
object Companion

Provides default function call modes.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
value class Default(val value: String) : FunctionMode

Represents a function call mode. The value can be any string representing a specific function call mode.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
@Serializable
data class Named(val name: String) : FunctionMode

Represents a named function call mode. The name indicates a specific function that the model will call.

+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-functions-builder/-functions-builder.html b/openai-core/com.aallam.openai.api.chat/-functions-builder/-functions-builder.html new file mode 100644 index 00000000..79252a1f --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-functions-builder/-functions-builder.html @@ -0,0 +1,66 @@ + + + + + FunctionsBuilder + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

FunctionsBuilder

+
+
constructor()
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-functions-builder/function.html b/openai-core/com.aallam.openai.api.chat/-functions-builder/function.html new file mode 100644 index 00000000..c23c11f4 --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-functions-builder/function.html @@ -0,0 +1,66 @@ + + + + + function + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

function

+
+

Creates a ChatCompletionFunction instance.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-functions-builder/index.html b/openai-core/com.aallam.openai.api.chat/-functions-builder/index.html new file mode 100644 index 00000000..953a5d25 --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-functions-builder/index.html @@ -0,0 +1,109 @@ + + + + + FunctionsBuilder + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

FunctionsBuilder

+ +
+
+
+
+
+

Constructors

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
constructor()
+
+
+
+
+
+
+
+

Functions

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+

Creates a ChatCompletionFunction instance.

+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-parameters/-companion/-empty.html b/openai-core/com.aallam.openai.api.chat/-parameters/-companion/-empty.html new file mode 100644 index 00000000..e0850ef2 --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-parameters/-companion/-empty.html @@ -0,0 +1,66 @@ + + + + + Empty + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

Empty

+
+

Represents a function with no parameters. Equivalent to:

{"type": "object", "properties": {}}
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-parameters/-companion/build-json-object.html b/openai-core/com.aallam.openai.api.chat/-parameters/-companion/build-json-object.html new file mode 100644 index 00000000..f7daa71c --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-parameters/-companion/build-json-object.html @@ -0,0 +1,66 @@ + + + + + buildJsonObject + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

buildJsonObject

+
+
fun buildJsonObject(block: JsonObjectBuilder.() -> Unit): Parameters

Creates a Parameters instance using a JsonObjectBuilder.

Parameters

block

The JsonObjectBuilder to use.
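A sketch of a JSON Schema for a single required string parameter; the property names are examples, and the put/putJsonObject/putJsonArray helpers come from kotlinx.serialization.json.

// Sketch: schema for one required "location" string parameter.
val params: Parameters = Parameters.buildJsonObject {
    put("type", "object")
    putJsonObject("properties") {
        putJsonObject("location") {
            put("type", "string")
            put("description", "The city and country, e.g. Paris, France")
        }
    }
    putJsonArray("required") { add("location") }
}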

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-parameters/-companion/from-json-string.html b/openai-core/com.aallam.openai.api.chat/-parameters/-companion/from-json-string.html new file mode 100644 index 00000000..eeb994bc --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-parameters/-companion/from-json-string.html @@ -0,0 +1,66 @@ + + + + + fromJsonString + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

fromJsonString

+
+

Creates a Parameters instance from a JSON string.

Parameters

json

The JSON string to parse.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-parameters/-companion/index.html b/openai-core/com.aallam.openai.api.chat/-parameters/-companion/index.html new file mode 100644 index 00000000..1382f213 --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-parameters/-companion/index.html @@ -0,0 +1,124 @@ + + + + + Companion + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

Companion

+
object Companion
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+

Represents a function with no parameters. Equivalent to:

+
+
+
+
+
+
+
+

Functions

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
fun buildJsonObject(block: JsonObjectBuilder.() -> Unit): Parameters

Creates a Parameters instance using a JsonObjectBuilder.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

Creates a Parameters instance from a JSON string.

+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-parameters/-json-data-serializer/descriptor.html b/openai-core/com.aallam.openai.api.chat/-parameters/-json-data-serializer/descriptor.html new file mode 100644 index 00000000..b24e6091 --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-parameters/-json-data-serializer/descriptor.html @@ -0,0 +1,66 @@ + + + + + descriptor + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

descriptor

+
+
open override val descriptor: SerialDescriptor
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-parameters/-json-data-serializer/deserialize.html b/openai-core/com.aallam.openai.api.chat/-parameters/-json-data-serializer/deserialize.html new file mode 100644 index 00000000..ef9bc6ef --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-parameters/-json-data-serializer/deserialize.html @@ -0,0 +1,66 @@ + + + + + deserialize + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

deserialize

+
+
open override fun deserialize(decoder: Decoder): Parameters

Deserializes Parameters from JSON format.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-parameters/-json-data-serializer/index.html b/openai-core/com.aallam.openai.api.chat/-parameters/-json-data-serializer/index.html new file mode 100644 index 00000000..3557afd6 --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-parameters/-json-data-serializer/index.html @@ -0,0 +1,124 @@ + + + + + JsonDataSerializer + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

JsonDataSerializer

+
object JsonDataSerializer : KSerializer<Parameters>

Custom serializer for the Parameters class.

+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
open override val descriptor: SerialDescriptor
+
+
+
+
+
+
+
+

Functions

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
open override fun deserialize(decoder: Decoder): Parameters

Deserializes Parameters from JSON format.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
open override fun serialize(encoder: Encoder, value: Parameters)

Serializes Parameters to JSON format.

+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-parameters/-json-data-serializer/serialize.html b/openai-core/com.aallam.openai.api.chat/-parameters/-json-data-serializer/serialize.html new file mode 100644 index 00000000..fba69c49 --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-parameters/-json-data-serializer/serialize.html @@ -0,0 +1,66 @@ + + + + + serialize + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

serialize

+
+
open override fun serialize(encoder: Encoder, value: Parameters)

Serializes Parameters to JSON format.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-parameters/-parameters.html b/openai-core/com.aallam.openai.api.chat/-parameters/-parameters.html new file mode 100644 index 00000000..d425ea54 --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-parameters/-parameters.html @@ -0,0 +1,66 @@ + + + + + Parameters + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

Parameters

+
+
constructor(schema: JsonElement)
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-parameters/index.html b/openai-core/com.aallam.openai.api.chat/-parameters/index.html new file mode 100644 index 00000000..16e23c53 --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-parameters/index.html @@ -0,0 +1,143 @@ + + + + + Parameters + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

Parameters

+
data class Parameters(val schema: JsonElement)

Represents parameters that a function accepts, described as a JSON Schema object.

+
+
+
+
+
+

Constructors

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
constructor(schema: JsonElement)
+
+
+
+
+
+
+
+

Types

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
object Companion
+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
object JsonDataSerializer : KSerializer<Parameters>

Custom serializer for the Parameters class.

+
+
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
val schema: JsonElement

JSON Schema object.

+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/-parameters/schema.html b/openai-core/com.aallam.openai.api.chat/-parameters/schema.html new file mode 100644 index 00000000..cf8c891c --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/-parameters/schema.html @@ -0,0 +1,66 @@ + + + + + schema + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

schema

+
+
val schema: JsonElement
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/chat-completion-function.html b/openai-core/com.aallam.openai.api.chat/chat-completion-function.html new file mode 100644 index 00000000..319332ff --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/chat-completion-function.html @@ -0,0 +1,66 @@ + + + + + chatCompletionFunction + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

chatCompletionFunction

+
+

The function to generate chat completion function instances.
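A sketch using the ChatCompletionFunction constructor from this package (this helper is the builder-style way to produce the same instances); the function name and schema are examples.

// Sketch: declare a function the model may call (constructor form, example values).
val weatherFunction = ChatCompletionFunction(
    name = "currentWeather",
    description = "Get the current weather for a location",
    parameters = Parameters.fromJsonString("""{"type": "object", "properties": {}}"""),
)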

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/chat-completion-request.html b/openai-core/com.aallam.openai.api.chat/chat-completion-request.html new file mode 100644 index 00000000..11c6178c --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/chat-completion-request.html @@ -0,0 +1,66 @@ + + + + + chatCompletionRequest + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

chatCompletionRequest

+
+

The messages to generate chat completions for.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/chat-message.html b/openai-core/com.aallam.openai.api.chat/chat-message.html new file mode 100644 index 00000000..331b2e23 --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/chat-message.html @@ -0,0 +1,66 @@ + + + + + chatMessage + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

chatMessage

+
+

The messages to generate chat completions for.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.chat/index.html b/openai-core/com.aallam.openai.api.chat/index.html new file mode 100644 index 00000000..01c86505 --- /dev/null +++ b/openai-core/com.aallam.openai.api.chat/index.html @@ -0,0 +1,378 @@ + + + + + com.aallam.openai.api.chat + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

Package-level declarations

+
+
+
+
+
+

Types

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
@Serializable
data class ChatChoice(val index: Int, val message: ChatMessage, val finishReason: FinishReason)

A completion generated by OpenAI.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
@Serializable
data class ChatChunk(val index: Int, val delta: ChatDelta, val finishReason: FinishReason?)

A completion generated by OpenAI.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
@Serializable
data class ChatCompletion(val id: String, val created: Int, val model: ModelId, val choices: List<ChatChoice>, val usage: Usage? = null)

An object containing a response from the chat completion API.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
@Serializable
data class ChatCompletionChunk(val id: String, val created: Int, val model: ModelId, val choices: List<ChatChunk>, val usage: Usage? = null)

An object containing a response from the streaming chat completion API.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
@Serializable
data class ChatCompletionFunction(val name: String, val description: String? = null, val parameters: Parameters)
+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+ +
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
@Serializable
class ChatCompletionRequest(val model: ModelId, val messages: List<ChatMessage>, val temperature: Double? = null, val topP: Double? = null, val n: Int? = null, val stop: List<String>? = null, val maxTokens: Int? = null, val presencePenalty: Double? = null, val frequencyPenalty: Double? = null, val logitBias: Map<String, Int>? = null, val user: String? = null, val functions: List<ChatCompletionFunction>? = null, val functionCall: FunctionMode? = null)

Creates a completion for the chat message.
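A sketch assembling a request with the signature above; the model id is an example.

// Sketch: a minimal chat completion request (model id is an example).
val request = ChatCompletionRequest(
    model = ModelId("gpt-3.5-turbo"),
    messages = listOf(
        ChatMessage(role = ChatRole.System, content = "You are a helpful assistant."),
        ChatMessage(role = ChatRole.User, content = "Hello!"),
    ),
    temperature = 0.7,
)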

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

Creates a completion for the chat message.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
@Serializable
data class ChatDelta(val role: ChatRole? = null, val content: String? = null, val functionCall: FunctionCall? = null)

Generated chat message.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
@Serializable
data class ChatMessage(val role: ChatRole, val content: String? = null, val name: String? = null, val functionCall: FunctionCall? = null)

The messages to generate chat completions for.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

Builder of ChatMessage instances.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

Creates a list of ChatMessage.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
@Serializable
value class ChatRole(val role: String)

The role of the author of this message.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
@Serializable
data class FunctionCall(val nameOrNull: String? = null, val argumentsOrNull: String? = null)

Represents a function invocation with its name and serialized arguments as generated by the model.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
@Serializable(with = FunctionModeSerializer::class)
interface FunctionMode

This interface determines how the model handles function calls.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+ +
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
data class Parameters(val schema: JsonElement)

Represents parameters that a function accepts, described as a JSON Schema object.

+
+
+
+
+
+
+
+

Functions

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+

The function to generate chat completion function instances.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

The messages to generate chat completions for.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

The messages to generate chat completions for.

+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.completion/-choice/--index--.html b/openai-core/com.aallam.openai.api.completion/-choice/--index--.html new file mode 100644 index 00000000..10c3d77d --- /dev/null +++ b/openai-core/com.aallam.openai.api.completion/-choice/--index--.html @@ -0,0 +1,66 @@ + + + + + index + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

index

+
+
val index: Int

The index of this completion in the returned list.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.completion/-choice/-choice.html b/openai-core/com.aallam.openai.api.completion/-choice/-choice.html new file mode 100644 index 00000000..c565e52a --- /dev/null +++ b/openai-core/com.aallam.openai.api.completion/-choice/-choice.html @@ -0,0 +1,66 @@ + + + + + Choice + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

Choice

+
+
constructor(text: String, index: Int, logprobs: Logprobs? = null, finishReason: FinishReason)
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.completion/-choice/finish-reason.html b/openai-core/com.aallam.openai.api.completion/-choice/finish-reason.html new file mode 100644 index 00000000..ff82ee55 --- /dev/null +++ b/openai-core/com.aallam.openai.api.completion/-choice/finish-reason.html @@ -0,0 +1,66 @@ + + + + + finishReason + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

finishReason

+
+

The reason the model stopped generating tokens. This will be FinishReason.Stop if the model hit a natural stop point or a provided stop sequence, or FinishReason.Length if the maximum number of tokens specified in the request was reached.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.completion/-choice/index.html b/openai-core/com.aallam.openai.api.completion/-choice/index.html new file mode 100644 index 00000000..d7761454 --- /dev/null +++ b/openai-core/com.aallam.openai.api.completion/-choice/index.html @@ -0,0 +1,154 @@ + + + + + Choice + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

Choice

+
@Serializable
data class Choice(val text: String, val index: Int, val logprobs: Logprobs? = null, val finishReason: FinishReason)

A completion generated by GPT-3.

documentation

+
+
+
+
+
+

Constructors

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
constructor(text: String, index: Int, logprobs: Logprobs? = null, finishReason: FinishReason)
+
+
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+

The reason the model stopped generating tokens. This will be FinishReason.Stop if the model hit a natural stop point or a provided stop sequence, or FinishReason.Length if the maximum number of tokens specified in the request was reached.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
val index: Int

The index of this completion in the returned list.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
val logprobs: Logprobs? = null

The log probabilities of the chosen tokens and the top CompletionRequest.logprobs tokens.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

The generated text. Will include the prompt if CompletionRequest.echo is true

+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.completion/-choice/logprobs.html b/openai-core/com.aallam.openai.api.completion/-choice/logprobs.html new file mode 100644 index 00000000..5dbf6774 --- /dev/null +++ b/openai-core/com.aallam.openai.api.completion/-choice/logprobs.html @@ -0,0 +1,66 @@ + + + + + logprobs + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

logprobs

+
+
val logprobs: Logprobs? = null

Deprecated

removed from the response

The log probabilities of the chosen tokens and the top CompletionRequest.logprobs tokens.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.completion/-choice/text.html b/openai-core/com.aallam.openai.api.completion/-choice/text.html new file mode 100644 index 00000000..d7c1c725 --- /dev/null +++ b/openai-core/com.aallam.openai.api.completion/-choice/text.html @@ -0,0 +1,66 @@ + + + + + text + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

text

+
+

The generated text. Will include the prompt if CompletionRequest.echo is true

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.completion/-completion-request-builder/-completion-request-builder.html b/openai-core/com.aallam.openai.api.completion/-completion-request-builder/-completion-request-builder.html new file mode 100644 index 00000000..2a8363d4 --- /dev/null +++ b/openai-core/com.aallam.openai.api.completion/-completion-request-builder/-completion-request-builder.html @@ -0,0 +1,66 @@ + + + + + CompletionRequestBuilder + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

CompletionRequestBuilder

+
+
constructor()
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.completion/-completion-request-builder/best-of.html b/openai-core/com.aallam.openai.api.completion/-completion-request-builder/best-of.html new file mode 100644 index 00000000..1dc9ad05 --- /dev/null +++ b/openai-core/com.aallam.openai.api.completion/-completion-request-builder/best-of.html @@ -0,0 +1,66 @@ + + + + + bestOf + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

bestOf

+
+
var bestOf: Int?

Generates bestOf completions server-side and returns the "best" (the one with the highest log probability per token). Results cannot be streamed.

When used with n, bestOf controls the number of candidate completions and n specifies how many to return; bestOf must be greater than n.

Note: Because this parameter generates many completions, it can quickly consume your token quota. Use carefully and ensure that you have reasonable settings for maxTokens and stop.

Defaults to 1

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.completion/-completion-request-builder/build.html b/openai-core/com.aallam.openai.api.completion/-completion-request-builder/build.html new file mode 100644 index 00000000..21300454 --- /dev/null +++ b/openai-core/com.aallam.openai.api.completion/-completion-request-builder/build.html @@ -0,0 +1,66 @@ + + + + + build + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

build

+
+ +
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.completion/-completion-request-builder/echo.html b/openai-core/com.aallam.openai.api.completion/-completion-request-builder/echo.html new file mode 100644 index 00000000..b21b84c4 --- /dev/null +++ b/openai-core/com.aallam.openai.api.completion/-completion-request-builder/echo.html @@ -0,0 +1,66 @@ + + + + + echo + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

echo

+
+

Echo back the prompt in addition to the completion.

Defaults to false.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.completion/-completion-request-builder/frequency-penalty.html b/openai-core/com.aallam.openai.api.completion/-completion-request-builder/frequency-penalty.html new file mode 100644 index 00000000..a6e44b95 --- /dev/null +++ b/openai-core/com.aallam.openai.api.completion/-completion-request-builder/frequency-penalty.html @@ -0,0 +1,66 @@ + + + + + frequencyPenalty + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

frequencyPenalty

+
+

Number between 0 and 1 (default 0) that penalizes new tokens based on their existing frequency in the text so far. Decreases the model's likelihood to repeat the same line verbatim.

Defaults to 0.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.completion/-completion-request-builder/index.html b/openai-core/com.aallam.openai.api.completion/-completion-request-builder/index.html new file mode 100644 index 00000000..53056c1c --- /dev/null +++ b/openai-core/com.aallam.openai.api.completion/-completion-request-builder/index.html @@ -0,0 +1,338 @@ + + + + + CompletionRequestBuilder + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

CompletionRequestBuilder

+

Builder of CompletionRequest instances.
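A sketch of the builder, assuming build() returns the assembled CompletionRequest and that prompt accepts a plain string; the model id is an example.

// Sketch: assemble a text completion request with the builder.
val request = CompletionRequestBuilder().apply {
    model = ModelId("text-davinci-003")
    prompt = "Write a haiku about Kotlin."
    maxTokens = 32
    temperature = 0.7
}.build()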

+
+
+
+
+
+

Constructors

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
constructor()
+
+
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
var bestOf: Int?

Generates bestOf completions server-side and returns the "best" (the one with the highest log probability per token). Results cannot be streamed.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

Echo back the prompt in addition to the completion.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

Number between 0 and 1 (default 0) that penalizes new tokens based on their existing frequency in the text so far. Decreases the model's likelihood to repeat the same line verbatim.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

Modify the likelihood of specified tokens appearing in the completion.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

Include the log probabilities on the logprobs most likely tokens, as well as the chosen tokens. For example, if logprobs is 10, the API will return a list of the 10 most likely tokens. The API will always return the logprob of the sampled token, so there may be up to logprobs+1 elements in the response.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

The maximum number of tokens to generate. Requests can use up to 2048 tokens shared between prompt and completion. (One token is roughly 4 characters for normal English text)

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

ID of the model to use.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
var n: Int?

How many completions to generate for each prompt.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

Number between 0 and 1 (default 0) that penalizes new tokens based on whether they appear in the text so far. Increases the model's likelihood to talk about new topics.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

The prompt(s) to generate completions for, encoded as a string, a list of strings, or a list of token lists.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

Up to 4 sequences where the API will stop generating further tokens. The returned text will not contain the stop sequence.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

The suffix that comes after a completion of inserted text.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

What sampling temperature to use. Higher values mean the model will take more risks. Try 0.9 for more creative applications, and 0 (argmax sampling) for ones with a well-defined answer.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
var topP: Double?

An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
var user: String?

A unique identifier representing your end-user, which will help OpenAI to monitor and detect abuse.

+
+
+
+
+
+
+
+

Functions

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+ +
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.completion/-completion-request-builder/logit-bias.html b/openai-core/com.aallam.openai.api.completion/-completion-request-builder/logit-bias.html new file mode 100644 index 00000000..57a2d4de --- /dev/null +++ b/openai-core/com.aallam.openai.api.completion/-completion-request-builder/logit-bias.html @@ -0,0 +1,66 @@ + + + + + logitBias + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

logitBias

+
+

Modify the likelihood of specified tokens appearing in the completion.

Accepts a JSON object that maps tokens (specified by their token ID in the GPT tokenizer) to an associated bias value from -100 to 100. You can use the OpenAI tokenizer tool (which works for both GPT-2 and GPT-3) to convert text to token IDs. Mathematically, the bias is added to the logits generated by the model prior to sampling. The exact effect will vary per model, but values between -1 and 1 should decrease or increase likelihood of selection; values like -100 or 100 should result in a ban or exclusive selection of the relevant token.

As an example, you can pass {"50256": -100} to prevent the <|endoftext|> token from being generated.

Defaults to null.
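Inside the builder, the JSON example above translates to the following, assuming the property is a Map<String, Int>.

// Sketch: ban the <|endoftext|> token (id 50256), mirroring the JSON example above.
logitBias = mapOf("50256" to -100)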

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.completion/-completion-request-builder/logprobs.html b/openai-core/com.aallam.openai.api.completion/-completion-request-builder/logprobs.html new file mode 100644 index 00000000..e7997433 --- /dev/null +++ b/openai-core/com.aallam.openai.api.completion/-completion-request-builder/logprobs.html @@ -0,0 +1,66 @@ + + + + + logprobs + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

logprobs

+
+

Include the log probabilities on the logprobs most likely tokens, as well as the chosen tokens. For example, if logprobs is 10, the API will return a list of the 10 most likely tokens. The API will always return the logprob of the sampled token, so there may be up to logprobs+1 elements in the response.

Defaults to null.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.completion/-completion-request-builder/max-tokens.html b/openai-core/com.aallam.openai.api.completion/-completion-request-builder/max-tokens.html new file mode 100644 index 00000000..11bc0da1 --- /dev/null +++ b/openai-core/com.aallam.openai.api.completion/-completion-request-builder/max-tokens.html @@ -0,0 +1,66 @@ + + + + + maxTokens + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

maxTokens

+
+

The maximum number of tokens to generate. Requests can use up to 2048 tokens shared between prompt and completion. (One token is roughly 4 characters for normal English text)

Defaults to 16.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.completion/-completion-request-builder/model.html b/openai-core/com.aallam.openai.api.completion/-completion-request-builder/model.html new file mode 100644 index 00000000..b7225057 --- /dev/null +++ b/openai-core/com.aallam.openai.api.completion/-completion-request-builder/model.html @@ -0,0 +1,66 @@ + + + + + model + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

model

+
+

ID of the model to use.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.completion/-completion-request-builder/n.html b/openai-core/com.aallam.openai.api.completion/-completion-request-builder/n.html new file mode 100644 index 00000000..6118744e --- /dev/null +++ b/openai-core/com.aallam.openai.api.completion/-completion-request-builder/n.html @@ -0,0 +1,66 @@ + + + + + n + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

n

+
+
var n: Int?

How many completions to generate for each prompt.

Note: Because this parameter generates many completions, it can quickly consume your token quota. Use carefully and ensure that you have reasonable settings for maxTokens and stop.

Defaults to 1.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.completion/-completion-request-builder/presence-penalty.html b/openai-core/com.aallam.openai.api.completion/-completion-request-builder/presence-penalty.html new file mode 100644 index 00000000..d67f306d --- /dev/null +++ b/openai-core/com.aallam.openai.api.completion/-completion-request-builder/presence-penalty.html @@ -0,0 +1,66 @@ + + + + + presencePenalty + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

presencePenalty

+
+

Number between 0 and 1 (default 0) that penalizes new tokens based on whether they appear in the text so far. Increases the model's likelihood to talk about new topics.

Defaults to 0.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.completion/-completion-request-builder/prompt.html b/openai-core/com.aallam.openai.api.completion/-completion-request-builder/prompt.html new file mode 100644 index 00000000..5b676967 --- /dev/null +++ b/openai-core/com.aallam.openai.api.completion/-completion-request-builder/prompt.html @@ -0,0 +1,66 @@ + + + + + prompt + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

prompt

+
+

The prompt(s) to generate completions for, encoded as a string, a list of strings, or a list of token lists.

Note that <|endoftext|> is the document separator that the model sees during training, so if a prompt is not specified the model will generate as if from the beginning of a new document.

Defaults to <|endoftext|>.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.completion/-completion-request-builder/stop.html b/openai-core/com.aallam.openai.api.completion/-completion-request-builder/stop.html new file mode 100644 index 00000000..4437127c --- /dev/null +++ b/openai-core/com.aallam.openai.api.completion/-completion-request-builder/stop.html @@ -0,0 +1,66 @@ + + + + + stop + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

stop

+
+

Up to 4 sequences where the API will stop generating further tokens. The returned text will not contain the stop sequence.

Defaults to null.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.completion/-completion-request-builder/suffix.html b/openai-core/com.aallam.openai.api.completion/-completion-request-builder/suffix.html new file mode 100644 index 00000000..1f6f83ed --- /dev/null +++ b/openai-core/com.aallam.openai.api.completion/-completion-request-builder/suffix.html @@ -0,0 +1,66 @@ + + + + + suffix + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

suffix

+
+

The suffix that comes after a completion of inserted text.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.completion/-completion-request-builder/temperature.html b/openai-core/com.aallam.openai.api.completion/-completion-request-builder/temperature.html new file mode 100644 index 00000000..ef9a3ed9 --- /dev/null +++ b/openai-core/com.aallam.openai.api.completion/-completion-request-builder/temperature.html @@ -0,0 +1,66 @@ + + + + + temperature + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

temperature

+
+

What sampling temperature to use. Higher values mean the model will take more risks. Try 0.9 for more creative applications, and 0 (argmax sampling) for ones with a well-defined answer.

We generally recommend using this or topP but not both.

Defaults to 1.
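A sketch of the guidance above (set temperature or topP, not both), assuming the completionRequest builder function documented later in this diff takes a CompletionRequestBuilder receiver; the model id is a placeholder.

import com.aallam.openai.api.completion.completionRequest
import com.aallam.openai.api.model.ModelId

// Sketch: a creative-writing request that raises temperature and leaves topP at its default.
val creativeRequest = completionRequest {
    model = ModelId("text-davinci-003") // placeholder model id
    prompt = "Write a limerick about Kotlin."
    temperature = 0.9
}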

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.completion/-completion-request-builder/top-p.html b/openai-core/com.aallam.openai.api.completion/-completion-request-builder/top-p.html new file mode 100644 index 00000000..e4330fe9 --- /dev/null +++ b/openai-core/com.aallam.openai.api.completion/-completion-request-builder/top-p.html @@ -0,0 +1,66 @@ + + + + + topP + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

topP

+
+
var topP: Double?

An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered.

We generally recommend using this or temperature but not both.

Defaults to 1.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.completion/-completion-request-builder/user.html b/openai-core/com.aallam.openai.api.completion/-completion-request-builder/user.html new file mode 100644 index 00000000..9341e67c --- /dev/null +++ b/openai-core/com.aallam.openai.api.completion/-completion-request-builder/user.html @@ -0,0 +1,66 @@ + + + + + user + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

user

+
+
var user: String?

A unique identifier representing your end-user, which will help OpenAI to monitor and detect abuse.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.completion/-completion-request/-completion-request.html b/openai-core/com.aallam.openai.api.completion/-completion-request/-completion-request.html new file mode 100644 index 00000000..1f7e74c6 --- /dev/null +++ b/openai-core/com.aallam.openai.api.completion/-completion-request/-completion-request.html @@ -0,0 +1,66 @@ + + + + + CompletionRequest + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

CompletionRequest

+
+
constructor(model: ModelId, prompt: String? = null, maxTokens: Int? = null, temperature: Double? = null, topP: Double? = null, n: Int? = null, logprobs: Int? = null, echo: Boolean? = null, stop: List<String>? = null, presencePenalty: Double? = null, frequencyPenalty: Double? = null, bestOf: Int? = null, logitBias: Map<String, Int>? = null, user: String? = null, suffix: String? = null)
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.completion/-completion-request/best-of.html b/openai-core/com.aallam.openai.api.completion/-completion-request/best-of.html new file mode 100644 index 00000000..87ace2d4 --- /dev/null +++ b/openai-core/com.aallam.openai.api.completion/-completion-request/best-of.html @@ -0,0 +1,66 @@ + + + + + bestOf + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

bestOf

+
+
val bestOf: Int? = null

Generates bestOf completions server-side and returns the "best" (the one with the highest log probability per token). Results cannot be streamed.

When used with n, bestOf controls the number of candidate completions and n specifies how many to return; bestOf must be greater than n.

Note: Because this parameter generates many completions, it can quickly consume your token quota. Use carefully and ensure that you have reasonable settings for maxTokens and stop.

Defaults to 1.
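A hedged sketch of the bestOf/n relationship, using the constructor documented on the CompletionRequest pages; the model id is a placeholder.

import com.aallam.openai.api.completion.CompletionRequest
import com.aallam.openai.api.model.ModelId

// Sketch: sample 5 candidates server-side, return the best 2.
// bestOf (5) must be greater than n (2); both multiply token usage.
val request = CompletionRequest(
    model = ModelId("text-davinci-003"), // placeholder model id
    prompt = "Suggest a name for a Kotlin HTTP client.",
    n = 2,
    bestOf = 5,
    maxTokens = 16,
)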

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.completion/-completion-request/echo.html b/openai-core/com.aallam.openai.api.completion/-completion-request/echo.html new file mode 100644 index 00000000..dea3f44d --- /dev/null +++ b/openai-core/com.aallam.openai.api.completion/-completion-request/echo.html @@ -0,0 +1,66 @@ + + + + + echo + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

echo

+
+
val echo: Boolean? = null

Echo back the prompt in addition to the completion.

Defaults to false.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.completion/-completion-request/frequency-penalty.html b/openai-core/com.aallam.openai.api.completion/-completion-request/frequency-penalty.html new file mode 100644 index 00000000..c854929f --- /dev/null +++ b/openai-core/com.aallam.openai.api.completion/-completion-request/frequency-penalty.html @@ -0,0 +1,66 @@ + + + + + frequencyPenalty + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

frequencyPenalty

+
+

Number between 0 and 1 (default 0) that penalizes new tokens based on their existing frequency in the text so far. Decreases the model's likelihood to repeat the same line verbatim.

Defaults to 0.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.completion/-completion-request/index.html b/openai-core/com.aallam.openai.api.completion/-completion-request/index.html new file mode 100644 index 00000000..0bb3c35c --- /dev/null +++ b/openai-core/com.aallam.openai.api.completion/-completion-request/index.html @@ -0,0 +1,319 @@ + + + + + CompletionRequest + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

CompletionRequest

+
@Serializable
class CompletionRequest(val model: ModelId, val prompt: String? = null, val maxTokens: Int? = null, val temperature: Double? = null, val topP: Double? = null, val n: Int? = null, val logprobs: Int? = null, val echo: Boolean? = null, val stop: List<String>? = null, val presencePenalty: Double? = null, val frequencyPenalty: Double? = null, val bestOf: Int? = null, val logitBias: Map<String, Int>? = null, val user: String? = null, val suffix: String? = null)

A request for OpenAI to generate a predicted completion for a prompt. All fields are optional.

documentation

+
+
+
+
+
+

Constructors

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
constructor(model: ModelId, prompt: String? = null, maxTokens: Int? = null, temperature: Double? = null, topP: Double? = null, n: Int? = null, logprobs: Int? = null, echo: Boolean? = null, stop: List<String>? = null, presencePenalty: Double? = null, frequencyPenalty: Double? = null, bestOf: Int? = null, logitBias: Map<String, Int>? = null, user: String? = null, suffix: String? = null)
+
+
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
val bestOf: Int? = null

Generates bestOf completions server-side and returns the "best" (the one with the highest log probability per token). Results cannot be streamed.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
val echo: Boolean? = null

Echo back the prompt in addition to the completion.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

Number between 0 and 1 (default 0) that penalizes new tokens based on their existing frequency in the text so far. Decreases the model's likelihood to repeat the same line verbatim.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
val logitBias: Map<String, Int>? = null

Modify the likelihood of specified tokens appearing in the completion.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
val logprobs: Int? = null

Include the log probabilities on the logprobs most likely tokens, as well as the chosen tokens. For example, if logprobs is 10, the API will return a list of the 10 most likely tokens. The API will always return the logprob of the sampled token, so there may be up to logprobs+1 elements in the response.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
val maxTokens: Int? = null

The maximum number of tokens to generate. Requests can use up to 2048 tokens shared between prompt and completion. (One token is roughly 4 characters for normal English text)

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

ID of the model to use.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
val n: Int? = null

How many completions to generate for each prompt.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
val presencePenalty: Double? = null

Number between 0 and 1 (default 0) that penalizes new tokens based on whether they appear in the text so far. Increases the model's likelihood to talk about new topics.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
val prompt: String? = null

The prompt(s) to generate completions for, encoded as a string, a list of strings, or a list of token lists.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
val stop: List<String>? = null

Up to 4 sequences where the API will stop generating further tokens. The returned text will not contain the stop sequence.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
val suffix: String? = null

The suffix that comes after a completion of inserted text.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
val temperature: Double? = null

What sampling temperature to use. Higher values mean the model will take more risks. Try 0.9 for more creative applications, and 0 (argmax sampling) for ones with a well-defined answer.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
val topP: Double? = null

An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
val user: String? = null

A unique identifier representing your end-user, which will help OpenAI to monitor and detect abuse.

+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.completion/-completion-request/logit-bias.html b/openai-core/com.aallam.openai.api.completion/-completion-request/logit-bias.html new file mode 100644 index 00000000..811064d8 --- /dev/null +++ b/openai-core/com.aallam.openai.api.completion/-completion-request/logit-bias.html @@ -0,0 +1,66 @@ + + + + + logitBias + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

logitBias

+
+
val logitBias: Map<String, Int>? = null

Modify the likelihood of specified tokens appearing in the completion.

Accepts a JSON object that maps tokens (specified by their token ID in the GPT tokenizer) to an associated bias value from -100 to 100. You can use this tokenizer tool (which works for both GPT-2 and GPT-3) to convert text to token IDs. Mathematically, the bias is added to the logits generated by the model prior to sampling. The exact effect will vary per model, but values between -1 and 1 should decrease or increase the likelihood of selection; values like -100 or 100 should result in a ban or exclusive selection of the relevant token.

As an example, you can pass {"50256": -100} to prevent the <|endoftext|> token from being generated.

Defaults to null.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.completion/-completion-request/logprobs.html b/openai-core/com.aallam.openai.api.completion/-completion-request/logprobs.html new file mode 100644 index 00000000..030da1a4 --- /dev/null +++ b/openai-core/com.aallam.openai.api.completion/-completion-request/logprobs.html @@ -0,0 +1,66 @@ + + + + + logprobs + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

logprobs

+
+
val logprobs: Int? = null

Include the log probabilities on the logprobs most likely tokens, as well as the chosen tokens. For example, if logprobs is 10, the API will return a list of the 10 most likely tokens. The API will always return the logprob of the sampled token, so there may be up to logprobs+1 elements in the response.

Defaults to null.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.completion/-completion-request/max-tokens.html b/openai-core/com.aallam.openai.api.completion/-completion-request/max-tokens.html new file mode 100644 index 00000000..4574d867 --- /dev/null +++ b/openai-core/com.aallam.openai.api.completion/-completion-request/max-tokens.html @@ -0,0 +1,66 @@ + + + + + maxTokens + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

maxTokens

+
+
val maxTokens: Int? = null

The maximum number of tokens to generate. Requests can use up to 2048 tokens shared between prompt and completion. (One token is roughly 4 characters for normal English text)

Defaults to 16.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.completion/-completion-request/model.html b/openai-core/com.aallam.openai.api.completion/-completion-request/model.html new file mode 100644 index 00000000..103e9608 --- /dev/null +++ b/openai-core/com.aallam.openai.api.completion/-completion-request/model.html @@ -0,0 +1,66 @@ + + + + + model + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

model

+
+

ID of the model to use.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.completion/-completion-request/n.html b/openai-core/com.aallam.openai.api.completion/-completion-request/n.html new file mode 100644 index 00000000..9c881d6e --- /dev/null +++ b/openai-core/com.aallam.openai.api.completion/-completion-request/n.html @@ -0,0 +1,66 @@ + + + + + n + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

n

+
+
val n: Int? = null

How many completions to generate for each prompt.

Note: Because this parameter generates many completions, it can quickly consume your token quota. Use carefully and ensure that you have reasonable settings for maxTokens and stop.

Defaults to 1.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.completion/-completion-request/presence-penalty.html b/openai-core/com.aallam.openai.api.completion/-completion-request/presence-penalty.html new file mode 100644 index 00000000..4e6f2d1a --- /dev/null +++ b/openai-core/com.aallam.openai.api.completion/-completion-request/presence-penalty.html @@ -0,0 +1,66 @@ + + + + + presencePenalty + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

presencePenalty

+
+
val presencePenalty: Double? = null

Number between 0 and 1 (default 0) that penalizes new tokens based on whether they appear in the text so far. Increases the model's likelihood to talk about new topics.

Defaults to 0.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.completion/-completion-request/prompt.html b/openai-core/com.aallam.openai.api.completion/-completion-request/prompt.html new file mode 100644 index 00000000..ce3ed8a0 --- /dev/null +++ b/openai-core/com.aallam.openai.api.completion/-completion-request/prompt.html @@ -0,0 +1,66 @@ + + + + + prompt + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

prompt

+
+
val prompt: String? = null

The prompt(s) to generate completions for, encoded as a string, a list of strings, or a list of token lists.

Note that <|endoftext|> is the document separator that the model sees during training, so if a prompt is not specified the model will generate as if from the beginning of a new document.

Defaults to <|endoftext|>.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.completion/-completion-request/stop.html b/openai-core/com.aallam.openai.api.completion/-completion-request/stop.html new file mode 100644 index 00000000..9f50b824 --- /dev/null +++ b/openai-core/com.aallam.openai.api.completion/-completion-request/stop.html @@ -0,0 +1,66 @@ + + + + + stop + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

stop

+
+
val stop: List<String>? = null

Up to 4 sequences where the API will stop generating further tokens. The returned text will not contain the stop sequence.

Defaults to null.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.completion/-completion-request/suffix.html b/openai-core/com.aallam.openai.api.completion/-completion-request/suffix.html new file mode 100644 index 00000000..f17c1698 --- /dev/null +++ b/openai-core/com.aallam.openai.api.completion/-completion-request/suffix.html @@ -0,0 +1,66 @@ + + + + + suffix + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

suffix

+
+
val suffix: String? = null

The suffix that comes after a completion of inserted text.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.completion/-completion-request/temperature.html b/openai-core/com.aallam.openai.api.completion/-completion-request/temperature.html new file mode 100644 index 00000000..a4c9ca32 --- /dev/null +++ b/openai-core/com.aallam.openai.api.completion/-completion-request/temperature.html @@ -0,0 +1,66 @@ + + + + + temperature + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

temperature

+
+
val temperature: Double? = null

What sampling temperature to use. Higher values mean the model will take more risks. Try 0.9 for more creative applications, and 0 (argmax sampling) for ones with a well-defined answer.

We generally recommend using this or topP but not both.

Defaults to 1.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.completion/-completion-request/top-p.html b/openai-core/com.aallam.openai.api.completion/-completion-request/top-p.html new file mode 100644 index 00000000..06e7edf9 --- /dev/null +++ b/openai-core/com.aallam.openai.api.completion/-completion-request/top-p.html @@ -0,0 +1,66 @@ + + + + + topP + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

topP

+
+
val topP: Double? = null

An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered.

We generally recommend using this or temperature but not both.

Defaults to 1.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.completion/-completion-request/user.html b/openai-core/com.aallam.openai.api.completion/-completion-request/user.html new file mode 100644 index 00000000..ae7b5d57 --- /dev/null +++ b/openai-core/com.aallam.openai.api.completion/-completion-request/user.html @@ -0,0 +1,66 @@ + + + + + user + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

user

+
+
val user: String? = null

A unique identifier representing your end-user, which will help OpenAI to monitor and detect abuse.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.completion/-logprobs/-logprobs.html b/openai-core/com.aallam.openai.api.completion/-logprobs/-logprobs.html new file mode 100644 index 00000000..93f36364 --- /dev/null +++ b/openai-core/com.aallam.openai.api.completion/-logprobs/-logprobs.html @@ -0,0 +1,66 @@ + + + + + Logprobs + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

Logprobs

+
+
constructor(tokens: List<String>, tokenLogprobs: List<Double>, topLogprobs: List<Map<String, Double>>, textOffset: List<Int>)
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.completion/-logprobs/index.html b/openai-core/com.aallam.openai.api.completion/-logprobs/index.html new file mode 100644 index 00000000..da6ec738 --- /dev/null +++ b/openai-core/com.aallam.openai.api.completion/-logprobs/index.html @@ -0,0 +1,154 @@ + + + + + Logprobs + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

Logprobs

+
@Serializable
data class Logprobs(val tokens: List<String>, val tokenLogprobs: List<Double>, val topLogprobs: List<Map<String, Double>>, val textOffset: List<Int>)

Log probabilities of different token options. Returned if CompletionRequest.logprobs is greater than zero.

documentation
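A hedged sketch of reading these fields back, assuming a Logprobs instance obtained from a completion Choice; the function name is illustrative only.

import com.aallam.openai.api.completion.Logprobs

// Sketch: pair each returned token with its log probability.
fun printTokenLogprobs(logprobs: Logprobs) {
    logprobs.tokens.zip(logprobs.tokenLogprobs).forEach { (token, logProb) ->
        println("$token -> $logProb")
    }
}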

+
+
+
+
+
+

Constructors

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
constructor(tokens: List<String>, tokenLogprobs: List<Double>, topLogprobs: List<Map<String, Double>>, textOffset: List<Int>)
+
+
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+

The character offset from the start of the returned text for each of the chosen tokens.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

The log probability of each token in tokens

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

The tokens chosen by the completion API.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

A map for each index in the completion result. The map contains the top CompletionRequest.logprobs tokens and their probabilities

+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.completion/-logprobs/text-offset.html b/openai-core/com.aallam.openai.api.completion/-logprobs/text-offset.html new file mode 100644 index 00000000..42ea3c9b --- /dev/null +++ b/openai-core/com.aallam.openai.api.completion/-logprobs/text-offset.html @@ -0,0 +1,66 @@ + + + + + textOffset + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

textOffset

+
+

The character offset from the start of the returned text for each of the chosen tokens.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.completion/-logprobs/token-logprobs.html b/openai-core/com.aallam.openai.api.completion/-logprobs/token-logprobs.html new file mode 100644 index 00000000..d5506848 --- /dev/null +++ b/openai-core/com.aallam.openai.api.completion/-logprobs/token-logprobs.html @@ -0,0 +1,66 @@ + + + + + tokenLogprobs + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

tokenLogprobs

+
+

The log probability of each token in tokens

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.completion/-logprobs/tokens.html b/openai-core/com.aallam.openai.api.completion/-logprobs/tokens.html new file mode 100644 index 00000000..a55449f4 --- /dev/null +++ b/openai-core/com.aallam.openai.api.completion/-logprobs/tokens.html @@ -0,0 +1,66 @@ + + + + + tokens + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

tokens

+
+

The tokens chosen by the completion API.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.completion/-logprobs/top-logprobs.html b/openai-core/com.aallam.openai.api.completion/-logprobs/top-logprobs.html new file mode 100644 index 00000000..40a8931d --- /dev/null +++ b/openai-core/com.aallam.openai.api.completion/-logprobs/top-logprobs.html @@ -0,0 +1,66 @@ + + + + + topLogprobs + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

topLogprobs

+
+

A map for each index in the completion result. The map contains the top CompletionRequest.logprobs tokens and their probabilities

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.completion/-text-completion/-text-completion.html b/openai-core/com.aallam.openai.api.completion/-text-completion/-text-completion.html new file mode 100644 index 00000000..b96402e4 --- /dev/null +++ b/openai-core/com.aallam.openai.api.completion/-text-completion/-text-completion.html @@ -0,0 +1,66 @@ + + + + + TextCompletion + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

TextCompletion

+
+
constructor(id: String, created: Long, model: ModelId, choices: List<Choice>, usage: Usage? = null)
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.completion/-text-completion/choices.html b/openai-core/com.aallam.openai.api.completion/-text-completion/choices.html new file mode 100644 index 00000000..af7beaa8 --- /dev/null +++ b/openai-core/com.aallam.openai.api.completion/-text-completion/choices.html @@ -0,0 +1,66 @@ + + + + + choices + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

choices

+
+

A list of generated completions

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.completion/-text-completion/created.html b/openai-core/com.aallam.openai.api.completion/-text-completion/created.html new file mode 100644 index 00000000..45834772 --- /dev/null +++ b/openai-core/com.aallam.openai.api.completion/-text-completion/created.html @@ -0,0 +1,66 @@ + + + + + created + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

created

+
+

The creation time in epoch milliseconds.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.completion/-text-completion/id.html b/openai-core/com.aallam.openai.api.completion/-text-completion/id.html new file mode 100644 index 00000000..8aa56484 --- /dev/null +++ b/openai-core/com.aallam.openai.api.completion/-text-completion/id.html @@ -0,0 +1,66 @@ + + + + + id + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

id

+
+
val id: String

A unique id assigned to this completion

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.completion/-text-completion/index.html b/openai-core/com.aallam.openai.api.completion/-text-completion/index.html new file mode 100644 index 00000000..889293e5 --- /dev/null +++ b/openai-core/com.aallam.openai.api.completion/-text-completion/index.html @@ -0,0 +1,169 @@ + + + + + TextCompletion + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

TextCompletion

+
@Serializable
data class TextCompletion(val id: String, val created: Long, val model: ModelId, val choices: List<Choice>, val usage: Usage? = null)

An object containing a response from the completion API.

documentation
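A minimal consumption sketch using only the properties documented on this page and on Choice; the completion value and function name are hypothetical.

import com.aallam.openai.api.completion.TextCompletion

// Sketch: print each generated choice and the reason it stopped.
fun printCompletion(completion: TextCompletion) {
    println("id=${completion.id}, model=${completion.model}")
    completion.choices.forEach { choice ->
        println("[${choice.index}] ${choice.text} (finish: ${choice.finishReason.value})")
    }
}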

+
+
+
+
+
+

Constructors

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
constructor(id: String, created: Long, model: ModelId, choices: List<Choice>, usage: Usage? = null)
+
+
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+

A list of generated completions

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

The creation time in epoch milliseconds.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
val id: String

A unique id assigned to this completion

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

The GPT-3 model used.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
val usage: Usage? = null

Text completion usage data.

+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.completion/-text-completion/model.html b/openai-core/com.aallam.openai.api.completion/-text-completion/model.html new file mode 100644 index 00000000..e4bfc23a --- /dev/null +++ b/openai-core/com.aallam.openai.api.completion/-text-completion/model.html @@ -0,0 +1,66 @@ + + + + + model + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

model

+
+

The GPT-3 model used.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.completion/-text-completion/usage.html b/openai-core/com.aallam.openai.api.completion/-text-completion/usage.html new file mode 100644 index 00000000..03fb5be9 --- /dev/null +++ b/openai-core/com.aallam.openai.api.completion/-text-completion/usage.html @@ -0,0 +1,66 @@ + + + + + usage + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

usage

+
+
val usage: Usage? = null

Text completion usage data.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.completion/completion-request.html b/openai-core/com.aallam.openai.api.completion/completion-request.html new file mode 100644 index 00000000..c27dcfae --- /dev/null +++ b/openai-core/com.aallam.openai.api.completion/completion-request.html @@ -0,0 +1,66 @@ + + + + + completionRequest + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

completionRequest

+
+

A request for OpenAI to generate a predicted completion for a prompt. All fields are optional.

documentation
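A minimal usage sketch, assuming this function takes a CompletionRequestBuilder receiver whose properties are documented elsewhere in this diff; the model id is a placeholder.

import com.aallam.openai.api.completion.completionRequest
import com.aallam.openai.api.model.ModelId

// Sketch: the DSL form of building a CompletionRequest.
val request = completionRequest {
    model = ModelId("text-davinci-003") // placeholder model id
    prompt = "Translate 'hello' to French."
    maxTokens = 5
    temperature = 0.0
}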

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.completion/index.html b/openai-core/com.aallam.openai.api.completion/index.html new file mode 100644 index 00000000..948dc94b --- /dev/null +++ b/openai-core/com.aallam.openai.api.completion/index.html @@ -0,0 +1,168 @@ + + + + + com.aallam.openai.api.completion + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

Package-level declarations

+
+
+
+
+
+

Types

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
@Serializable
data class Choice(val text: String, val index: Int, val logprobs: Logprobs? = null, val finishReason: FinishReason)

A completion generated by GPT-3.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
@Serializable
class CompletionRequest(val model: ModelId, val prompt: String? = null, val maxTokens: Int? = null, val temperature: Double? = null, val topP: Double? = null, val n: Int? = null, val logprobs: Int? = null, val echo: Boolean? = null, val stop: List<String>? = null, val presencePenalty: Double? = null, val frequencyPenalty: Double? = null, val bestOf: Int? = null, val logitBias: Map<String, Int>? = null, val user: String? = null, val suffix: String? = null)

A request for OpenAI to generate a predicted completion for a prompt. All fields are optional.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

Builder of CompletionRequest instances.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
@Serializable
data class Logprobs(val tokens: List<String>, val tokenLogprobs: List<Double>, val topLogprobs: List<Map<String, Double>>, val textOffset: List<Int>)

Log probabilities of different token options. Returned if CompletionRequest.logprobs is greater than zero.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
@Serializable
data class TextCompletion(val id: String, val created: Long, val model: ModelId, val choices: List<Choice>, val usage: Usage? = null)

An object containing a response from the completion API.

+
+
+
+
+
+
+
+

Functions

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+

A request for OpenAI to generate a predicted completion for a prompt. All fields are optional.

+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.core/-delete-response/-delete-response.html b/openai-core/com.aallam.openai.api.core/-delete-response/-delete-response.html new file mode 100644 index 00000000..ba66c5e9 --- /dev/null +++ b/openai-core/com.aallam.openai.api.core/-delete-response/-delete-response.html @@ -0,0 +1,66 @@ + + + + + DeleteResponse + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

DeleteResponse

+
+
constructor(id: String, objectType: String, deleted: Boolean)
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.core/-delete-response/deleted.html b/openai-core/com.aallam.openai.api.core/-delete-response/deleted.html new file mode 100644 index 00000000..fe23bde3 --- /dev/null +++ b/openai-core/com.aallam.openai.api.core/-delete-response/deleted.html @@ -0,0 +1,66 @@ + + + + + deleted + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

deleted

+
+ +
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.core/-delete-response/id.html b/openai-core/com.aallam.openai.api.core/-delete-response/id.html new file mode 100644 index 00000000..8085d93f --- /dev/null +++ b/openai-core/com.aallam.openai.api.core/-delete-response/id.html @@ -0,0 +1,66 @@ + + + + + id + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

id

+
+
val id: String
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.core/-delete-response/index.html b/openai-core/com.aallam.openai.api.core/-delete-response/index.html new file mode 100644 index 00000000..e9ea8f25 --- /dev/null +++ b/openai-core/com.aallam.openai.api.core/-delete-response/index.html @@ -0,0 +1,139 @@ + + + + + DeleteResponse + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

DeleteResponse

+
@Serializable
class DeleteResponse(val id: String, val objectType: String, val deleted: Boolean)

Delete operation response.
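A small sketch of checking the documented fields after a delete call; the response value and function name are hypothetical.

import com.aallam.openai.api.core.DeleteResponse

// Sketch: verify that a delete call actually removed the resource.
fun checkDeleted(response: DeleteResponse) {
    require(response.deleted) { "Resource ${response.id} (${response.objectType}) was not deleted" }
}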

+
+
+
+
+
+

Constructors

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
constructor(id: String, objectType: String, deleted: Boolean)
+
+
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+ +
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
val id: String
+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+ +
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.core/-delete-response/object-type.html b/openai-core/com.aallam.openai.api.core/-delete-response/object-type.html new file mode 100644 index 00000000..021b4240 --- /dev/null +++ b/openai-core/com.aallam.openai.api.core/-delete-response/object-type.html @@ -0,0 +1,66 @@ + + + + + objectType + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

objectType

+
+ +
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.core/-finish-reason/-companion/-function-call.html b/openai-core/com.aallam.openai.api.core/-finish-reason/-companion/-function-call.html new file mode 100644 index 00000000..821df286 --- /dev/null +++ b/openai-core/com.aallam.openai.api.core/-finish-reason/-companion/-function-call.html @@ -0,0 +1,66 @@ + + + + + FunctionCall + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

FunctionCall

+
+ +
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.core/-finish-reason/-companion/-length.html b/openai-core/com.aallam.openai.api.core/-finish-reason/-companion/-length.html new file mode 100644 index 00000000..5cf79f7c --- /dev/null +++ b/openai-core/com.aallam.openai.api.core/-finish-reason/-companion/-length.html @@ -0,0 +1,66 @@ + + + + + Length + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

Length

+
+ +
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.core/-finish-reason/-companion/-stop.html b/openai-core/com.aallam.openai.api.core/-finish-reason/-companion/-stop.html new file mode 100644 index 00000000..01e2398d --- /dev/null +++ b/openai-core/com.aallam.openai.api.core/-finish-reason/-companion/-stop.html @@ -0,0 +1,66 @@ + + + + + Stop + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

Stop

+
+ +
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.core/-finish-reason/-companion/index.html b/openai-core/com.aallam.openai.api.core/-finish-reason/-companion/index.html new file mode 100644 index 00000000..264c976f --- /dev/null +++ b/openai-core/com.aallam.openai.api.core/-finish-reason/-companion/index.html @@ -0,0 +1,120 @@ + + + + + Companion + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

Companion

+
object Companion
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+ +
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+ +
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+ +
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.core/-finish-reason/-finish-reason.html b/openai-core/com.aallam.openai.api.core/-finish-reason/-finish-reason.html new file mode 100644 index 00000000..1cd01185 --- /dev/null +++ b/openai-core/com.aallam.openai.api.core/-finish-reason/-finish-reason.html @@ -0,0 +1,66 @@ + + + + + FinishReason + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

FinishReason

+
+
constructor(value: String)
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.core/-finish-reason/index.html b/openai-core/com.aallam.openai.api.core/-finish-reason/index.html new file mode 100644 index 00000000..3d716709 --- /dev/null +++ b/openai-core/com.aallam.openai.api.core/-finish-reason/index.html @@ -0,0 +1,128 @@ + + + + + FinishReason + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

FinishReason

+
@Serializable
value class FinishReason(val value: String)
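A hedged sketch of branching on a finish reason, assuming the companion constants listed on the pages below (Stop, Length, FunctionCall) are exposed as FinishReason.Stop and so on.

import com.aallam.openai.api.core.FinishReason

// Sketch: describe why a choice stopped generating.
fun describe(reason: FinishReason): String = when (reason) {
    FinishReason.Stop -> "hit a stop sequence or a natural end"
    FinishReason.Length -> "ran into the maxTokens limit"
    FinishReason.FunctionCall -> "the model decided to call a function"
    else -> "unknown finish reason: ${reason.value}"
}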
+
+
+
+
+
+

Constructors

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
constructor(value: String)
+
+
+
+
+
+
+
+

Types

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
object Companion
+
+
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+ +
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.core/-finish-reason/value.html b/openai-core/com.aallam.openai.api.core/-finish-reason/value.html new file mode 100644 index 00000000..a6312cc2 --- /dev/null +++ b/openai-core/com.aallam.openai.api.core/-finish-reason/value.html @@ -0,0 +1,66 @@ + + + + + value + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

value

+
+ +
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.core/-list-response/-list-response.html b/openai-core/com.aallam.openai.api.core/-list-response/-list-response.html new file mode 100644 index 00000000..13a02ce3 --- /dev/null +++ b/openai-core/com.aallam.openai.api.core/-list-response/-list-response.html @@ -0,0 +1,66 @@ + + + + + ListResponse + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

ListResponse

+
+
constructor(data: List<T>, usage: Usage? = null)
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.core/-list-response/data.html b/openai-core/com.aallam.openai.api.core/-list-response/data.html new file mode 100644 index 00000000..df835277 --- /dev/null +++ b/openai-core/com.aallam.openai.api.core/-list-response/data.html @@ -0,0 +1,66 @@ + + + + + data + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

data

+
+
val data: List<T>

List containing the actual results.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.core/-list-response/index.html b/openai-core/com.aallam.openai.api.core/-list-response/index.html new file mode 100644 index 00000000..2814f618 --- /dev/null +++ b/openai-core/com.aallam.openai.api.core/-list-response/index.html @@ -0,0 +1,124 @@ + + + + + ListResponse + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

ListResponse

+
@Serializable
class ListResponse<T>(val data: List<T>, val usage: Usage? = null)

Response as List of T.

+
+
+
+
+
+

Constructors

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
constructor(data: List<T>, usage: Usage? = null)
+
+
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
val data: List<T>

List containing the actual results.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
val usage: Usage? = null

Embedding usage data.

+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.core/-list-response/usage.html b/openai-core/com.aallam.openai.api.core/-list-response/usage.html new file mode 100644 index 00000000..fc5cb6a1 --- /dev/null +++ b/openai-core/com.aallam.openai.api.core/-list-response/usage.html @@ -0,0 +1,66 @@ + + + + + usage + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

usage

+
+
val usage: Usage? = null

Embedding usage data.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.core/-organization-id/-organization-id.html b/openai-core/com.aallam.openai.api.core/-organization-id/-organization-id.html new file mode 100644 index 00000000..814d05bc --- /dev/null +++ b/openai-core/com.aallam.openai.api.core/-organization-id/-organization-id.html @@ -0,0 +1,66 @@ + + + + + OrganizationId + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

OrganizationId

+
+
constructor(id: String)
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.core/-organization-id/id.html b/openai-core/com.aallam.openai.api.core/-organization-id/id.html new file mode 100644 index 00000000..1ea7d1c1 --- /dev/null +++ b/openai-core/com.aallam.openai.api.core/-organization-id/id.html @@ -0,0 +1,66 @@ + + + + + id + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

id

+
+
val id: String
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.core/-organization-id/index.html b/openai-core/com.aallam.openai.api.core/-organization-id/index.html new file mode 100644 index 00000000..11233048 --- /dev/null +++ b/openai-core/com.aallam.openai.api.core/-organization-id/index.html @@ -0,0 +1,109 @@ + + + + + OrganizationId + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

OrganizationId

+
@Serializable
value class OrganizationId(val id: String)

Organization identifier.

+
+
+
+
+
+

Constructors

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
constructor(id: String)
+
+
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
val id: String
+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.core/-paginated-list/-paginated-list.html b/openai-core/com.aallam.openai.api.core/-paginated-list/-paginated-list.html new file mode 100644 index 00000000..ab9b2d9f --- /dev/null +++ b/openai-core/com.aallam.openai.api.core/-paginated-list/-paginated-list.html @@ -0,0 +1,66 @@ + + + + + PaginatedList + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

PaginatedList

+
+
constructor(data: List<T>, hasMore: Boolean)
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.core/-paginated-list/data.html b/openai-core/com.aallam.openai.api.core/-paginated-list/data.html new file mode 100644 index 00000000..9962f75c --- /dev/null +++ b/openai-core/com.aallam.openai.api.core/-paginated-list/data.html @@ -0,0 +1,66 @@ + + + + + data + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

data

+
+
val data: List<T>
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.core/-paginated-list/has-more.html b/openai-core/com.aallam.openai.api.core/-paginated-list/has-more.html new file mode 100644 index 00000000..c0418b60 --- /dev/null +++ b/openai-core/com.aallam.openai.api.core/-paginated-list/has-more.html @@ -0,0 +1,66 @@ + + + + + hasMore + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

hasMore

+
+ +
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.core/-paginated-list/index.html b/openai-core/com.aallam.openai.api.core/-paginated-list/index.html new file mode 100644 index 00000000..339f5def --- /dev/null +++ b/openai-core/com.aallam.openai.api.core/-paginated-list/index.html @@ -0,0 +1,278 @@ + + + + + PaginatedList + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

PaginatedList

+
@Serializable
data class PaginatedList<T>(val data: List<T>, val hasMore: Boolean) : List<T>
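A short sketch using only what the signature above guarantees: a PaginatedList is itself a List, and hasMore signals whether another page exists; the function name is illustrative.

import com.aallam.openai.api.core.PaginatedList

// Sketch: iterate a page directly and check whether more pages remain.
fun <T> printPage(page: PaginatedList<T>) {
    page.forEach { println(it) }
    if (page.hasMore) println("more results are available on the next page")
}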
+
+
+
+
+
+

Constructors

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
constructor(data: List<T>, hasMore: Boolean)
+
+
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
val data: List<T>
+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+ +
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
open override val size: Int
+
+
+
+
+
+
+
+

Functions

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
open operator override fun contains(element: T): Boolean
+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
open override fun containsAll(elements: Collection<T>): Boolean
+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
open operator override fun get(index: Int): T
+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
open override fun indexOf(element: T): Int
+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
open override fun isEmpty(): Boolean
+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
open operator override fun iterator(): Iterator<T>
+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
open override fun lastIndexOf(element: T): Int
+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
open override fun listIterator(): ListIterator<T>
open override fun listIterator(index: Int): ListIterator<T>
+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
open override fun subList(fromIndex: Int, toIndex: Int): List<T>
+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.core/-status/-companion/-cancelled.html b/openai-core/com.aallam.openai.api.core/-status/-companion/-cancelled.html new file mode 100644 index 00000000..939c1ff2 --- /dev/null +++ b/openai-core/com.aallam.openai.api.core/-status/-companion/-cancelled.html @@ -0,0 +1,66 @@ + + + + + Cancelled + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

Cancelled

+
+ +
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.core/-status/-companion/-deleted.html b/openai-core/com.aallam.openai.api.core/-status/-companion/-deleted.html new file mode 100644 index 00000000..5ff69a15 --- /dev/null +++ b/openai-core/com.aallam.openai.api.core/-status/-companion/-deleted.html @@ -0,0 +1,66 @@ + + + + + Deleted + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

Deleted

+
+ +
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.core/-status/-companion/-failed.html b/openai-core/com.aallam.openai.api.core/-status/-companion/-failed.html new file mode 100644 index 00000000..e0ba4985 --- /dev/null +++ b/openai-core/com.aallam.openai.api.core/-status/-companion/-failed.html @@ -0,0 +1,66 @@ + + + + + Failed + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

Failed

+
+ +
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.core/-status/-companion/-processed.html b/openai-core/com.aallam.openai.api.core/-status/-companion/-processed.html new file mode 100644 index 00000000..89dd110c --- /dev/null +++ b/openai-core/com.aallam.openai.api.core/-status/-companion/-processed.html @@ -0,0 +1,66 @@ + + + + + Processed + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

Processed

+
+ +
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.core/-status/-companion/-queued.html b/openai-core/com.aallam.openai.api.core/-status/-companion/-queued.html new file mode 100644 index 00000000..0e5ac00e --- /dev/null +++ b/openai-core/com.aallam.openai.api.core/-status/-companion/-queued.html @@ -0,0 +1,66 @@ + + + + + Queued + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

Queued

+
+ +
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.core/-status/-companion/-running.html b/openai-core/com.aallam.openai.api.core/-status/-companion/-running.html new file mode 100644 index 00000000..a63a67bb --- /dev/null +++ b/openai-core/com.aallam.openai.api.core/-status/-companion/-running.html @@ -0,0 +1,66 @@ + + + + + Running + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

Running

+
+ +
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.core/-status/-companion/-succeeded.html b/openai-core/com.aallam.openai.api.core/-status/-companion/-succeeded.html new file mode 100644 index 00000000..330c0b1b --- /dev/null +++ b/openai-core/com.aallam.openai.api.core/-status/-companion/-succeeded.html @@ -0,0 +1,66 @@ + + + + + Succeeded + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

Succeeded

+
+ +
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.core/-status/-companion/-validating-files.html b/openai-core/com.aallam.openai.api.core/-status/-companion/-validating-files.html new file mode 100644 index 00000000..e57438fc --- /dev/null +++ b/openai-core/com.aallam.openai.api.core/-status/-companion/-validating-files.html @@ -0,0 +1,66 @@ + + + + + ValidatingFiles + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

ValidatingFiles

+
+ +
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.core/-status/-companion/index.html b/openai-core/com.aallam.openai.api.core/-status/-companion/index.html new file mode 100644 index 00000000..6db15531 --- /dev/null +++ b/openai-core/com.aallam.openai.api.core/-status/-companion/index.html @@ -0,0 +1,195 @@ + + + + + Companion + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

Companion

+
object Companion
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+ +
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+ +
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+ +
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+ +
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+ +
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+ +
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+ +
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+ +
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.core/-status/-status.html b/openai-core/com.aallam.openai.api.core/-status/-status.html new file mode 100644 index 00000000..e8eda886 --- /dev/null +++ b/openai-core/com.aallam.openai.api.core/-status/-status.html @@ -0,0 +1,66 @@ + + + + + Status + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

Status

+
+
constructor(value: String)
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.core/-status/index.html b/openai-core/com.aallam.openai.api.core/-status/index.html new file mode 100644 index 00000000..de7cf312 --- /dev/null +++ b/openai-core/com.aallam.openai.api.core/-status/index.html @@ -0,0 +1,128 @@ + + + + + Status + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

Status

+
@Serializable
value class Status(val value: String)

File status.

+
+
+
+
+
+

Constructors

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
constructor(value: String)
+
+
+
+
+
+
+
+

Types

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
object Companion
+
+
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+ +
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.core/-status/value.html b/openai-core/com.aallam.openai.api.core/-status/value.html new file mode 100644 index 00000000..07ae17ad --- /dev/null +++ b/openai-core/com.aallam.openai.api.core/-status/value.html @@ -0,0 +1,66 @@ + + + + + value + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

value

+
+ +
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.core/-usage/-usage.html b/openai-core/com.aallam.openai.api.core/-usage/-usage.html new file mode 100644 index 00000000..3da56c53 --- /dev/null +++ b/openai-core/com.aallam.openai.api.core/-usage/-usage.html @@ -0,0 +1,66 @@ + + + + + Usage + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

Usage

+
+
constructor(promptTokens: Int? = null, completionTokens: Int? = null, totalTokens: Int? = null)
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.core/-usage/completion-tokens.html b/openai-core/com.aallam.openai.api.core/-usage/completion-tokens.html new file mode 100644 index 00000000..84963090 --- /dev/null +++ b/openai-core/com.aallam.openai.api.core/-usage/completion-tokens.html @@ -0,0 +1,66 @@ + + + + + completionTokens + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

completionTokens

+
+
val completionTokens: Int? = null

Count of completion tokens.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.core/-usage/index.html b/openai-core/com.aallam.openai.api.core/-usage/index.html new file mode 100644 index 00000000..c6ddf0e1 --- /dev/null +++ b/openai-core/com.aallam.openai.api.core/-usage/index.html @@ -0,0 +1,139 @@ + + + + + Usage + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

Usage

+
@Serializable
data class Usage(val promptTokens: Int? = null, val completionTokens: Int? = null, val totalTokens: Int? = null)
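A minimal sketch of reading the token counts documented below; all counts are nullable, and the usage value and function name are hypothetical.

import com.aallam.openai.api.core.Usage

// Sketch: log token accounting from a response, tolerating missing counts.
fun logUsage(usage: Usage?) {
    val prompt = usage?.promptTokens ?: 0
    val completion = usage?.completionTokens ?: 0
    val total = usage?.totalTokens ?: (prompt + completion)
    println("tokens: prompt=$prompt, completion=$completion, total=$total")
}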
+
+
+
+
+
+

Constructors

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
constructor(promptTokens: Int? = null, completionTokens: Int? = null, totalTokens: Int? = null)
+
+
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
val completionTokens: Int? = null

Count of completion tokens.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
val promptTokens: Int? = null

Count of prompts tokens.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
val totalTokens: Int? = null

Count of total tokens.

+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.core/-usage/prompt-tokens.html b/openai-core/com.aallam.openai.api.core/-usage/prompt-tokens.html new file mode 100644 index 00000000..6dc43e0e --- /dev/null +++ b/openai-core/com.aallam.openai.api.core/-usage/prompt-tokens.html @@ -0,0 +1,66 @@ + + + + + promptTokens + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

promptTokens

+
+
val promptTokens: Int? = null

Count of prompts tokens.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.core/-usage/total-tokens.html b/openai-core/com.aallam.openai.api.core/-usage/total-tokens.html new file mode 100644 index 00000000..5c008499 --- /dev/null +++ b/openai-core/com.aallam.openai.api.core/-usage/total-tokens.html @@ -0,0 +1,66 @@ + + + + + totalTokens + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

totalTokens

+
+
val totalTokens: Int? = null

Count of total tokens.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.core/index.html b/openai-core/com.aallam.openai.api.core/index.html new file mode 100644 index 00000000..276c1a7a --- /dev/null +++ b/openai-core/com.aallam.openai.api.core/index.html @@ -0,0 +1,179 @@ + + + + + com.aallam.openai.api.core + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

Package-level declarations

+
+
+
+
+
+

Types

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
@Serializable
class DeleteResponse(val id: String, val objectType: String, val deleted: Boolean)

Delete operation response.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
@Serializable
value class FinishReason(val value: String)
+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
@Serializable
class ListResponse<T>(val data: List<T>, val usage: Usage? = null)

Response as List of T.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
@Serializable
value class OrganizationId(val id: String)

Organization identifier.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
@Serializable
data class PaginatedList<T>(val data: List<T>, val hasMore: Boolean) : List<T>
+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
@Serializable
value class Status(val value: String)

File status.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
@Serializable
data class Usage(val promptTokens: Int? = null, val completionTokens: Int? = null, val totalTokens: Int? = null)
+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.edits/-edit/-edit.html b/openai-core/com.aallam.openai.api.edits/-edit/-edit.html new file mode 100644 index 00000000..f15b05a5 --- /dev/null +++ b/openai-core/com.aallam.openai.api.edits/-edit/-edit.html @@ -0,0 +1,66 @@ + + + + + Edit + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

Edit

+
+
constructor(created: Long, choices: List<Choice>, usage: Usage)
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.edits/-edit/choices.html b/openai-core/com.aallam.openai.api.edits/-edit/choices.html new file mode 100644 index 00000000..3c5bb41b --- /dev/null +++ b/openai-core/com.aallam.openai.api.edits/-edit/choices.html @@ -0,0 +1,66 @@ + + + + + choices + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

choices

+
+

A list of generated completions.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.edits/-edit/created.html b/openai-core/com.aallam.openai.api.edits/-edit/created.html new file mode 100644 index 00000000..dc1941be --- /dev/null +++ b/openai-core/com.aallam.openai.api.edits/-edit/created.html @@ -0,0 +1,66 @@ + + + + + created + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

created

+
+

The creation time in epoch milliseconds.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.edits/-edit/index.html b/openai-core/com.aallam.openai.api.edits/-edit/index.html new file mode 100644 index 00000000..3401de56 --- /dev/null +++ b/openai-core/com.aallam.openai.api.edits/-edit/index.html @@ -0,0 +1,139 @@ + + + + + Edit + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

Edit

+
@Serializable
class Edit(val created: Long, val choices: List<Choice>, val usage: Usage)

Response to the edit creation request.

+
+
+
+
+
+

Constructors

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
constructor(created: Long, choices: List<Choice>, usage: Usage)
+
+
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+

A list of generated completions.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

The creation time in epoch milliseconds.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

Edit usage data.

+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.edits/-edit/usage.html b/openai-core/com.aallam.openai.api.edits/-edit/usage.html new file mode 100644 index 00000000..82dad358 --- /dev/null +++ b/openai-core/com.aallam.openai.api.edits/-edit/usage.html @@ -0,0 +1,66 @@ + + + + + usage + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

usage

+
+

Edit usage data.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.edits/-edits-request-builder/-edits-request-builder.html b/openai-core/com.aallam.openai.api.edits/-edits-request-builder/-edits-request-builder.html new file mode 100644 index 00000000..5e623086 --- /dev/null +++ b/openai-core/com.aallam.openai.api.edits/-edits-request-builder/-edits-request-builder.html @@ -0,0 +1,66 @@ + + + + + EditsRequestBuilder + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

EditsRequestBuilder

+
+
constructor()
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.edits/-edits-request-builder/build.html b/openai-core/com.aallam.openai.api.edits/-edits-request-builder/build.html new file mode 100644 index 00000000..043d0d1f --- /dev/null +++ b/openai-core/com.aallam.openai.api.edits/-edits-request-builder/build.html @@ -0,0 +1,66 @@ + + + + + build + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

build

+
+

Create EditsRequest instance.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.edits/-edits-request-builder/index.html b/openai-core/com.aallam.openai.api.edits/-edits-request-builder/index.html new file mode 100644 index 00000000..482e6812 --- /dev/null +++ b/openai-core/com.aallam.openai.api.edits/-edits-request-builder/index.html @@ -0,0 +1,188 @@ + + + + + EditsRequestBuilder + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

EditsRequestBuilder

+

Builder of EditsRequest instances.

+
+
+
+
+
+

Constructors

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
constructor()
+
+
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+

The input text to use as a starting point for the edit.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

The instruction that tells the model how to edit the prompt.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

ID of the model to use.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

What sampling temperature to use. Higher values mean the model will take more risks. Try 0.9 for more creative applications, and 0 (argmax sampling) for ones with a well-defined answer.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
var topP: Double?

An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered.

+
+
+
+
+
+
+
+

Functions

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+

Create EditsRequest instance.
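As a usage sketch (not taken from this documentation), the builder is normally driven through the editsRequest function documented later in this package rather than by calling build directly; the import paths and the model identifier below are illustrative assumptions.

import com.aallam.openai.api.edits.EditsRequest
import com.aallam.openai.api.edits.editsRequest
import com.aallam.openai.api.model.ModelId // assumed import path

// Minimal sketch: populate the builder's properties, let the helper invoke build().
val request: EditsRequest = editsRequest {
    model = ModelId("text-davinci-edit-001")  // assumed model identifier
    input = "What day of the wek is it?"      // text to be edited
    instruction = "Fix the spelling mistakes"
    temperature = 0.0                         // set either temperature or topP, not both
}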

+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.edits/-edits-request-builder/input.html b/openai-core/com.aallam.openai.api.edits/-edits-request-builder/input.html new file mode 100644 index 00000000..bc5b6aa4 --- /dev/null +++ b/openai-core/com.aallam.openai.api.edits/-edits-request-builder/input.html @@ -0,0 +1,66 @@ + + + + + input + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

input

+
+

The input text to use as a starting point for the edit.

Defaults to ''.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.edits/-edits-request-builder/instruction.html b/openai-core/com.aallam.openai.api.edits/-edits-request-builder/instruction.html new file mode 100644 index 00000000..961aa9e2 --- /dev/null +++ b/openai-core/com.aallam.openai.api.edits/-edits-request-builder/instruction.html @@ -0,0 +1,66 @@ + + + + + instruction + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

instruction

+
+

The instruction that tells the model how to edit the prompt.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.edits/-edits-request-builder/model.html b/openai-core/com.aallam.openai.api.edits/-edits-request-builder/model.html new file mode 100644 index 00000000..1261b2ba --- /dev/null +++ b/openai-core/com.aallam.openai.api.edits/-edits-request-builder/model.html @@ -0,0 +1,66 @@ + + + + + model + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

model

+
+

ID of the model to use.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.edits/-edits-request-builder/temperature.html b/openai-core/com.aallam.openai.api.edits/-edits-request-builder/temperature.html new file mode 100644 index 00000000..8a3f9db9 --- /dev/null +++ b/openai-core/com.aallam.openai.api.edits/-edits-request-builder/temperature.html @@ -0,0 +1,66 @@ + + + + + temperature + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

temperature

+
+

What sampling temperature to use. Higher values mean the model will take more risks. Try 0.9 for more creative applications, and 0 (argmax sampling) for ones with a well-defined answer.

We generally recommend altering this or top_p but not both.

Defaults to 1.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.edits/-edits-request-builder/top-p.html b/openai-core/com.aallam.openai.api.edits/-edits-request-builder/top-p.html new file mode 100644 index 00000000..dd28b3ff --- /dev/null +++ b/openai-core/com.aallam.openai.api.edits/-edits-request-builder/top-p.html @@ -0,0 +1,66 @@ + + + + + topP + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

topP

+
+
var topP: Double?

An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered.

We generally recommend altering this or temperature but not both.

Defaults to 1.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.edits/-edits-request/-edits-request.html b/openai-core/com.aallam.openai.api.edits/-edits-request/-edits-request.html new file mode 100644 index 00000000..716e5bb8 --- /dev/null +++ b/openai-core/com.aallam.openai.api.edits/-edits-request/-edits-request.html @@ -0,0 +1,66 @@ + + + + + EditsRequest + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

EditsRequest

+
+
constructor(model: ModelId, instruction: String, input: String? = null, temperature: Double? = null, topP: Double? = null)
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.edits/-edits-request/index.html b/openai-core/com.aallam.openai.api.edits/-edits-request/index.html new file mode 100644 index 00000000..dbacc1fc --- /dev/null +++ b/openai-core/com.aallam.openai.api.edits/-edits-request/index.html @@ -0,0 +1,169 @@ + + + + + EditsRequest + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

EditsRequest

+
@Serializable
class EditsRequest(val model: ModelId, val instruction: String, val input: String? = null, val temperature: Double? = null, val topP: Double? = null)

A request for OpenAI to create a new edit for the provided input, instruction, and parameters.

documentation
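A request can also be constructed directly from the primary constructor shown above; only model and instruction are required, and the remaining parameters default to null. The ModelId import path and the model name are assumptions for illustration.

import com.aallam.openai.api.edits.EditsRequest
import com.aallam.openai.api.model.ModelId // assumed import path

val request = EditsRequest(
    model = ModelId("text-davinci-edit-001"), // assumed model identifier
    instruction = "Fix the spelling mistakes",
    input = "What day of the wek is it?",     // optional; defaults to null
)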

+
+
+
+
+
+

Constructors

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
constructor(model: ModelId, instruction: String, input: String? = null, temperature: Double? = null, topP: Double? = null)
+
+
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
val input: String? = null

The input text to use as a starting point for the edit.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

The instruction that tells the model how to edit the prompt.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

ID of the model to use.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
val temperature: Double? = null

What sampling temperature to use. Higher values mean the model will take more risks. Try 0.9 for more creative applications, and 0 (argmax sampling) for ones with a well-defined answer.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
val topP: Double? = null

An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered.

+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.edits/-edits-request/input.html b/openai-core/com.aallam.openai.api.edits/-edits-request/input.html new file mode 100644 index 00000000..c8027dc0 --- /dev/null +++ b/openai-core/com.aallam.openai.api.edits/-edits-request/input.html @@ -0,0 +1,66 @@ + + + + + input + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

input

+
+
val input: String? = null

The input text to use as a starting point for the edit.

Defaults to ''.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.edits/-edits-request/instruction.html b/openai-core/com.aallam.openai.api.edits/-edits-request/instruction.html new file mode 100644 index 00000000..6d0407b7 --- /dev/null +++ b/openai-core/com.aallam.openai.api.edits/-edits-request/instruction.html @@ -0,0 +1,66 @@ + + + + + instruction + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

instruction

+
+

The instruction that tells the model how to edit the prompt.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.edits/-edits-request/model.html b/openai-core/com.aallam.openai.api.edits/-edits-request/model.html new file mode 100644 index 00000000..794bb207 --- /dev/null +++ b/openai-core/com.aallam.openai.api.edits/-edits-request/model.html @@ -0,0 +1,66 @@ + + + + + model + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

model

+
+

ID of the model to use.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.edits/-edits-request/temperature.html b/openai-core/com.aallam.openai.api.edits/-edits-request/temperature.html new file mode 100644 index 00000000..75813fa7 --- /dev/null +++ b/openai-core/com.aallam.openai.api.edits/-edits-request/temperature.html @@ -0,0 +1,66 @@ + + + + + temperature + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

temperature

+
+
val temperature: Double? = null

What sampling temperature to use. Higher values mean the model will take more risks. Try 0.9 for more creative applications, and 0 (argmax sampling) for ones with a well-defined answer.

We generally recommend altering this or top_p but not both.

Defaults to 1.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.edits/-edits-request/top-p.html b/openai-core/com.aallam.openai.api.edits/-edits-request/top-p.html new file mode 100644 index 00000000..f6b66633 --- /dev/null +++ b/openai-core/com.aallam.openai.api.edits/-edits-request/top-p.html @@ -0,0 +1,66 @@ + + + + + topP + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

topP

+
+
val topP: Double? = null

An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered.

We generally recommend altering this or temperature but not both.

Defaults to 1.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.edits/edits-request.html b/openai-core/com.aallam.openai.api.edits/edits-request.html new file mode 100644 index 00000000..ece1e964 --- /dev/null +++ b/openai-core/com.aallam.openai.api.edits/edits-request.html @@ -0,0 +1,66 @@ + + + + + editsRequest + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

editsRequest

+
+

A request for OpenAI to create a new edit for the provided input, instruction, and parameters.

documentation

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.edits/index.html b/openai-core/com.aallam.openai.api.edits/index.html new file mode 100644 index 00000000..93e9d358 --- /dev/null +++ b/openai-core/com.aallam.openai.api.edits/index.html @@ -0,0 +1,138 @@ + + + + + com.aallam.openai.api.edits + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

Package-level declarations

+
+
+
+
+
+

Types

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
@Serializable
class Edit(val created: Long, val choices: List<Choice>, val usage: Usage)

Response to the edit creation request.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
@Serializable
class EditsRequest(val model: ModelId, val instruction: String, val input: String? = null, val temperature: Double? = null, val topP: Double? = null)

A request for OpenAI to create a new edit for the provided input, instruction, and parameters.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

Builder of EditsRequest instances.

+
+
+
+
+
+
+
+

Functions

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+

A request for OpenAI to create a new edit for the provided input, instruction, and parameters.

+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.embedding/-embedding-request-builder/-embedding-request-builder.html b/openai-core/com.aallam.openai.api.embedding/-embedding-request-builder/-embedding-request-builder.html new file mode 100644 index 00000000..993814fb --- /dev/null +++ b/openai-core/com.aallam.openai.api.embedding/-embedding-request-builder/-embedding-request-builder.html @@ -0,0 +1,66 @@ + + + + + EmbeddingRequestBuilder + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

EmbeddingRequestBuilder

+
+
constructor()
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.embedding/-embedding-request-builder/build.html b/openai-core/com.aallam.openai.api.embedding/-embedding-request-builder/build.html new file mode 100644 index 00000000..b5e66145 --- /dev/null +++ b/openai-core/com.aallam.openai.api.embedding/-embedding-request-builder/build.html @@ -0,0 +1,66 @@ + + + + + build + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

build

+
+

Create EmbeddingRequest instance.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.embedding/-embedding-request-builder/index.html b/openai-core/com.aallam.openai.api.embedding/-embedding-request-builder/index.html new file mode 100644 index 00000000..cfa95f7e --- /dev/null +++ b/openai-core/com.aallam.openai.api.embedding/-embedding-request-builder/index.html @@ -0,0 +1,158 @@ + + + + + EmbeddingRequestBuilder + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

EmbeddingRequestBuilder

+

Builder of EmbeddingRequest instances.

+
+
+
+
+
+

Constructors

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
constructor()
+
+
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+

Input text to get embeddings for, encoded as an array of tokens. Each input must not exceed 2048 tokens in length.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

ID of the model to use.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
var user: String?

A unique identifier representing your end-user, which will help OpenAI to monitor and detect abuse.

+
+
+
+
+
+
+
+

Functions

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+

Create EmbeddingRequest instance.
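As a usage sketch (not taken from this documentation), the builder is normally driven through the embeddingRequest function documented later in this package rather than by calling build directly; the import paths and the model identifier below are illustrative assumptions.

import com.aallam.openai.api.embedding.EmbeddingRequest
import com.aallam.openai.api.embedding.embeddingRequest
import com.aallam.openai.api.model.ModelId // assumed import path

// Minimal sketch: populate the builder's properties, let the helper invoke build().
val request: EmbeddingRequest = embeddingRequest {
    model = ModelId("text-embedding-ada-002")   // assumed model identifier
    input = listOf("The food was delicious.")   // each input is capped at 2048 tokens
    user = "end-user-1234"                      // optional abuse-monitoring identifier
}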

+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.embedding/-embedding-request-builder/input.html b/openai-core/com.aallam.openai.api.embedding/-embedding-request-builder/input.html new file mode 100644 index 00000000..27581bf7 --- /dev/null +++ b/openai-core/com.aallam.openai.api.embedding/-embedding-request-builder/input.html @@ -0,0 +1,66 @@ + + + + + input + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

input

+
+

Input text to get embeddings for, encoded as an array of tokens. Each input must not exceed 2048 tokens in length.

Unless you are embedding code, we suggest replacing newlines (\n) in your input with a single space, as we have observed inferior results when newlines are present.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.embedding/-embedding-request-builder/model.html b/openai-core/com.aallam.openai.api.embedding/-embedding-request-builder/model.html new file mode 100644 index 00000000..3dc6e76b --- /dev/null +++ b/openai-core/com.aallam.openai.api.embedding/-embedding-request-builder/model.html @@ -0,0 +1,66 @@ + + + + + model + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

model

+
+

ID of the model to use.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.embedding/-embedding-request-builder/user.html b/openai-core/com.aallam.openai.api.embedding/-embedding-request-builder/user.html new file mode 100644 index 00000000..74f2200a --- /dev/null +++ b/openai-core/com.aallam.openai.api.embedding/-embedding-request-builder/user.html @@ -0,0 +1,66 @@ + + + + + user + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

user

+
+
var user: String?

A unique identifier representing your end-user, which will help OpenAI to monitor and detect abuse.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.embedding/-embedding-request/-embedding-request.html b/openai-core/com.aallam.openai.api.embedding/-embedding-request/-embedding-request.html new file mode 100644 index 00000000..bf71c4c5 --- /dev/null +++ b/openai-core/com.aallam.openai.api.embedding/-embedding-request/-embedding-request.html @@ -0,0 +1,66 @@ + + + + + EmbeddingRequest + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

EmbeddingRequest

+
+
constructor(model: ModelId, input: List<String>, user: String? = null)
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.embedding/-embedding-request/index.html b/openai-core/com.aallam.openai.api.embedding/-embedding-request/index.html new file mode 100644 index 00000000..bf54a0cf --- /dev/null +++ b/openai-core/com.aallam.openai.api.embedding/-embedding-request/index.html @@ -0,0 +1,139 @@ + + + + + EmbeddingRequest + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

EmbeddingRequest

+
@Serializable
class EmbeddingRequest(val model: ModelId, val input: List<String>, val user: String? = null)

Create an embedding request.

documentation

+
+
+
+
+
+

Constructors

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
constructor(model: ModelId, input: List<String>, user: String? = null)
+
+
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+

Input text to get embeddings for, encoded as an array of tokens. Each input must not exceed 2048 tokens in length.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

ID of the model to use.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
val user: String? = null

A unique identifier representing your end-user, which will help OpenAI to monitor and detect abuse.

+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.embedding/-embedding-request/input.html b/openai-core/com.aallam.openai.api.embedding/-embedding-request/input.html new file mode 100644 index 00000000..3292143b --- /dev/null +++ b/openai-core/com.aallam.openai.api.embedding/-embedding-request/input.html @@ -0,0 +1,66 @@ + + + + + input + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

input

+
+

Input text to get embeddings for, encoded as an array of tokens. Each input must not exceed 2048 tokens in length.

Unless you are embedding code, we suggest replacing newlines (\n) in your input with a single space, as we have observed inferior results when newlines are present.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.embedding/-embedding-request/model.html b/openai-core/com.aallam.openai.api.embedding/-embedding-request/model.html new file mode 100644 index 00000000..6c32f353 --- /dev/null +++ b/openai-core/com.aallam.openai.api.embedding/-embedding-request/model.html @@ -0,0 +1,66 @@ + + + + + model + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

model

+
+

ID of the model to use.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.embedding/-embedding-request/user.html b/openai-core/com.aallam.openai.api.embedding/-embedding-request/user.html new file mode 100644 index 00000000..f635c841 --- /dev/null +++ b/openai-core/com.aallam.openai.api.embedding/-embedding-request/user.html @@ -0,0 +1,66 @@ + + + + + user + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

user

+
+
val user: String? = null

A unique identifier representing your end-user, which will help OpenAI to monitor and detect abuse.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.embedding/-embedding-response/-embedding-response.html b/openai-core/com.aallam.openai.api.embedding/-embedding-response/-embedding-response.html new file mode 100644 index 00000000..b0139fd6 --- /dev/null +++ b/openai-core/com.aallam.openai.api.embedding/-embedding-response/-embedding-response.html @@ -0,0 +1,66 @@ + + + + + EmbeddingResponse + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

EmbeddingResponse

+
+
constructor(embeddings: List<Embedding>, usage: Usage)
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.embedding/-embedding-response/embeddings.html b/openai-core/com.aallam.openai.api.embedding/-embedding-response/embeddings.html new file mode 100644 index 00000000..5aa7a29d --- /dev/null +++ b/openai-core/com.aallam.openai.api.embedding/-embedding-response/embeddings.html @@ -0,0 +1,66 @@ + + + + + embeddings + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

embeddings

+
+

The embedding results.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.embedding/-embedding-response/index.html b/openai-core/com.aallam.openai.api.embedding/-embedding-response/index.html new file mode 100644 index 00000000..533e707f --- /dev/null +++ b/openai-core/com.aallam.openai.api.embedding/-embedding-response/index.html @@ -0,0 +1,124 @@ + + + + + EmbeddingResponse + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

EmbeddingResponse

+
@Serializable
class EmbeddingResponse(val embeddings: List<Embedding>, val usage: Usage)

Create embeddings response.

+
+
+
+
+
+

Constructors

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
constructor(embeddings: List<Embedding>, usage: Usage)
+
+
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+

The embedding results.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

Embedding usage data.

+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.embedding/-embedding-response/usage.html b/openai-core/com.aallam.openai.api.embedding/-embedding-response/usage.html new file mode 100644 index 00000000..41f45dfd --- /dev/null +++ b/openai-core/com.aallam.openai.api.embedding/-embedding-response/usage.html @@ -0,0 +1,66 @@ + + + + + usage + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

usage

+
+

Embedding usage data.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.embedding/-embedding/--index--.html b/openai-core/com.aallam.openai.api.embedding/-embedding/--index--.html new file mode 100644 index 00000000..ae85013b --- /dev/null +++ b/openai-core/com.aallam.openai.api.embedding/-embedding/--index--.html @@ -0,0 +1,66 @@ + + + + + index + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

index

+
+
val index: Int
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.embedding/-embedding/-embedding.html b/openai-core/com.aallam.openai.api.embedding/-embedding/-embedding.html new file mode 100644 index 00000000..89b69140 --- /dev/null +++ b/openai-core/com.aallam.openai.api.embedding/-embedding/-embedding.html @@ -0,0 +1,66 @@ + + + + + Embedding + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

Embedding

+
+
constructor(embedding: List<Double>, index: Int)
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.embedding/-embedding/embedding.html b/openai-core/com.aallam.openai.api.embedding/-embedding/embedding.html new file mode 100644 index 00000000..b26e2634 --- /dev/null +++ b/openai-core/com.aallam.openai.api.embedding/-embedding/embedding.html @@ -0,0 +1,66 @@ + + + + + embedding + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

embedding

+
+ +
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.embedding/-embedding/index.html b/openai-core/com.aallam.openai.api.embedding/-embedding/index.html new file mode 100644 index 00000000..27094526 --- /dev/null +++ b/openai-core/com.aallam.openai.api.embedding/-embedding/index.html @@ -0,0 +1,124 @@ + + + + + Embedding + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

Embedding

+
@Serializable
class Embedding(val embedding: List<Double>, val index: Int)

An embedding result. documentation
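Because each result exposes its raw vector through the embedding property, downstream similarity scoring needs nothing beyond standard-library math. The cosineSimilarity helper below is a hypothetical local function sketched for illustration, not part of this API.

import com.aallam.openai.api.embedding.Embedding
import kotlin.math.sqrt

// Hypothetical helper: cosine similarity between two embedding results.
fun cosineSimilarity(a: Embedding, b: Embedding): Double {
    require(a.embedding.size == b.embedding.size) { "vectors must have the same dimension" }
    val dot = a.embedding.zip(b.embedding).sumOf { (x, y) -> x * y }
    val normA = sqrt(a.embedding.sumOf { it * it })
    val normB = sqrt(b.embedding.sumOf { it * it })
    return dot / (normA * normB)
}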

+
+
+
+
+
+

Constructors

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
constructor(embedding: List<Double>, index: Int)
+
+
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+ +
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
val index: Int
+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.embedding/embedding-request.html b/openai-core/com.aallam.openai.api.embedding/embedding-request.html new file mode 100644 index 00000000..a01284e7 --- /dev/null +++ b/openai-core/com.aallam.openai.api.embedding/embedding-request.html @@ -0,0 +1,66 @@ + + + + + embeddingRequest + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

embeddingRequest

+
+

Create an embedding request.

documentation

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.embedding/index.html b/openai-core/com.aallam.openai.api.embedding/index.html new file mode 100644 index 00000000..13ae909e --- /dev/null +++ b/openai-core/com.aallam.openai.api.embedding/index.html @@ -0,0 +1,153 @@ + + + + + com.aallam.openai.api.embedding + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

Package-level declarations

+
+
+
+
+
+

Types

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
@Serializable
class Embedding(val embedding: List<Double>, val index: Int)

An embedding result. documentation

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
@Serializable
class EmbeddingRequest(val model: ModelId, val input: List<String>, val user: String? = null)

Create an embedding request.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

Builder of EmbeddingRequest instances.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
@Serializable
class EmbeddingResponse(val embeddings: List<Embedding>, val usage: Usage)

Create embeddings response.

+
+
+
+
+
+
+
+

Functions

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+

Create an embedding request.

+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.exception/-authentication-exception/-authentication-exception.html b/openai-core/com.aallam.openai.api.exception/-authentication-exception/-authentication-exception.html new file mode 100644 index 00000000..bb3e0a18 --- /dev/null +++ b/openai-core/com.aallam.openai.api.exception/-authentication-exception/-authentication-exception.html @@ -0,0 +1,66 @@ + + + + + AuthenticationException + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

AuthenticationException

+
+
constructor(statusCode: Int, error: OpenAIError, throwable: Throwable? = null)
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.exception/-authentication-exception/index.html b/openai-core/com.aallam.openai.api.exception/-authentication-exception/index.html new file mode 100644 index 00000000..98a375ca --- /dev/null +++ b/openai-core/com.aallam.openai.api.exception/-authentication-exception/index.html @@ -0,0 +1,154 @@ + + + + + AuthenticationException + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

AuthenticationException

+
class AuthenticationException(val statusCode: Int, val error: OpenAIError, throwable: Throwable? = null) : OpenAIAPIException

Represents an exception thrown when an authentication error occurs while interacting with the OpenAI API.

+
+
+
+
+
+

Constructors

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
constructor(statusCode: Int, error: OpenAIError, throwable: Throwable? = null)
+
+
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
open val cause: Throwable?
+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

an instance of OpenAIError containing information about the error that occurred.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
open val message: String?
+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

the HTTP status code associated with the error.

+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.exception/-generic-i-o-exception/-generic-i-o-exception.html b/openai-core/com.aallam.openai.api.exception/-generic-i-o-exception/-generic-i-o-exception.html new file mode 100644 index 00000000..1835c903 --- /dev/null +++ b/openai-core/com.aallam.openai.api.exception/-generic-i-o-exception/-generic-i-o-exception.html @@ -0,0 +1,66 @@ + + + + + GenericIOException + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

GenericIOException

+
+
constructor(throwable: Throwable? = null)
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.exception/-generic-i-o-exception/index.html b/openai-core/com.aallam.openai.api.exception/-generic-i-o-exception/index.html new file mode 100644 index 00000000..818e4fb9 --- /dev/null +++ b/openai-core/com.aallam.openai.api.exception/-generic-i-o-exception/index.html @@ -0,0 +1,124 @@ + + + + + GenericIOException + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

GenericIOException

+
class GenericIOException(throwable: Throwable? = null) : OpenAIIOException

An exception thrown in case of an I/O error

+
+
+
+
+
+

Constructors

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
constructor(throwable: Throwable? = null)
+
+
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
open val cause: Throwable?
+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
open val message: String?
+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.exception/-invalid-request-exception/-invalid-request-exception.html b/openai-core/com.aallam.openai.api.exception/-invalid-request-exception/-invalid-request-exception.html new file mode 100644 index 00000000..48baa914 --- /dev/null +++ b/openai-core/com.aallam.openai.api.exception/-invalid-request-exception/-invalid-request-exception.html @@ -0,0 +1,66 @@ + + + + + InvalidRequestException + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

InvalidRequestException

+
+
constructor(statusCode: Int, error: OpenAIError, throwable: Throwable? = null)
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.exception/-invalid-request-exception/index.html b/openai-core/com.aallam.openai.api.exception/-invalid-request-exception/index.html new file mode 100644 index 00000000..65b3b503 --- /dev/null +++ b/openai-core/com.aallam.openai.api.exception/-invalid-request-exception/index.html @@ -0,0 +1,154 @@ + + + + + InvalidRequestException + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

InvalidRequestException

+
class InvalidRequestException(val statusCode: Int, val error: OpenAIError, throwable: Throwable? = null) : OpenAIAPIException

Represents an exception thrown when an invalid request is made to the OpenAI API.

+
+
+
+
+
+

Constructors

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
constructor(statusCode: Int, error: OpenAIError, throwable: Throwable? = null)
+
+
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
open val cause: Throwable?
+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

an instance of OpenAIError containing information about the error that occurred.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
open val message: String?
+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

the HTTP status code associated with the error.

+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.exception/-open-a-i-a-p-i-exception/error.html b/openai-core/com.aallam.openai.api.exception/-open-a-i-a-p-i-exception/error.html new file mode 100644 index 00000000..e92268e5 --- /dev/null +++ b/openai-core/com.aallam.openai.api.exception/-open-a-i-a-p-i-exception/error.html @@ -0,0 +1,66 @@ + + + + + error + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

error

+
+ +
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.exception/-open-a-i-a-p-i-exception/index.html b/openai-core/com.aallam.openai.api.exception/-open-a-i-a-p-i-exception/index.html new file mode 100644 index 00000000..b3e3c315 --- /dev/null +++ b/openai-core/com.aallam.openai.api.exception/-open-a-i-a-p-i-exception/index.html @@ -0,0 +1,135 @@ + + + + + OpenAIAPIException + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

OpenAIAPIException

+

Represents an exception thrown when an error occurs while interacting with the OpenAI API.

Inheritors

+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
open val cause: Throwable?
+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

an instance of OpenAIError containing information about the error that occurred.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
open val message: String?
+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

the HTTP status code associated with the error.

+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.exception/-open-a-i-a-p-i-exception/status-code.html b/openai-core/com.aallam.openai.api.exception/-open-a-i-a-p-i-exception/status-code.html new file mode 100644 index 00000000..7132b895 --- /dev/null +++ b/openai-core/com.aallam.openai.api.exception/-open-a-i-a-p-i-exception/status-code.html @@ -0,0 +1,66 @@ + + + + + statusCode + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

statusCode

+
+ +
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.exception/-open-a-i-error-details/-open-a-i-error-details.html b/openai-core/com.aallam.openai.api.exception/-open-a-i-error-details/-open-a-i-error-details.html new file mode 100644 index 00000000..11b7684e --- /dev/null +++ b/openai-core/com.aallam.openai.api.exception/-open-a-i-error-details/-open-a-i-error-details.html @@ -0,0 +1,66 @@ + + + + + OpenAIErrorDetails + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

OpenAIErrorDetails

+
+
constructor(code: String?, message: String?, param: String?, type: String?)

Parameters

code

error code returned by the OpenAI API.

message

human-readable error message describing the error that occurred.

param

the parameter that caused the error, if applicable.

type

the type of error that occurred.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.exception/-open-a-i-error-details/code.html b/openai-core/com.aallam.openai.api.exception/-open-a-i-error-details/code.html new file mode 100644 index 00000000..16cce405 --- /dev/null +++ b/openai-core/com.aallam.openai.api.exception/-open-a-i-error-details/code.html @@ -0,0 +1,66 @@ + + + + + code + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

code

+
+
val code: String?

Parameters

code

error code returned by the OpenAI API.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.exception/-open-a-i-error-details/index.html b/openai-core/com.aallam.openai.api.exception/-open-a-i-error-details/index.html new file mode 100644 index 00000000..49771c8e --- /dev/null +++ b/openai-core/com.aallam.openai.api.exception/-open-a-i-error-details/index.html @@ -0,0 +1,154 @@ + + + + + OpenAIErrorDetails + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

OpenAIErrorDetails

+
@Serializable
data class OpenAIErrorDetails(val code: String?, val message: String?, val param: String?, val type: String?)

Represents an error object returned by the OpenAI API.

Parameters

code

error code returned by the OpenAI API.

message

human-readable error message describing the error that occurred.

param

the parameter that caused the error, if applicable.

type

the type of error that occurred.

+
+
+
+
+
+

Constructors

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
constructor(code: String?, message: String?, param: String?, type: String?)
+
+
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
val code: String?
+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+ +
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+ +
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
val type: String?
+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.exception/-open-a-i-error-details/message.html b/openai-core/com.aallam.openai.api.exception/-open-a-i-error-details/message.html new file mode 100644 index 00000000..72b0c7b9 --- /dev/null +++ b/openai-core/com.aallam.openai.api.exception/-open-a-i-error-details/message.html @@ -0,0 +1,66 @@ + + + + + message + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

message

+
+

Parameters

message

human-readable error message describing the error that occurred.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.exception/-open-a-i-error-details/param.html b/openai-core/com.aallam.openai.api.exception/-open-a-i-error-details/param.html new file mode 100644 index 00000000..88bf84b8 --- /dev/null +++ b/openai-core/com.aallam.openai.api.exception/-open-a-i-error-details/param.html @@ -0,0 +1,66 @@ + + + + + param + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

param

+
+

Parameters

param

the parameter that caused the error, if applicable.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.exception/-open-a-i-error-details/type.html b/openai-core/com.aallam.openai.api.exception/-open-a-i-error-details/type.html new file mode 100644 index 00000000..7bf4f278 --- /dev/null +++ b/openai-core/com.aallam.openai.api.exception/-open-a-i-error-details/type.html @@ -0,0 +1,66 @@ + + + + + type + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

type

+
+
val type: String?

Parameters

type

the type of error that occurred.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.exception/-open-a-i-error/-open-a-i-error.html b/openai-core/com.aallam.openai.api.exception/-open-a-i-error/-open-a-i-error.html new file mode 100644 index 00000000..4a634a84 --- /dev/null +++ b/openai-core/com.aallam.openai.api.exception/-open-a-i-error/-open-a-i-error.html @@ -0,0 +1,66 @@ + + + + + OpenAIError + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

OpenAIError

+
+
constructor(detail: OpenAIErrorDetails?)

Parameters

detail

information about the error that occurred.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.exception/-open-a-i-error/detail.html b/openai-core/com.aallam.openai.api.exception/-open-a-i-error/detail.html new file mode 100644 index 00000000..189e5e90 --- /dev/null +++ b/openai-core/com.aallam.openai.api.exception/-open-a-i-error/detail.html @@ -0,0 +1,66 @@ + + + + + detail + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

detail

+
+

Parameters

detail

information about the error that occurred.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.exception/-open-a-i-error/index.html b/openai-core/com.aallam.openai.api.exception/-open-a-i-error/index.html new file mode 100644 index 00000000..39aac185 --- /dev/null +++ b/openai-core/com.aallam.openai.api.exception/-open-a-i-error/index.html @@ -0,0 +1,109 @@ + + + + + OpenAIError + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

OpenAIError

+
@Serializable
data class OpenAIError(val detail: OpenAIErrorDetails?)

Represents an error response from the OpenAI API.

Parameters

detail

information about the error that occurred.

+
+
+
+
+
+

Constructors

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
constructor(detail: OpenAIErrorDetails?)
+
+
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+ +
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.exception/-open-a-i-exception/index.html b/openai-core/com.aallam.openai.api.exception/-open-a-i-exception/index.html new file mode 100644 index 00000000..4d580d88 --- /dev/null +++ b/openai-core/com.aallam.openai.api.exception/-open-a-i-exception/index.html @@ -0,0 +1,105 @@ + + + + + OpenAIException + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

OpenAIException

+ +
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
open val cause: Throwable?
+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
open val message: String?
+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.exception/-open-a-i-http-exception/-open-a-i-http-exception.html b/openai-core/com.aallam.openai.api.exception/-open-a-i-http-exception/-open-a-i-http-exception.html new file mode 100644 index 00000000..07ca95c4 --- /dev/null +++ b/openai-core/com.aallam.openai.api.exception/-open-a-i-http-exception/-open-a-i-http-exception.html @@ -0,0 +1,66 @@ + + + + + OpenAIHttpException + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

OpenAIHttpException

+
+
constructor(throwable: Throwable? = null)
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.exception/-open-a-i-http-exception/index.html b/openai-core/com.aallam.openai.api.exception/-open-a-i-http-exception/index.html new file mode 100644 index 00000000..f120691d --- /dev/null +++ b/openai-core/com.aallam.openai.api.exception/-open-a-i-http-exception/index.html @@ -0,0 +1,124 @@ + + + + + OpenAIHttpException + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

OpenAIHttpException

+
class OpenAIHttpException(throwable: Throwable? = null) : OpenAIException

Runtime Http Client exception

+
+
+
+
+
+

Constructors

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
constructor(throwable: Throwable? = null)
+
+
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
open val cause: Throwable?
+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
open val message: String?
+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.exception/-open-a-i-i-o-exception/index.html b/openai-core/com.aallam.openai.api.exception/-open-a-i-i-o-exception/index.html new file mode 100644 index 00000000..1e4e7c3d --- /dev/null +++ b/openai-core/com.aallam.openai.api.exception/-open-a-i-i-o-exception/index.html @@ -0,0 +1,105 @@ + + + + + OpenAIIOException + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

OpenAIIOException

+

An exception thrown in case of an I/O error

Inheritors

+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
open val cause: Throwable?
+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
open val message: String?
+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.exception/-open-a-i-server-exception/-open-a-i-server-exception.html b/openai-core/com.aallam.openai.api.exception/-open-a-i-server-exception/-open-a-i-server-exception.html new file mode 100644 index 00000000..a7fdfbb9 --- /dev/null +++ b/openai-core/com.aallam.openai.api.exception/-open-a-i-server-exception/-open-a-i-server-exception.html @@ -0,0 +1,66 @@ + + + + + OpenAIServerException + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

OpenAIServerException

+
+
constructor(throwable: Throwable? = null)
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.exception/-open-a-i-server-exception/index.html b/openai-core/com.aallam.openai.api.exception/-open-a-i-server-exception/index.html new file mode 100644 index 00000000..c1784706 --- /dev/null +++ b/openai-core/com.aallam.openai.api.exception/-open-a-i-server-exception/index.html @@ -0,0 +1,124 @@ + + + + + OpenAIServerException + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

OpenAIServerException

+
class OpenAIServerException(throwable: Throwable? = null) : OpenAIException

An exception thrown in case of a server error

+
+
+
+
+
+

Constructors

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
constructor(throwable: Throwable? = null)
+
+
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
open val cause: Throwable?
+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
open val message: String?
+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.exception/-open-a-i-timeout-exception/-open-a-i-timeout-exception.html b/openai-core/com.aallam.openai.api.exception/-open-a-i-timeout-exception/-open-a-i-timeout-exception.html new file mode 100644 index 00000000..a7b1be67 --- /dev/null +++ b/openai-core/com.aallam.openai.api.exception/-open-a-i-timeout-exception/-open-a-i-timeout-exception.html @@ -0,0 +1,66 @@ + + + + + OpenAITimeoutException + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

OpenAITimeoutException

+
+
constructor(throwable: Throwable)
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.exception/-open-a-i-timeout-exception/index.html b/openai-core/com.aallam.openai.api.exception/-open-a-i-timeout-exception/index.html new file mode 100644 index 00000000..b7bfe1ae --- /dev/null +++ b/openai-core/com.aallam.openai.api.exception/-open-a-i-timeout-exception/index.html @@ -0,0 +1,124 @@ + + + + + OpenAITimeoutException + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

OpenAITimeoutException

+

An exception thrown in case a request times out.

+
+
+
+
+
+

Constructors

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
constructor(throwable: Throwable)
+
+
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
open val cause: Throwable?
+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
open val message: String?
+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.exception/-permission-exception/-permission-exception.html b/openai-core/com.aallam.openai.api.exception/-permission-exception/-permission-exception.html new file mode 100644 index 00000000..f3d5ee05 --- /dev/null +++ b/openai-core/com.aallam.openai.api.exception/-permission-exception/-permission-exception.html @@ -0,0 +1,66 @@ + + + + + PermissionException + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

PermissionException

+
+
constructor(statusCode: Int, error: OpenAIError, throwable: Throwable? = null)
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.exception/-permission-exception/index.html b/openai-core/com.aallam.openai.api.exception/-permission-exception/index.html new file mode 100644 index 00000000..c5b191e8 --- /dev/null +++ b/openai-core/com.aallam.openai.api.exception/-permission-exception/index.html @@ -0,0 +1,154 @@ + + + + + PermissionException + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

PermissionException

+
class PermissionException(val statusCode: Int, val error: OpenAIError, throwable: Throwable? = null) : OpenAIAPIException

Represents an exception thrown when a permission error occurs while interacting with the OpenAI API.

+
+
+
+
+
+

Constructors

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
constructor(statusCode: Int, error: OpenAIError, throwable: Throwable? = null)
+
+
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
open val cause: Throwable?
+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

an instance of OpenAIError containing information about the error that occurred.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
open val message: String?
+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

the HTTP status code associated with the error.

+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.exception/-rate-limit-exception/-rate-limit-exception.html b/openai-core/com.aallam.openai.api.exception/-rate-limit-exception/-rate-limit-exception.html new file mode 100644 index 00000000..2bf5677a --- /dev/null +++ b/openai-core/com.aallam.openai.api.exception/-rate-limit-exception/-rate-limit-exception.html @@ -0,0 +1,66 @@ + + + + + RateLimitException + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

RateLimitException

+
+
constructor(statusCode: Int, error: OpenAIError, throwable: Throwable? = null)
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.exception/-rate-limit-exception/index.html b/openai-core/com.aallam.openai.api.exception/-rate-limit-exception/index.html new file mode 100644 index 00000000..5b184d79 --- /dev/null +++ b/openai-core/com.aallam.openai.api.exception/-rate-limit-exception/index.html @@ -0,0 +1,154 @@ + + + + + RateLimitException + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

RateLimitException

+
class RateLimitException(val statusCode: Int, val error: OpenAIError, throwable: Throwable? = null) : OpenAIAPIException

Represents an exception thrown when the OpenAI API rate limit is exceeded.

+
+
+
+
+
+

Constructors

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
constructor(statusCode: Int, error: OpenAIError, throwable: Throwable? = null)
+
+
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
open val cause: Throwable?
+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

an instance of OpenAIError containing information about the error that occurred.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
open val message: String?
+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

the HTTP status code associated with the error.

+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.exception/-unknown-a-p-i-exception/-unknown-a-p-i-exception.html b/openai-core/com.aallam.openai.api.exception/-unknown-a-p-i-exception/-unknown-a-p-i-exception.html new file mode 100644 index 00000000..9af8fe6a --- /dev/null +++ b/openai-core/com.aallam.openai.api.exception/-unknown-a-p-i-exception/-unknown-a-p-i-exception.html @@ -0,0 +1,66 @@ + + + + + UnknownAPIException + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

UnknownAPIException

+
+
constructor(statusCode: Int, error: OpenAIError, throwable: Throwable? = null)
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.exception/-unknown-a-p-i-exception/index.html b/openai-core/com.aallam.openai.api.exception/-unknown-a-p-i-exception/index.html new file mode 100644 index 00000000..53e95faf --- /dev/null +++ b/openai-core/com.aallam.openai.api.exception/-unknown-a-p-i-exception/index.html @@ -0,0 +1,154 @@ + + + + + UnknownAPIException + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

UnknownAPIException

+
class UnknownAPIException(val statusCode: Int, val error: OpenAIError, throwable: Throwable? = null) : OpenAIAPIException

Represents an exception thrown when an unknown error occurs while interacting with the OpenAI API. This exception is used when the specific type of error is not covered by the existing subclasses.

+
+
+
+
+
+

Constructors

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
constructor(statusCode: Int, error: OpenAIError, throwable: Throwable? = null)
+
+
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
open val cause: Throwable?
+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

an instance of OpenAIError containing information about the error that occurred.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
open val message: String?
+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

the HTTP status code associated with the error.

+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.exception/index.html b/openai-core/com.aallam.openai.api.exception/index.html new file mode 100644 index 00000000..230e3388 --- /dev/null +++ b/openai-core/com.aallam.openai.api.exception/index.html @@ -0,0 +1,284 @@ + + + + + com.aallam.openai.api.exception + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

Package-level declarations

+
+
+
+
+
+

Types

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
class AuthenticationException(val statusCode: Int, val error: OpenAIError, throwable: Throwable? = null) : OpenAIAPIException

Represents an exception thrown when an authentication error occurs while interacting with the OpenAI API.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
class GenericIOException(throwable: Throwable? = null) : OpenAIIOException

An exception thrown in case of an I/O error

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
class InvalidRequestException(val statusCode: Int, val error: OpenAIError, throwable: Throwable? = null) : OpenAIAPIException

Represents an exception thrown when an invalid request is made to the OpenAI API.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

Represents an exception thrown when an error occurs while interacting with the OpenAI API.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
@Serializable
data class OpenAIError(val detail: OpenAIErrorDetails?)

Represents an error response from the OpenAI API.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
@Serializable
data class OpenAIErrorDetails(val code: String?, val message: String?, val param: String?, val type: String?)

Represents an error object returned by the OpenAI API.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

OpenAI client exception

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
class OpenAIHttpException(throwable: Throwable? = null) : OpenAIException

Runtime Http Client exception

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

An exception thrown in case of an I/O error

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
class OpenAIServerException(throwable: Throwable? = null) : OpenAIException

An exception thrown in case of a server error

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

An exception thrown in case a request times out.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
class PermissionException(val statusCode: Int, val error: OpenAIError, throwable: Throwable? = null) : OpenAIAPIException

Represents an exception thrown when a permission error occurs while interacting with the OpenAI API.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
class RateLimitException(val statusCode: Int, val error: OpenAIError, throwable: Throwable? = null) : OpenAIAPIException

Represents an exception thrown when the OpenAI API rate limit is exceeded.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
class UnknownAPIException(val statusCode: Int, val error: OpenAIError, throwable: Throwable? = null) : OpenAIAPIException

Represents an exception thrown when an unknown error occurs while interacting with the OpenAI API. This exception is used when the specific type of error is not covered by the existing subclasses.
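In client code these types are usually handled as a hierarchy: the OpenAIAPIException subclasses carry a statusCode and an OpenAIError, while the I/O and server exceptions signal transport-level failures. The sketch below is illustrative only; callOpenAI stands in for any suspending API call that may throw these exceptions and is not part of this package.

import com.aallam.openai.api.exception.AuthenticationException
import com.aallam.openai.api.exception.InvalidRequestException
import com.aallam.openai.api.exception.OpenAIAPIException
import com.aallam.openai.api.exception.OpenAIIOException
import com.aallam.openai.api.exception.RateLimitException

// Sketch: mapping the exception hierarchy to retry/abort decisions.
suspend fun <T> runCatchingOpenAI(callOpenAI: suspend () -> T): T? = try {
    callOpenAI()
} catch (e: RateLimitException) {
    null // rate limited (typically HTTP 429): back off and retry later
} catch (e: AuthenticationException) {
    throw e // credentials problem: not retryable
} catch (e: InvalidRequestException) {
    println("Invalid request: ${e.error.detail?.message}")
    null
} catch (e: OpenAIAPIException) {
    println("API error ${e.statusCode}: ${e.error.detail?.message}")
    null
} catch (e: OpenAIIOException) {
    null // transport-level failure: often retryable
}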

+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.file/-file-id/-file-id.html b/openai-core/com.aallam.openai.api.file/-file-id/-file-id.html new file mode 100644 index 00000000..5d6f2288 --- /dev/null +++ b/openai-core/com.aallam.openai.api.file/-file-id/-file-id.html @@ -0,0 +1,66 @@ + + + + + FileId + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

FileId

+
+
constructor(id: String)
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.file/-file-id/id.html b/openai-core/com.aallam.openai.api.file/-file-id/id.html new file mode 100644 index 00000000..31493ce6 --- /dev/null +++ b/openai-core/com.aallam.openai.api.file/-file-id/id.html @@ -0,0 +1,66 @@ + + + + + id + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

id

+
+
val id: String
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.file/-file-id/index.html b/openai-core/com.aallam.openai.api.file/-file-id/index.html new file mode 100644 index 00000000..b0d429c5 --- /dev/null +++ b/openai-core/com.aallam.openai.api.file/-file-id/index.html @@ -0,0 +1,109 @@ + + + + + FileId + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

FileId

+
@Serializable
value class FileId(val id: String)

File identifier.

+
+
+
+
+
+

Constructors

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
constructor(id: String)
+
+
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
val id: String
+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.file/-file-source-builder/-file-source-builder.html b/openai-core/com.aallam.openai.api.file/-file-source-builder/-file-source-builder.html new file mode 100644 index 00000000..02080545 --- /dev/null +++ b/openai-core/com.aallam.openai.api.file/-file-source-builder/-file-source-builder.html @@ -0,0 +1,66 @@ + + + + + FileSourceBuilder + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

FileSourceBuilder

+
+
constructor()
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.file/-file-source-builder/build.html b/openai-core/com.aallam.openai.api.file/-file-source-builder/build.html new file mode 100644 index 00000000..79513b71 --- /dev/null +++ b/openai-core/com.aallam.openai.api.file/-file-source-builder/build.html @@ -0,0 +1,66 @@ + + + + + build + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

build

+
+

Creates the FileSource instance

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.file/-file-source-builder/index.html b/openai-core/com.aallam.openai.api.file/-file-source-builder/index.html new file mode 100644 index 00000000..646f2f95 --- /dev/null +++ b/openai-core/com.aallam.openai.api.file/-file-source-builder/index.html @@ -0,0 +1,143 @@ + + + + + FileSourceBuilder + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

FileSourceBuilder

+

Builder of FileSource instances.

+
+
+
+
+
+

Constructors

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
constructor()
+
+
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
var name: String?

File name.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
var source: Source?

File source.

+
+
+
+
+
+
+
+

Functions

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+

Creates the FileSource instance

+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.file/-file-source-builder/name.html b/openai-core/com.aallam.openai.api.file/-file-source-builder/name.html new file mode 100644 index 00000000..780ed5fb --- /dev/null +++ b/openai-core/com.aallam.openai.api.file/-file-source-builder/name.html @@ -0,0 +1,66 @@ + + + + + name + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

name

+
+
var name: String?

File name.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.file/-file-source-builder/source.html b/openai-core/com.aallam.openai.api.file/-file-source-builder/source.html new file mode 100644 index 00000000..2eab1acd --- /dev/null +++ b/openai-core/com.aallam.openai.api.file/-file-source-builder/source.html @@ -0,0 +1,66 @@ + + + + + source + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

source

+
+
var source: Source?

File source.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.file/-file-source/-file-source.html b/openai-core/com.aallam.openai.api.file/-file-source/-file-source.html new file mode 100644 index 00000000..91a292d0 --- /dev/null +++ b/openai-core/com.aallam.openai.api.file/-file-source/-file-source.html @@ -0,0 +1,66 @@ + + + + + FileSource + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

FileSource

+
+
constructor(path: Path, fileSystem: FileSystem)

Create FileSource instance.

Parameters

path

file path to upload

fileSystem

file system to be used


constructor(name: String, source: Source)
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.file/-file-source/index.html b/openai-core/com.aallam.openai.api.file/-file-source/index.html new file mode 100644 index 00000000..416b748f --- /dev/null +++ b/openai-core/com.aallam.openai.api.file/-file-source/index.html @@ -0,0 +1,124 @@ + + + + + FileSource + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

FileSource

+
class FileSource(val name: String, val source: Source)

Represents a file resource.

+
+
+
+
+
+

Constructors

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
constructor(path: Path, fileSystem: FileSystem)

Create FileSource instance.

constructor(name: String, source: Source)
+
+
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+

File name.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
val source: Source

File source.

+
+
+
+
+
+
+
+
+
+ +
+
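A minimal construction sketch for FileSource using the two documented constructors. The path is a placeholder and okio's `FileSystem.SYSTEM` is assumed to be available on the target platform.

```kotlin
import com.aallam.openai.api.file.FileSource
import okio.FileSystem
import okio.Path.Companion.toPath

// First documented constructor: resolve the file through an okio FileSystem.
val fromPath = FileSource(path = "data/training.jsonl".toPath(), fileSystem = FileSystem.SYSTEM)

// Second documented constructor: provide a display name and an okio Source directly.
val fromSource = FileSource(
    name = "training.jsonl",
    source = FileSystem.SYSTEM.source("data/training.jsonl".toPath()),
)
```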
+ + + diff --git a/openai-core/com.aallam.openai.api.file/-file-source/name.html b/openai-core/com.aallam.openai.api.file/-file-source/name.html new file mode 100644 index 00000000..b77293bb --- /dev/null +++ b/openai-core/com.aallam.openai.api.file/-file-source/name.html @@ -0,0 +1,66 @@ + + + + + name + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

name

+
+

File name.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.file/-file-source/source.html b/openai-core/com.aallam.openai.api.file/-file-source/source.html new file mode 100644 index 00000000..2bec3996 --- /dev/null +++ b/openai-core/com.aallam.openai.api.file/-file-source/source.html @@ -0,0 +1,66 @@ + + + + + source + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

source

+
+
val source: Source

File source.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.file/-file-status/-file-status.html b/openai-core/com.aallam.openai.api.file/-file-status/-file-status.html new file mode 100644 index 00000000..7693f1ea --- /dev/null +++ b/openai-core/com.aallam.openai.api.file/-file-status/-file-status.html @@ -0,0 +1,66 @@ + + + + + FileStatus + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

FileStatus

+
+
constructor(raw: String)
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.file/-file-status/index.html b/openai-core/com.aallam.openai.api.file/-file-status/index.html new file mode 100644 index 00000000..df189cc1 --- /dev/null +++ b/openai-core/com.aallam.openai.api.file/-file-status/index.html @@ -0,0 +1,109 @@ + + + + + FileStatus + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

FileStatus

+
@Serializable
value class FileStatus(val raw: String)

File status.

+
+
+
+
+
+

Constructors

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
constructor(raw: String)
+
+
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
val raw: String
+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.file/-file-status/raw.html b/openai-core/com.aallam.openai.api.file/-file-status/raw.html new file mode 100644 index 00000000..5731cf44 --- /dev/null +++ b/openai-core/com.aallam.openai.api.file/-file-status/raw.html @@ -0,0 +1,66 @@ + + + + + raw + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

raw

+
+
val raw: String
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.file/-file-upload-builder/-file-upload-builder.html b/openai-core/com.aallam.openai.api.file/-file-upload-builder/-file-upload-builder.html new file mode 100644 index 00000000..0ce1c4a8 --- /dev/null +++ b/openai-core/com.aallam.openai.api.file/-file-upload-builder/-file-upload-builder.html @@ -0,0 +1,66 @@ + + + + + FileUploadBuilder + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

FileUploadBuilder

+
+
constructor()
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.file/-file-upload-builder/build.html b/openai-core/com.aallam.openai.api.file/-file-upload-builder/build.html new file mode 100644 index 00000000..462400cc --- /dev/null +++ b/openai-core/com.aallam.openai.api.file/-file-upload-builder/build.html @@ -0,0 +1,66 @@ + + + + + build + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

build

+
+

Create a new instance of FileUpload.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.file/-file-upload-builder/file.html b/openai-core/com.aallam.openai.api.file/-file-upload-builder/file.html new file mode 100644 index 00000000..754805e1 --- /dev/null +++ b/openai-core/com.aallam.openai.api.file/-file-upload-builder/file.html @@ -0,0 +1,66 @@ + + + + + file + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

file

+
+

The JSON Lines file to be uploaded.

If the purpose is set to "fine-tune", each line is a JSON record with "prompt" and "completion" fields representing your training examples.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.file/-file-upload-builder/index.html b/openai-core/com.aallam.openai.api.file/-file-upload-builder/index.html new file mode 100644 index 00000000..5d89bcba --- /dev/null +++ b/openai-core/com.aallam.openai.api.file/-file-upload-builder/index.html @@ -0,0 +1,143 @@ + + + + + FileUploadBuilder + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

FileUploadBuilder

+

Builder of FileUpload instances.

+
+
+
+
+
+

Constructors

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
constructor()
+
+
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+

The JSON Lines file to be uploaded.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

The intended purpose of the uploaded documents.

+
+
+
+
+
+
+
+

Functions

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+

Create a new instance of FileUpload.

+
+
+
+
+
+
+
+
+
+ +
+
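A hedged sketch of FileUploadBuilder usage. The builder's `file` and `purpose` properties are assumed to be nullable `FileSource?`/`Purpose?` vars (their signatures are elided on the page above), and the file path and ids are placeholders.

```kotlin
import com.aallam.openai.api.file.FileSource
import com.aallam.openai.api.file.FileUpload
import com.aallam.openai.api.file.FileUploadBuilder
import com.aallam.openai.api.file.Purpose
import okio.FileSystem
import okio.Path.Companion.toPath

fun fineTuneUpload(): FileUpload {
    val builder = FileUploadBuilder()
    // `file` and `purpose` are assumed to be nullable vars (FileSource? / Purpose?);
    // their exact signatures are not shown on the page above.
    builder.file = FileSource(path = "data/training.jsonl".toPath(), fileSystem = FileSystem.SYSTEM)
    builder.purpose = Purpose("fine-tune")
    return builder.build()
}
```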
+ + + diff --git a/openai-core/com.aallam.openai.api.file/-file-upload-builder/purpose.html b/openai-core/com.aallam.openai.api.file/-file-upload-builder/purpose.html new file mode 100644 index 00000000..f453b87f --- /dev/null +++ b/openai-core/com.aallam.openai.api.file/-file-upload-builder/purpose.html @@ -0,0 +1,66 @@ + + + + + purpose + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

purpose

+
+

The intended purpose of the uploaded documents.

Use "fine-tune" for Fine-tuning. This allows us to validate the format of the uploaded file.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.file/-file-upload/-file-upload.html b/openai-core/com.aallam.openai.api.file/-file-upload/-file-upload.html new file mode 100644 index 00000000..9cd57e05 --- /dev/null +++ b/openai-core/com.aallam.openai.api.file/-file-upload/-file-upload.html @@ -0,0 +1,66 @@ + + + + + FileUpload + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

FileUpload

+
+
constructor(file: FileSource, purpose: Purpose)
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.file/-file-upload/file.html b/openai-core/com.aallam.openai.api.file/-file-upload/file.html new file mode 100644 index 00000000..574a748e --- /dev/null +++ b/openai-core/com.aallam.openai.api.file/-file-upload/file.html @@ -0,0 +1,66 @@ + + + + + file + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

file

+
+

The JSON Lines file to be uploaded.

If the purpose is set to "fine-tune", each line is a JSON record with "prompt" and "completion" fields representing your training examples.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.file/-file-upload/index.html b/openai-core/com.aallam.openai.api.file/-file-upload/index.html new file mode 100644 index 00000000..13243bb3 --- /dev/null +++ b/openai-core/com.aallam.openai.api.file/-file-upload/index.html @@ -0,0 +1,124 @@ + + + + + FileUpload + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

FileUpload

+
class FileUpload(val file: FileSource, val purpose: Purpose)

Request to upload a file.

documentation

+
+
+
+
+
+

Constructors

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
constructor(file: FileSource, purpose: Purpose)
+
+
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+

The JSON Lines file to be uploaded.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

The intended purpose of the uploaded documents.

+
+
+
+
+
+
+
+
+
+ +
+
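A minimal sketch constructing a FileUpload with the documented constructor. The path is a placeholder; `"fine-tune"` is used because the purpose documentation says it enables validation of the uploaded file's format.

```kotlin
import com.aallam.openai.api.file.FileSource
import com.aallam.openai.api.file.FileUpload
import com.aallam.openai.api.file.Purpose
import okio.FileSystem
import okio.Path.Companion.toPath

val uploadRequest = FileUpload(
    file = FileSource(path = "data/training.jsonl".toPath(), fileSystem = FileSystem.SYSTEM),
    // "fine-tune" lets the backend validate the JSON Lines format of the upload.
    purpose = Purpose("fine-tune"),
)
```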
+ + + diff --git a/openai-core/com.aallam.openai.api.file/-file-upload/purpose.html b/openai-core/com.aallam.openai.api.file/-file-upload/purpose.html new file mode 100644 index 00000000..a5969ac3 --- /dev/null +++ b/openai-core/com.aallam.openai.api.file/-file-upload/purpose.html @@ -0,0 +1,66 @@ + + + + + purpose + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

purpose

+
+

The intended purpose of the uploaded documents.

Use "fine-tune" for Fine-tuning. This allows us to validate the format of the uploaded file.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.file/-file/-file.html b/openai-core/com.aallam.openai.api.file/-file/-file.html new file mode 100644 index 00000000..cb3df432 --- /dev/null +++ b/openai-core/com.aallam.openai.api.file/-file/-file.html @@ -0,0 +1,66 @@ + + + + + File + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

File

+
+
constructor(id: FileId, bytes: Int, createdAt: Long, filename: String, purpose: Purpose, status: Status? = null, format: String? = null, statusDetails: String? = null)
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.file/-file/bytes.html b/openai-core/com.aallam.openai.api.file/-file/bytes.html new file mode 100644 index 00000000..3bd2d748 --- /dev/null +++ b/openai-core/com.aallam.openai.api.file/-file/bytes.html @@ -0,0 +1,66 @@ + + + + + bytes + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

bytes

+
+
val bytes: Int

File size.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.file/-file/created-at.html b/openai-core/com.aallam.openai.api.file/-file/created-at.html new file mode 100644 index 00000000..07d5d051 --- /dev/null +++ b/openai-core/com.aallam.openai.api.file/-file/created-at.html @@ -0,0 +1,66 @@ + + + + + createdAt + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

createdAt

+
+

File creation date.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.file/-file/filename.html b/openai-core/com.aallam.openai.api.file/-file/filename.html new file mode 100644 index 00000000..b635a069 --- /dev/null +++ b/openai-core/com.aallam.openai.api.file/-file/filename.html @@ -0,0 +1,66 @@ + + + + + filename + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

filename

+
+

File name.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.file/-file/format.html b/openai-core/com.aallam.openai.api.file/-file/format.html new file mode 100644 index 00000000..979b9682 --- /dev/null +++ b/openai-core/com.aallam.openai.api.file/-file/format.html @@ -0,0 +1,66 @@ + + + + + format + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

format

+
+
val format: String? = null

Deprecated

Removed field

File format.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.file/-file/id.html b/openai-core/com.aallam.openai.api.file/-file/id.html new file mode 100644 index 00000000..d99c0e0f --- /dev/null +++ b/openai-core/com.aallam.openai.api.file/-file/id.html @@ -0,0 +1,66 @@ + + + + + id + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

id

+
+
val id: FileId

A unique id assigned to this file.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.file/-file/index.html b/openai-core/com.aallam.openai.api.file/-file/index.html new file mode 100644 index 00000000..79665dc7 --- /dev/null +++ b/openai-core/com.aallam.openai.api.file/-file/index.html @@ -0,0 +1,214 @@ + + + + + File + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

File

+
@Serializable
data class File(val id: FileId, val bytes: Int, val createdAt: Long, val filename: String, val purpose: Purpose, val status: Status? = null, val format: String? = null, val statusDetails: String? = null)

File details.

+
+
+
+
+
+

Constructors

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
constructor(id: FileId, bytes: Int, createdAt: Long, filename: String, purpose: Purpose, status: Status? = null, format: String? = null, statusDetails: String? = null)
+
+
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
val bytes: Int

File size.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

File creation date.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

File name.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
val format: String? = null

File format.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
val id: FileId

A unique id assigned to this file.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

File purpose.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
val status: Status? = null

File status.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
val statusDetails: String? = null

File status details.

+
+
+
+
+
+
+
+
+
+ +
+
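Since File is a response model, a small, hypothetical helper that reads its documented properties is more illustrative than a construction example:

```kotlin
import com.aallam.openai.api.file.File

// Hypothetical helper: print the metadata of a File returned by a list or retrieve call.
fun describe(file: File) {
    println("id=${file.id.id} name=${file.filename} size=${file.bytes} bytes")
    println("purpose=${file.purpose.raw} createdAt=${file.createdAt}")
    file.statusDetails?.let { details -> println("status details: $details") }
}
```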
+ + + diff --git a/openai-core/com.aallam.openai.api.file/-file/purpose.html b/openai-core/com.aallam.openai.api.file/-file/purpose.html new file mode 100644 index 00000000..e7a6e512 --- /dev/null +++ b/openai-core/com.aallam.openai.api.file/-file/purpose.html @@ -0,0 +1,66 @@ + + + + + purpose + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

purpose

+
+

File purpose.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.file/-file/status-details.html b/openai-core/com.aallam.openai.api.file/-file/status-details.html new file mode 100644 index 00000000..8a810e83 --- /dev/null +++ b/openai-core/com.aallam.openai.api.file/-file/status-details.html @@ -0,0 +1,66 @@ + + + + + statusDetails + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

statusDetails

+
+
val statusDetails: String? = null

File status details.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.file/-file/status.html b/openai-core/com.aallam.openai.api.file/-file/status.html new file mode 100644 index 00000000..65a1f576 --- /dev/null +++ b/openai-core/com.aallam.openai.api.file/-file/status.html @@ -0,0 +1,66 @@ + + + + + status + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

status

+
+
val status: Status? = null

File status.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.file/-purpose/-purpose.html b/openai-core/com.aallam.openai.api.file/-purpose/-purpose.html new file mode 100644 index 00000000..4ec76a9c --- /dev/null +++ b/openai-core/com.aallam.openai.api.file/-purpose/-purpose.html @@ -0,0 +1,66 @@ + + + + + Purpose + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

Purpose

+
+
constructor(raw: String)
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.file/-purpose/index.html b/openai-core/com.aallam.openai.api.file/-purpose/index.html new file mode 100644 index 00000000..51596b26 --- /dev/null +++ b/openai-core/com.aallam.openai.api.file/-purpose/index.html @@ -0,0 +1,109 @@ + + + + + Purpose + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

Purpose

+
@Serializable
value class Purpose(val raw: String)

File purpose.

+
+
+
+
+
+

Constructors

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
constructor(raw: String)
+
+
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
val raw: String
+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.file/-purpose/raw.html b/openai-core/com.aallam.openai.api.file/-purpose/raw.html new file mode 100644 index 00000000..bf934891 --- /dev/null +++ b/openai-core/com.aallam.openai.api.file/-purpose/raw.html @@ -0,0 +1,66 @@ + + + + + raw + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

raw

+
+
val raw: String
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.file/file-source.html b/openai-core/com.aallam.openai.api.file/file-source.html new file mode 100644 index 00000000..1355a3f5 --- /dev/null +++ b/openai-core/com.aallam.openai.api.file/file-source.html @@ -0,0 +1,66 @@ + + + + + fileSource + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

fileSource

+
+

Represents a file resource.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.file/file-upload.html b/openai-core/com.aallam.openai.api.file/file-upload.html new file mode 100644 index 00000000..78fe760e --- /dev/null +++ b/openai-core/com.aallam.openai.api.file/file-upload.html @@ -0,0 +1,66 @@ + + + + + fileUpload + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

fileUpload

+
+

Request to upload a file.

documentation

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.file/index.html b/openai-core/com.aallam.openai.api.file/index.html new file mode 100644 index 00000000..bb219328 --- /dev/null +++ b/openai-core/com.aallam.openai.api.file/index.html @@ -0,0 +1,228 @@ + + + + + com.aallam.openai.api.file + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

Package-level declarations

+
+
+
+
+
+

Types

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
@Serializable
data class File(val id: FileId, val bytes: Int, val createdAt: Long, val filename: String, val purpose: Purpose, val status: Status? = null, val format: String? = null, val statusDetails: String? = null)

File details.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
@Serializable
value class FileId(val id: String)

File identifier.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
class FileSource(val name: String, val source: Source)

Represents a file resource.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

Builder of FileSource instances.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
@Serializable
value class FileStatus(val raw: String)

File status.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
class FileUpload(val file: FileSource, val purpose: Purpose)

Request to upload a file.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

Builder of FileUpload instances.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
@Serializable
value class Purpose(val raw: String)

File purpose.

+
+
+
+
+
+
+
+

Functions

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+

Represents a file resource.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

Request to upload a file.

+
+
+
+
+
+
+
+
+
+ +
+
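The package page lists `fileSource` and `fileUpload` builder functions but elides their signatures. Assuming they follow the usual block-style builder shape (a lambda with a FileSourceBuilder/FileUploadBuilder receiver that calls `build()` internally), usage might look like the sketch below; verify against the generated function pages before relying on it.

```kotlin
import com.aallam.openai.api.file.FileUpload
import com.aallam.openai.api.file.Purpose
import com.aallam.openai.api.file.fileSource
import com.aallam.openai.api.file.fileUpload
import okio.FileSystem
import okio.Path.Companion.toPath

// Assumed shape: each function takes a builder-receiver lambda and calls build() internally.
val upload: FileUpload = fileUpload {
    file = fileSource {
        name = "training.jsonl"
        source = FileSystem.SYSTEM.source("data/training.jsonl".toPath())
    }
    purpose = Purpose("fine-tune")
}
```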
+ + + diff --git a/openai-core/com.aallam.openai.api.finetune/-fine-tune-event/-fine-tune-event.html b/openai-core/com.aallam.openai.api.finetune/-fine-tune-event/-fine-tune-event.html new file mode 100644 index 00000000..c5a50c7f --- /dev/null +++ b/openai-core/com.aallam.openai.api.finetune/-fine-tune-event/-fine-tune-event.html @@ -0,0 +1,66 @@ + + + + + FineTuneEvent + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

FineTuneEvent

+
+
constructor(createdAt: Long, level: String, message: String)
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.finetune/-fine-tune-event/created-at.html b/openai-core/com.aallam.openai.api.finetune/-fine-tune-event/created-at.html new file mode 100644 index 00000000..7d2ba7d3 --- /dev/null +++ b/openai-core/com.aallam.openai.api.finetune/-fine-tune-event/created-at.html @@ -0,0 +1,66 @@ + + + + + createdAt + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

createdAt

+
+

Creation date.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.finetune/-fine-tune-event/index.html b/openai-core/com.aallam.openai.api.finetune/-fine-tune-event/index.html new file mode 100644 index 00000000..5a1f707a --- /dev/null +++ b/openai-core/com.aallam.openai.api.finetune/-fine-tune-event/index.html @@ -0,0 +1,139 @@ + + + + + FineTuneEvent + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

FineTuneEvent

+
@Serializable
data class FineTuneEvent(val createdAt: Long, val level: String, val message: String)

Fine tune event.

+
+
+
+
+
+

Constructors

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
constructor(createdAt: Long, level: String, message: String)
+
+
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+

Creation date.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

Fine tune event level.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

Fine tune event message.

+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.finetune/-fine-tune-event/level.html b/openai-core/com.aallam.openai.api.finetune/-fine-tune-event/level.html new file mode 100644 index 00000000..5fb461e1 --- /dev/null +++ b/openai-core/com.aallam.openai.api.finetune/-fine-tune-event/level.html @@ -0,0 +1,66 @@ + + + + + level + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

level

+
+

Fine tune event level.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.finetune/-fine-tune-event/message.html b/openai-core/com.aallam.openai.api.finetune/-fine-tune-event/message.html new file mode 100644 index 00000000..eec75ea6 --- /dev/null +++ b/openai-core/com.aallam.openai.api.finetune/-fine-tune-event/message.html @@ -0,0 +1,66 @@ + + + + + message + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

message

+
+

Fine tune event message.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.finetune/-fine-tune-id/-fine-tune-id.html b/openai-core/com.aallam.openai.api.finetune/-fine-tune-id/-fine-tune-id.html new file mode 100644 index 00000000..8650472d --- /dev/null +++ b/openai-core/com.aallam.openai.api.finetune/-fine-tune-id/-fine-tune-id.html @@ -0,0 +1,66 @@ + + + + + FineTuneId + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

FineTuneId

+
+
constructor(id: String)
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.finetune/-fine-tune-id/id.html b/openai-core/com.aallam.openai.api.finetune/-fine-tune-id/id.html new file mode 100644 index 00000000..7f424eb2 --- /dev/null +++ b/openai-core/com.aallam.openai.api.finetune/-fine-tune-id/id.html @@ -0,0 +1,66 @@ + + + + + id + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

id

+
+
val id: String
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.finetune/-fine-tune-id/index.html b/openai-core/com.aallam.openai.api.finetune/-fine-tune-id/index.html new file mode 100644 index 00000000..7fe71be9 --- /dev/null +++ b/openai-core/com.aallam.openai.api.finetune/-fine-tune-id/index.html @@ -0,0 +1,109 @@ + + + + + FineTuneId + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

FineTuneId

+
@Serializable
value class FineTuneId(val id: String)
+
+
+
+
+
+

Constructors

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
constructor(id: String)
+
+
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
val id: String
+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.finetune/-fine-tune-request-builder/-fine-tune-request-builder.html b/openai-core/com.aallam.openai.api.finetune/-fine-tune-request-builder/-fine-tune-request-builder.html new file mode 100644 index 00000000..e8168147 --- /dev/null +++ b/openai-core/com.aallam.openai.api.finetune/-fine-tune-request-builder/-fine-tune-request-builder.html @@ -0,0 +1,66 @@ + + + + + FineTuneRequestBuilder + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

FineTuneRequestBuilder

+
+
constructor()
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.finetune/-fine-tune-request-builder/batch-size.html b/openai-core/com.aallam.openai.api.finetune/-fine-tune-request-builder/batch-size.html new file mode 100644 index 00000000..8ba19586 --- /dev/null +++ b/openai-core/com.aallam.openai.api.finetune/-fine-tune-request-builder/batch-size.html @@ -0,0 +1,66 @@ + + + + + batchSize + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

batchSize

+
+

The batch size to use for training. The batch size is the number of training examples used to train a single forward and backward pass.

By default, the batch size will be dynamically configured to be ~0.2% of the number of examples in the training set, capped at 256 - in general, we've found that larger batch sizes tend to work better for larger datasets.

Defaults to null.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.finetune/-fine-tune-request-builder/build.html b/openai-core/com.aallam.openai.api.finetune/-fine-tune-request-builder/build.html new file mode 100644 index 00000000..4f9d4f1a --- /dev/null +++ b/openai-core/com.aallam.openai.api.finetune/-fine-tune-request-builder/build.html @@ -0,0 +1,66 @@ + + + + + build + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

build

+
+

Create a new instance of FineTuneRequest.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.finetune/-fine-tune-request-builder/classification-betas.html b/openai-core/com.aallam.openai.api.finetune/-fine-tune-request-builder/classification-betas.html new file mode 100644 index 00000000..9efe8952 --- /dev/null +++ b/openai-core/com.aallam.openai.api.finetune/-fine-tune-request-builder/classification-betas.html @@ -0,0 +1,66 @@ + + + + + classificationBetas + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

classificationBetas

+
+

If this is provided, we calculate F-beta scores at the specified beta values. The F-beta score is a generalization of F-1 score. This is only used for binary classification.

With a beta of 1 (i.e. the F-1 score), precision and recall are given the same weight. A larger beta score puts more weight on recall and less on precision. A smaller beta score puts more weight on precision and less on recall.

Defaults to null.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.finetune/-fine-tune-request-builder/classification-n-classes.html b/openai-core/com.aallam.openai.api.finetune/-fine-tune-request-builder/classification-n-classes.html new file mode 100644 index 00000000..5b408c0e --- /dev/null +++ b/openai-core/com.aallam.openai.api.finetune/-fine-tune-request-builder/classification-n-classes.html @@ -0,0 +1,66 @@ + + + + + classificationNClasses + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

classificationNClasses

+
+

The number of classes in a classification task.

This parameter is required for multiclass classification.

Defaults to null.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.finetune/-fine-tune-request-builder/classification-positive-class.html b/openai-core/com.aallam.openai.api.finetune/-fine-tune-request-builder/classification-positive-class.html new file mode 100644 index 00000000..4f058bef --- /dev/null +++ b/openai-core/com.aallam.openai.api.finetune/-fine-tune-request-builder/classification-positive-class.html @@ -0,0 +1,66 @@ + + + + + classificationPositiveClass + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

classificationPositiveClass

+
+

The positive class in binary classification.

This parameter is needed to generate precision, recall, and F1 metrics when doing binary classification.

Defaults to null.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.finetune/-fine-tune-request-builder/compute-classification-metrics.html b/openai-core/com.aallam.openai.api.finetune/-fine-tune-request-builder/compute-classification-metrics.html new file mode 100644 index 00000000..bae3a1c1 --- /dev/null +++ b/openai-core/com.aallam.openai.api.finetune/-fine-tune-request-builder/compute-classification-metrics.html @@ -0,0 +1,66 @@ + + + + + computeClassificationMetrics + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

computeClassificationMetrics

+
+

If set, we calculate classification-specific metrics such as accuracy and F-1 score using the validation set at the end of every epoch. These metrics can be viewed in the results file.

In order to compute classification metrics, you must provide a validation_file. Additionally, you must specify classification_n_classes for multiclass classification or classification_positive_class for binary classification.

Defaults to false.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.finetune/-fine-tune-request-builder/index.html b/openai-core/com.aallam.openai.api.finetune/-fine-tune-request-builder/index.html new file mode 100644 index 00000000..c40ee3d6 --- /dev/null +++ b/openai-core/com.aallam.openai.api.finetune/-fine-tune-request-builder/index.html @@ -0,0 +1,293 @@ + + + + + FineTuneRequestBuilder + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

FineTuneRequestBuilder

+

Builder of FineTuneRequest instances.

+
+
+
+
+
+

Constructors

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
constructor()
+
+
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+

The batch size to use for training. The batch size is the number of training examples used to train a single forward and backward pass.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

If this is provided, we calculate F-beta scores at the specified beta values. The F-beta score is a generalization of F-1 score. This is only used for binary classification.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

The number of classes in a classification task.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

The positive class in binary classification.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

If set, we calculate classification-specific metrics such as accuracy and F-1 score using the validation set at the end of every epoch. These metrics can be viewed in the results file.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

The learning rate multiplier to use for training. The fine-tuning learning rate is the original learning rate used for pretraining multiplied by this value.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

The name of the base model to fine-tune.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
var nEpochs: Int?

The number of epochs to train the model for. An epoch refers to one full cycle through the training dataset.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

The weight to use for loss on the prompt tokens. This controls how much the model tries to learn to generate the prompt (as compared to the completion which always has a weight of 1.0), and can add a stabilizing effect to training when completions are short.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

A string of up to 40 characters that will be added to your fine-tuned model name.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

The ID of an uploaded file that contains training data.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

The ID of an uploaded file that contains validation data.

+
+
+
+
+
+
+
+

Functions

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+

Create a new instance of FineTuneRequest.

+
+
+
+
+
+
+
+
+
+ +
+
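A hedged sketch of FineTuneRequestBuilder usage. Only `nEpochs: Int?` is shown with its type above; `trainingFile` and `model` are assumed to be nullable `FileId?`/`ModelId?` vars, and the ids are placeholders.

```kotlin
import com.aallam.openai.api.file.FileId
import com.aallam.openai.api.finetune.FineTuneRequest
import com.aallam.openai.api.finetune.FineTuneRequestBuilder
import com.aallam.openai.api.model.ModelId

fun curieFineTuneRequest(): FineTuneRequest {
    val builder = FineTuneRequestBuilder()
    // trainingFile and model are assumed to be nullable FileId? / ModelId? vars;
    // the ids below are placeholders.
    builder.trainingFile = FileId("file-abc123")
    builder.model = ModelId("curie")
    builder.nEpochs = 4
    builder.suffix = "custom-model-name"
    return builder.build()
}
```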
+ + + diff --git a/openai-core/com.aallam.openai.api.finetune/-fine-tune-request-builder/learning-rate-multiplier.html b/openai-core/com.aallam.openai.api.finetune/-fine-tune-request-builder/learning-rate-multiplier.html new file mode 100644 index 00000000..94bfdac4 --- /dev/null +++ b/openai-core/com.aallam.openai.api.finetune/-fine-tune-request-builder/learning-rate-multiplier.html @@ -0,0 +1,66 @@ + + + + + learningRateMultiplier + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

learningRateMultiplier

+
+

The learning rate multiplier to use for training. The fine-tuning learning rate is the original learning rate used for pretraining multiplied by this value.

By default, the learning rate multiplier is 0.05, 0.1, or 0.2, depending on the final batch_size (larger learning rates tend to perform better with larger batch sizes). We recommend experimenting with values in the range 0.02 to 0.2 to see what produces the best results.

Defaults to null.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.finetune/-fine-tune-request-builder/model.html b/openai-core/com.aallam.openai.api.finetune/-fine-tune-request-builder/model.html new file mode 100644 index 00000000..7c1ee5a9 --- /dev/null +++ b/openai-core/com.aallam.openai.api.finetune/-fine-tune-request-builder/model.html @@ -0,0 +1,66 @@ + + + + + model + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

model

+
+

The name of the base model to fine-tune.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.finetune/-fine-tune-request-builder/n-epochs.html b/openai-core/com.aallam.openai.api.finetune/-fine-tune-request-builder/n-epochs.html new file mode 100644 index 00000000..655210aa --- /dev/null +++ b/openai-core/com.aallam.openai.api.finetune/-fine-tune-request-builder/n-epochs.html @@ -0,0 +1,66 @@ + + + + + nEpochs + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

nEpochs

+
+
var nEpochs: Int?

The number of epochs to train the model for. An epoch refers to one full cycle through the training dataset.

Defaults to 4.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.finetune/-fine-tune-request-builder/prompt-loss-weight.html b/openai-core/com.aallam.openai.api.finetune/-fine-tune-request-builder/prompt-loss-weight.html new file mode 100644 index 00000000..cbf54c73 --- /dev/null +++ b/openai-core/com.aallam.openai.api.finetune/-fine-tune-request-builder/prompt-loss-weight.html @@ -0,0 +1,66 @@ + + + + + promptLossWeight + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

promptLossWeight

+
+

The weight to use for loss on the prompt tokens. This controls how much the model tries to learn to generate the prompt (as compared to the completion which always has a weight of 1.0), and can add a stabilizing effect to training when completions are short.

If prompts are extremely long (relative to completions), it may make sense to reduce this weight to avoid over-prioritizing learning the prompt.

Defaults to 0.1.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.finetune/-fine-tune-request-builder/suffix.html b/openai-core/com.aallam.openai.api.finetune/-fine-tune-request-builder/suffix.html new file mode 100644 index 00000000..18b25374 --- /dev/null +++ b/openai-core/com.aallam.openai.api.finetune/-fine-tune-request-builder/suffix.html @@ -0,0 +1,66 @@ + + + + + suffix + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

suffix

+
+

A string of up to 40 characters that will be added to your fine-tuned model name.

For example, a suffix of "custom-model-name" would produce a model name like ada:ft-your-org:custom-model-name-2022-02-15-04-21-04.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.finetune/-fine-tune-request-builder/training-file.html b/openai-core/com.aallam.openai.api.finetune/-fine-tune-request-builder/training-file.html new file mode 100644 index 00000000..ca369d3d --- /dev/null +++ b/openai-core/com.aallam.openai.api.finetune/-fine-tune-request-builder/training-file.html @@ -0,0 +1,66 @@ + + + + + trainingFile + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

trainingFile

+
+

The ID of an uploaded file that contains training data.

Your dataset must be formatted as a JSONL file, where each training example is a JSON object with the keys prompt and completion. Additionally, you must upload your file with the purpose fine-tune.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.finetune/-fine-tune-request-builder/validation-file.html b/openai-core/com.aallam.openai.api.finetune/-fine-tune-request-builder/validation-file.html new file mode 100644 index 00000000..b39e6f75 --- /dev/null +++ b/openai-core/com.aallam.openai.api.finetune/-fine-tune-request-builder/validation-file.html @@ -0,0 +1,66 @@ + + + + + validationFile + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

validationFile

+
+

The ID of an uploaded file that contains validation data.

If you provide this file, the data is used to generate validation metrics periodically during fine-tuning. These metrics can be viewed in the fine-tuning results file. Your train and validation data should be mutually exclusive.

Your dataset must be formatted as a JSONL file, where each validation example is a JSON object with the keys prompt and completion. Additionally, you must upload your file with the purpose fine-tune.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.finetune/-fine-tune-request/-fine-tune-request.html b/openai-core/com.aallam.openai.api.finetune/-fine-tune-request/-fine-tune-request.html new file mode 100644 index 00000000..ec738c28 --- /dev/null +++ b/openai-core/com.aallam.openai.api.finetune/-fine-tune-request/-fine-tune-request.html @@ -0,0 +1,66 @@ + + + + + FineTuneRequest + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

FineTuneRequest

+
+
constructor(trainingFile: FileId, validationFile: FileId? = null, model: ModelId? = null, nEpochs: Int? = null, batchSize: Int? = null, learningRateMultiplier: Double? = null, promptLossWeight: Double? = null, computeClassificationMetrics: Boolean? = null, classificationNClasses: Int? = null, classificationPositiveClass: String? = null, classificationBetas: List<Double>? = null, suffix: String? = null)
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.finetune/-fine-tune-request/batch-size.html b/openai-core/com.aallam.openai.api.finetune/-fine-tune-request/batch-size.html new file mode 100644 index 00000000..7cf194ac --- /dev/null +++ b/openai-core/com.aallam.openai.api.finetune/-fine-tune-request/batch-size.html @@ -0,0 +1,66 @@ + + + + + batchSize + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

batchSize

+
+
val batchSize: Int? = null

The batch size to use for training. The batch size is the number of training examples used to train a single forward and backward pass.

By default, the batch size will be dynamically configured to be ~0.2% of the number of examples in the training set, capped at 256 - in general, we've found that larger batch sizes tend to work better for larger datasets.

Defaults to null.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.finetune/-fine-tune-request/classification-betas.html b/openai-core/com.aallam.openai.api.finetune/-fine-tune-request/classification-betas.html new file mode 100644 index 00000000..e96bd758 --- /dev/null +++ b/openai-core/com.aallam.openai.api.finetune/-fine-tune-request/classification-betas.html @@ -0,0 +1,66 @@ + + + + + classificationBetas + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

classificationBetas

+
+

If this is provided, we calculate F-beta scores at the specified beta values. The F-beta score is a generalization of F-1 score. This is only used for binary classification.

With a beta of 1 (i.e. the F-1 score), precision and recall are given the same weight. A larger beta score puts more weight on recall and less on precision. A smaller beta score puts more weight on precision and less on recall.

Defaults to null.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.finetune/-fine-tune-request/classification-n-classes.html b/openai-core/com.aallam.openai.api.finetune/-fine-tune-request/classification-n-classes.html new file mode 100644 index 00000000..79e170f4 --- /dev/null +++ b/openai-core/com.aallam.openai.api.finetune/-fine-tune-request/classification-n-classes.html @@ -0,0 +1,66 @@ + + + + + classificationNClasses + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

classificationNClasses

+
+

The number of classes in a classification task.

This parameter is required for multiclass classification.

Defaults to null.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.finetune/-fine-tune-request/classification-positive-class.html b/openai-core/com.aallam.openai.api.finetune/-fine-tune-request/classification-positive-class.html new file mode 100644 index 00000000..626c62cc --- /dev/null +++ b/openai-core/com.aallam.openai.api.finetune/-fine-tune-request/classification-positive-class.html @@ -0,0 +1,66 @@ + + + + + classificationPositiveClass + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

classificationPositiveClass

+
+

The positive class in binary classification.

This parameter is needed to generate precision, recall, and F1 metrics when doing binary classification.

Defaults to null.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.finetune/-fine-tune-request/compute-classification-metrics.html b/openai-core/com.aallam.openai.api.finetune/-fine-tune-request/compute-classification-metrics.html new file mode 100644 index 00000000..7d26dc41 --- /dev/null +++ b/openai-core/com.aallam.openai.api.finetune/-fine-tune-request/compute-classification-metrics.html @@ -0,0 +1,66 @@ + + + + + computeClassificationMetrics + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

computeClassificationMetrics

+
+

If set, we calculate classification-specific metrics such as accuracy and F-1 score using the validation set at the end of every epoch. These metrics can be viewed in the results file.

In order to compute classification metrics, you must provide a validation_file. Additionally, you must specify classification_n_classes for multiclass classification or classification_positive_class for binary classification.

Defaults to false.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.finetune/-fine-tune-request/index.html b/openai-core/com.aallam.openai.api.finetune/-fine-tune-request/index.html new file mode 100644 index 00000000..c821f453 --- /dev/null +++ b/openai-core/com.aallam.openai.api.finetune/-fine-tune-request/index.html @@ -0,0 +1,274 @@ + + + + + FineTuneRequest + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

FineTuneRequest

+
@Serializable
data class FineTuneRequest(val trainingFile: FileId, val validationFile: FileId? = null, val model: ModelId? = null, val nEpochs: Int? = null, val batchSize: Int? = null, val learningRateMultiplier: Double? = null, val promptLossWeight: Double? = null, val computeClassificationMetrics: Boolean? = null, val classificationNClasses: Int? = null, val classificationPositiveClass: String? = null, val classificationBetas: List<Double>? = null, val suffix: String? = null)

Create a Fine-Tune request.

+
+
+
+
+
+

Constructors

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
constructor(trainingFile: FileId, validationFile: FileId? = null, model: ModelId? = null, nEpochs: Int? = null, batchSize: Int? = null, learningRateMultiplier: Double? = null, promptLossWeight: Double? = null, computeClassificationMetrics: Boolean? = null, classificationNClasses: Int? = null, classificationPositiveClass: String? = null, classificationBetas: List<Double>? = null, suffix: String? = null)
+
+
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
val batchSize: Int? = null

The batch size to use for training. The batch size is the number of training examples used to train a single forward and backward pass.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

If this is provided, we calculate F-beta scores at the specified beta values. The F-beta score is a generalization of F-1 score. This is only used for binary classification.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

The number of classes in a classification task.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

The positive class in binary classification.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

If set, we calculate classification-specific metrics such as accuracy and F-1 score using the validation set at the end of every epoch. These metrics can be viewed in the results file.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

The learning rate multiplier to use for training. The fine-tuning learning rate is the original learning rate used for pretraining multiplied by this value.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
val model: ModelId? = null

The name of the base model to fine-tune.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
val nEpochs: Int? = null

The number of epochs to train the model for. An epoch refers to one full cycle through the training dataset.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

The weight to use for loss on the prompt tokens. This controls how much the model tries to learn to generate the prompt (as compared to the completion which always has a weight of 1.0), and can add a stabilizing effect to training when completions are short.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
val suffix: String? = null

A string of up to 40 characters that will be added to your fine-tuned model name.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

The ID of an uploaded file that contains training data.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
val validationFile: FileId? = null

The ID of an uploaded file that contains validation data.

+
+
+
+
+
+
+
+
+
+ +
+
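A minimal construction sketch using the documented FineTuneRequest constructor; the file ids and model name are placeholders.

```kotlin
import com.aallam.openai.api.file.FileId
import com.aallam.openai.api.finetune.FineTuneRequest
import com.aallam.openai.api.model.ModelId

// Only trainingFile is required; the remaining parameters fall back to the defaults described above.
val fineTuneRequest = FineTuneRequest(
    trainingFile = FileId("file-abc123"),   // placeholder id of an uploaded JSONL file
    validationFile = FileId("file-def456"), // optional: enables periodic validation metrics
    model = ModelId("curie"),
    nEpochs = 4,
    suffix = "custom-model-name",
)
```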
+ + + diff --git a/openai-core/com.aallam.openai.api.finetune/-fine-tune-request/learning-rate-multiplier.html b/openai-core/com.aallam.openai.api.finetune/-fine-tune-request/learning-rate-multiplier.html new file mode 100644 index 00000000..bf0fd209 --- /dev/null +++ b/openai-core/com.aallam.openai.api.finetune/-fine-tune-request/learning-rate-multiplier.html @@ -0,0 +1,66 @@ + + + + + learningRateMultiplier + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

learningRateMultiplier

+
+

The learning rate multiplier to use for training. The fine-tuning learning rate is the original learning rate used for pretraining multiplied by this value.

By default, the learning rate multiplier is 0.05, 0.1, or 0.2, depending on the final batch_size (larger learning rates tend to perform better with larger batch sizes). We recommend experimenting with values in the range 0.02 to 0.2 to see what produces the best results.

Defaults to null.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.finetune/-fine-tune-request/model.html b/openai-core/com.aallam.openai.api.finetune/-fine-tune-request/model.html new file mode 100644 index 00000000..80f62330 --- /dev/null +++ b/openai-core/com.aallam.openai.api.finetune/-fine-tune-request/model.html @@ -0,0 +1,66 @@ + + + + + model + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

model

+
+
val model: ModelId? = null

The name of the base model to fine-tune.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.finetune/-fine-tune-request/n-epochs.html b/openai-core/com.aallam.openai.api.finetune/-fine-tune-request/n-epochs.html new file mode 100644 index 00000000..31b150fa --- /dev/null +++ b/openai-core/com.aallam.openai.api.finetune/-fine-tune-request/n-epochs.html @@ -0,0 +1,66 @@ + + + + + nEpochs + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

nEpochs

+
+
val nEpochs: Int? = null

The number of epochs to train the model for. An epoch refers to one full cycle through the training dataset.

Defaults to 4.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.finetune/-fine-tune-request/prompt-loss-weight.html b/openai-core/com.aallam.openai.api.finetune/-fine-tune-request/prompt-loss-weight.html new file mode 100644 index 00000000..fb90c718 --- /dev/null +++ b/openai-core/com.aallam.openai.api.finetune/-fine-tune-request/prompt-loss-weight.html @@ -0,0 +1,66 @@ + + + + + promptLossWeight + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

promptLossWeight

+
+

The weight to use for loss on the prompt tokens. This controls how much the model tries to learn to generate the prompt (as compared to the completion which always has a weight of 1.0), and can add a stabilizing effect to training when completions are short.

If prompts are extremely long (relative to completions), it may make sense to reduce this weight to avoid over-prioritizing learning the prompt.

Defaults to 0.1.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.finetune/-fine-tune-request/suffix.html b/openai-core/com.aallam.openai.api.finetune/-fine-tune-request/suffix.html new file mode 100644 index 00000000..138278c2 --- /dev/null +++ b/openai-core/com.aallam.openai.api.finetune/-fine-tune-request/suffix.html @@ -0,0 +1,66 @@ + + + + + suffix + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

suffix

+
+
val suffix: String? = null

A string of up to 40 characters that will be added to your fine-tuned model name.

For example, a suffix of "custom-model-name" would produce a model name like ada:ft-your-org:custom-model-name-2022-02-15-04-21-04.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.finetune/-fine-tune-request/training-file.html b/openai-core/com.aallam.openai.api.finetune/-fine-tune-request/training-file.html new file mode 100644 index 00000000..c8e152a6 --- /dev/null +++ b/openai-core/com.aallam.openai.api.finetune/-fine-tune-request/training-file.html @@ -0,0 +1,66 @@ + + + + + trainingFile + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

trainingFile

+
+

The ID of an uploaded file that contains training data.

Your dataset must be formatted as a JSONL file, where each training example is a JSON object with the keys prompt and completion. Additionally, you must upload your file with the purpose fine-tune.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.finetune/-fine-tune-request/validation-file.html b/openai-core/com.aallam.openai.api.finetune/-fine-tune-request/validation-file.html new file mode 100644 index 00000000..72731cec --- /dev/null +++ b/openai-core/com.aallam.openai.api.finetune/-fine-tune-request/validation-file.html @@ -0,0 +1,66 @@ + + + + + validationFile + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

validationFile

+
+
val validationFile: FileId? = null

The ID of an uploaded file that contains validation data.

If you provide this file, the data is used to generate validation metrics periodically during fine-tuning. These metrics can be viewed in the fine-tuning results file. Your train and validation data should be mutually exclusive.

Your dataset must be formatted as a JSONL file, where each validation example is a JSON object with the keys prompt and completion. Additionally, you must upload your file with the purpose fine-tune.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.finetune/-fine-tune/-fine-tune.html b/openai-core/com.aallam.openai.api.finetune/-fine-tune/-fine-tune.html new file mode 100644 index 00000000..adffe538 --- /dev/null +++ b/openai-core/com.aallam.openai.api.finetune/-fine-tune/-fine-tune.html @@ -0,0 +1,66 @@ + + + + + FineTune + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

FineTune

+
+
constructor(id: FineTuneId, model: ModelId, createdAt: Long, events: List<FineTuneEvent>? = null, fineTunedModel: ModelId? = null, hyperParams: HyperParams? = null, organizationId: String?, resultFiles: List<File>, status: Status, validationFiles: List<File>, trainingFiles: List<File>, updatedAt: Long)
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.finetune/-fine-tune/created-at.html b/openai-core/com.aallam.openai.api.finetune/-fine-tune/created-at.html new file mode 100644 index 00000000..69ab365f --- /dev/null +++ b/openai-core/com.aallam.openai.api.finetune/-fine-tune/created-at.html @@ -0,0 +1,66 @@ + + + + + createdAt + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

createdAt

+
+

Creation date.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.finetune/-fine-tune/events.html b/openai-core/com.aallam.openai.api.finetune/-fine-tune/events.html new file mode 100644 index 00000000..56af7af0 --- /dev/null +++ b/openai-core/com.aallam.openai.api.finetune/-fine-tune/events.html @@ -0,0 +1,66 @@ + + + + + events + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

events

+
+
val events: List<FineTuneEvent>? = null

List of FineTuneEvents.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.finetune/-fine-tune/fine-tuned-model.html b/openai-core/com.aallam.openai.api.finetune/-fine-tune/fine-tuned-model.html new file mode 100644 index 00000000..fca5d151 --- /dev/null +++ b/openai-core/com.aallam.openai.api.finetune/-fine-tune/fine-tuned-model.html @@ -0,0 +1,66 @@ + + + + + fineTunedModel + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

fineTunedModel

+
+
val fineTunedModel: ModelId? = null

Fine-tuned model.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.finetune/-fine-tune/hyper-params.html b/openai-core/com.aallam.openai.api.finetune/-fine-tune/hyper-params.html new file mode 100644 index 00000000..1ae48c16 --- /dev/null +++ b/openai-core/com.aallam.openai.api.finetune/-fine-tune/hyper-params.html @@ -0,0 +1,66 @@ + + + + + hyperParams + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

hyperParams

+
+

Hyperparameters.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.finetune/-fine-tune/id.html b/openai-core/com.aallam.openai.api.finetune/-fine-tune/id.html new file mode 100644 index 00000000..895e8e46 --- /dev/null +++ b/openai-core/com.aallam.openai.api.finetune/-fine-tune/id.html @@ -0,0 +1,66 @@ + + + + + id + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

id

+
+

A unique id assigned to this fine-tune.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.finetune/-fine-tune/index.html b/openai-core/com.aallam.openai.api.finetune/-fine-tune/index.html new file mode 100644 index 00000000..70c4da83 --- /dev/null +++ b/openai-core/com.aallam.openai.api.finetune/-fine-tune/index.html @@ -0,0 +1,274 @@ + + + + + FineTune + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

FineTune

+
@Serializable
data class FineTune(val id: FineTuneId, val model: ModelId, val createdAt: Long, val events: List<FineTuneEvent>? = null, val fineTunedModel: ModelId? = null, val hyperParams: HyperParams? = null, val organizationId: String?, val resultFiles: List<File>, val status: Status, val validationFiles: List<File>, val trainingFiles: List<File>, val updatedAt: Long)

Fine-tune of a specified model from a given dataset.

+
+
+
+
+
+

Constructors

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
constructor(id: FineTuneId, model: ModelId, createdAt: Long, events: List<FineTuneEvent>? = null, fineTunedModel: ModelId? = null, hyperParams: HyperParams? = null, organizationId: String?, resultFiles: List<File>, status: Status, validationFiles: List<File>, trainingFiles: List<File>, updatedAt: Long)
+
+
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+

Creation date.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
val events: List<FineTuneEvent>? = null

List of FineTuneEvents.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
val fineTunedModel: ModelId? = null

Fine-tuned model.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

Hyperparameters.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

A unique id assigned to this fine-tune.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

The name of the base model to fine-tune.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

Organization ID.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

Result Files.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

Fine-Tune status.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

List of training Files.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

Fine-Tune update date.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

List of validation Files.

+
+
+
+
+
+
+
+
+
+ +
+
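FineTune is a response model, so a small, hypothetical inspection helper over its documented properties illustrates typical use:

```kotlin
import com.aallam.openai.api.finetune.FineTune

// Hypothetical helper: summarise a FineTune returned by a create or retrieve call.
fun summarize(fineTune: FineTune) {
    println("fine-tune ${fineTune.id} status=${fineTune.status}")
    fineTune.fineTunedModel?.let { model -> println("produced model: $model") }
    fineTune.events.orEmpty().forEach { event ->
        println("[${event.level}] ${event.createdAt}: ${event.message}")
    }
}
```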
new file: openai-core/com.aallam.openai.api.finetune/-fine-tune/model.html
model: The name of the base model to fine-tune.

new file: openai-core/com.aallam.openai.api.finetune/-fine-tune/organization-id.html
organizationId: Organization ID.

new file: openai-core/com.aallam.openai.api.finetune/-fine-tune/result-files.html
resultFiles: Result Files.

new file: openai-core/com.aallam.openai.api.finetune/-fine-tune/status.html
status: Fine-Tune status.

new file: openai-core/com.aallam.openai.api.finetune/-fine-tune/training-files.html
trainingFiles: List of training Files.

new file: openai-core/com.aallam.openai.api.finetune/-fine-tune/updated-at.html
updatedAt: Fine-Tune update date.

new file: openai-core/com.aallam.openai.api.finetune/-fine-tune/validation-files.html
validationFiles: List of validation Files.
new file: openai-core/com.aallam.openai.api.finetune/-hyper-params/-hyper-params.html
HyperParams constructor:
constructor(batchSize: Int? = null, learningRateMultiplier: Double? = null, nEpochs: Long, promptLossWeight: Double, computeClassificationMetrics: Boolean? = null, classificationNClasses: Int? = null, classificationPositiveClass: String? = null)

new file: openai-core/com.aallam.openai.api.finetune/-hyper-params/batch-size.html
batchSize (val batchSize: Int? = null): The batch size to use for training. The batch size is the number of training examples used in a single forward and backward pass.

new file: openai-core/com.aallam.openai.api.finetune/-hyper-params/classification-n-classes.html
classificationNClasses: The number of classes in a classification task.

new file: openai-core/com.aallam.openai.api.finetune/-hyper-params/classification-positive-class.html
classificationPositiveClass: The positive class in binary classification. This parameter is needed to generate precision, recall, and F1 metrics when doing binary classification.

new file: openai-core/com.aallam.openai.api.finetune/-hyper-params/compute-classification-metrics.html
computeClassificationMetrics: If set, we calculate classification-specific metrics such as accuracy and F1 score using the validation set at the end of every epoch. These metrics can be viewed in the results file. In order to compute classification metrics, you must provide a validation_file. Additionally, you must specify classificationNClasses for multiclass classification or classificationPositiveClass for binary classification.
new file: openai-core/com.aallam.openai.api.finetune/-hyper-params/index.html

HyperParams

@Serializable
data class HyperParams(val batchSize: Int? = null, val learningRateMultiplier: Double? = null, val nEpochs: Long, val promptLossWeight: Double, val computeClassificationMetrics: Boolean? = null, val classificationNClasses: Int? = null, val classificationPositiveClass: String? = null)

Fine-Tune hyper parameters.

Constructors

constructor(batchSize: Int? = null, learningRateMultiplier: Double? = null, nEpochs: Long, promptLossWeight: Double, computeClassificationMetrics: Boolean? = null, classificationNClasses: Int? = null, classificationPositiveClass: String? = null)

Properties

batchSize (val batchSize: Int? = null)
The batch size to use for training. The batch size is the number of training examples used in a single forward and backward pass.

classificationNClasses
The number of classes in a classification task.

classificationPositiveClass
The positive class in binary classification. This parameter is needed to generate precision, recall, and F1 metrics when doing binary classification.

computeClassificationMetrics
If set, we calculate classification-specific metrics such as accuracy and F1 score using the validation set at the end of every epoch. These metrics can be viewed in the results file.

learningRateMultiplier
The learning rate multiplier to use for training. The fine-tuning learning rate is the original learning rate used for pretraining, multiplied by this value.

nEpochs
The number of epochs to train the model for. An epoch refers to one full cycle through the training dataset.

promptLossWeight
The weight to use for loss on the prompt tokens. This controls how much the model tries to learn to generate the prompt (compared to the completion, which always has a weight of 1.0), and can add a stabilizing effect to training when completions are short.
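For illustration, a minimal Kotlin sketch of what these hyper parameters look like when constructed directly; the values are hypothetical, and in practice HyperParams is usually returned by the API as part of a FineTune rather than built by hand.

import com.aallam.openai.api.finetune.HyperParams

val params = HyperParams(
    batchSize = 4,                 // training examples per forward/backward pass
    learningRateMultiplier = 0.1,  // multiplier applied to the pretraining learning rate
    nEpochs = 4L,                  // full cycles through the training dataset
    promptLossWeight = 0.01,       // weight of the loss on prompt tokens
)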
new file: openai-core/com.aallam.openai.api.finetune/-hyper-params/learning-rate-multiplier.html
learningRateMultiplier: The learning rate multiplier to use for training. The fine-tuning learning rate is the original learning rate used for pretraining multiplied by this value.

new file: openai-core/com.aallam.openai.api.finetune/-hyper-params/n-epochs.html
nEpochs: The number of epochs to train the model for. An epoch refers to one full cycle through the training dataset.

new file: openai-core/com.aallam.openai.api.finetune/-hyper-params/prompt-loss-weight.html
promptLossWeight: The weight to use for loss on the prompt tokens. This controls how much the model tries to learn to generate the prompt (as compared to the completion which always has a weight of 1.0), and can add a stabilizing effect to training when completions are short.

new file: openai-core/com.aallam.openai.api.finetune/fine-tune-request.html
fineTuneRequest: Create a Fine-Tune request.
new file: openai-core/com.aallam.openai.api.finetune/index.html

Package-level declarations

Types

FineTune
@Serializable
data class FineTune(val id: FineTuneId, val model: ModelId, val createdAt: Long, val events: List<FineTuneEvent>? = null, val fineTunedModel: ModelId? = null, val hyperParams: HyperParams? = null, val organizationId: String?, val resultFiles: List<File>, val status: Status, val validationFiles: List<File>, val trainingFiles: List<File>, val updatedAt: Long)
Fine-tune of a specified model from a given dataset.

FineTuneEvent
@Serializable
data class FineTuneEvent(val createdAt: Long, val level: String, val message: String)
Fine-tune event.

FineTuneId
@Serializable
value class FineTuneId(val id: String)

FineTuneRequest
@Serializable
data class FineTuneRequest(val trainingFile: FileId, val validationFile: FileId? = null, val model: ModelId? = null, val nEpochs: Int? = null, val batchSize: Int? = null, val learningRateMultiplier: Double? = null, val promptLossWeight: Double? = null, val computeClassificationMetrics: Boolean? = null, val classificationNClasses: Int? = null, val classificationPositiveClass: String? = null, val classificationBetas: List<Double>? = null, val suffix: String? = null)
Create a Fine-Tune request.

FineTuneRequestBuilder
Builder of FineTuneRequest instances.

HyperParams
@Serializable
data class HyperParams(val batchSize: Int? = null, val learningRateMultiplier: Double? = null, val nEpochs: Long, val promptLossWeight: Double, val computeClassificationMetrics: Boolean? = null, val classificationNClasses: Int? = null, val classificationPositiveClass: String? = null)
Fine-Tune hyper parameters.

Functions

fineTuneRequest
Create a Fine-Tune request.
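As a hedged sketch of how the types above fit together, the request data class can be built directly. It assumes FileId and ModelId wrap plain string identifiers and are imported from com.aallam.openai.api.file and com.aallam.openai.api.model; the file id shown is hypothetical.

import com.aallam.openai.api.file.FileId
import com.aallam.openai.api.finetune.FineTuneRequest
import com.aallam.openai.api.model.ModelId

// Build a legacy fine-tune request from an already-uploaded training file.
val fineTune = FineTuneRequest(
    trainingFile = FileId("file-abc123"),  // hypothetical uploaded file id
    model = ModelId("ada"),
    nEpochs = 4,
    suffix = "my-classifier",
)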
new file: openai-core/com.aallam.openai.api.finetuning/-error-info/-error-info.html
ErrorInfo constructor:
constructor(message: String, code: String, param: String? = null)

new file: openai-core/com.aallam.openai.api.finetuning/-error-info/code.html
code: A machine-readable error code.
new file: openai-core/com.aallam.openai.api.finetuning/-error-info/index.html

ErrorInfo

@Serializable
data class ErrorInfo(val message: String, val code: String, val param: String? = null)

For fine-tuning jobs that have failed, this will contain more information on the cause of the failure.

Constructors

constructor(message: String, code: String, param: String? = null)

Properties

code
A machine-readable error code.

message
A human-readable error message.

param (val param: String? = null)
The parameter that was invalid (e.g., training_file, validation_file), or null if not parameter-specific.
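A small sketch of consuming ErrorInfo; it assumes a FineTuningJob value has already been retrieved from the API (how it is fetched is outside these pages).

import com.aallam.openai.api.finetuning.FineTuningJob

// Returns a readable failure description, or null when the job did not fail.
fun describeFailure(job: FineTuningJob): String? =
    job.error?.let { error ->
        buildString {
            append("fine-tuning failed with ${error.code}: ${error.message}")
            error.param?.let { append(" (parameter: $it)") }
        }
    }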
new file: openai-core/com.aallam.openai.api.finetuning/-error-info/message.html
message: A human-readable error message.

new file: openai-core/com.aallam.openai.api.finetuning/-error-info/param.html
param (val param: String? = null): The parameter that was invalid (e.g., training_file, validation_file), or null if not parameter-specific.
new file: openai-core/com.aallam.openai.api.finetuning/-fine-tuning-id/-fine-tuning-id.html
FineTuningId constructor:
constructor(id: String)

new file: openai-core/com.aallam.openai.api.finetuning/-fine-tuning-id/id.html
id: val id: String

new file: openai-core/com.aallam.openai.api.finetuning/-fine-tuning-id/index.html

FineTuningId

@Serializable
value class FineTuningId(val id: String)

Fine-tuning identifier.

Constructors

constructor(id: String)

Properties

id
val id: String
new file: openai-core/com.aallam.openai.api.finetuning/-fine-tuning-job-event/-fine-tuning-job-event.html
FineTuningJobEvent constructor:
constructor(id: String, createdAt: Int, level: Level, message: String)

new file: openai-core/com.aallam.openai.api.finetuning/-fine-tuning-job-event/created-at.html
createdAt: The Unix timestamp (in seconds) for when the event was created.

new file: openai-core/com.aallam.openai.api.finetuning/-fine-tuning-job-event/id.html
id (val id: String): The identifier of the fine-tuning job event.
new file: openai-core/com.aallam.openai.api.finetuning/-fine-tuning-job-event/index.html

FineTuningJobEvent

@Serializable
data class FineTuningJobEvent(val id: String, val createdAt: Int, val level: Level, val message: String)

Data class representing a fine-tuning job event.

Constructors

constructor(id: String, createdAt: Int, level: Level, message: String)

Properties

createdAt
The Unix timestamp (in seconds) for when the event was created.

id (val id: String)
The identifier of the fine-tuning job event.

level
The severity level of the event, which can be either "info", "warn", or "error".

message
A human-readable message providing more details about the event.
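A brief sketch of reading these fields; it assumes a list of FineTuningJobEvent values has already been fetched.

import com.aallam.openai.api.finetuning.FineTuningJobEvent

// Print each event with its severity and creation timestamp.
fun logEvents(events: List<FineTuningJobEvent>) {
    for (event in events) {
        println("[${event.level.value}] ${event.createdAt}: ${event.message}")
    }
}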
new file: openai-core/com.aallam.openai.api.finetuning/-fine-tuning-job-event/level.html
level: The severity level of the event, which can be either "info", "warn", or "error".

new file: openai-core/com.aallam.openai.api.finetuning/-fine-tuning-job-event/message.html
message: A human-readable message providing more details about the event.
new file: openai-core/com.aallam.openai.api.finetuning/-fine-tuning-job/-fine-tuning-job.html
FineTuningJob constructor:
constructor(id: FineTuningId, createdAt: Int, model: ModelId, organizationId: OrganizationId, status: Status, hyperparameters: Hyperparameters, trainingFile: FileId, resultFiles: List<FileId>, finishedAt: Int? = null, fineTunedModel: ModelId? = null, validationFile: FileId? = null, trainedTokens: Int? = null, error: ErrorInfo? = null)

new file: openai-core/com.aallam.openai.api.finetuning/-fine-tuning-job/created-at.html
createdAt: The Unix timestamp (in seconds) for when the fine-tuning job was created.

new file: openai-core/com.aallam.openai.api.finetuning/-fine-tuning-job/error.html
error (val error: ErrorInfo? = null): Contains more information on the cause of failure for failed fine-tuning jobs, or null if not failed.

new file: openai-core/com.aallam.openai.api.finetuning/-fine-tuning-job/fine-tuned-model.html
fineTunedModel (val fineTunedModel: ModelId? = null): The name of the fine-tuned model that is being created, or null if the fine-tuning job is still running.

new file: openai-core/com.aallam.openai.api.finetuning/-fine-tuning-job/finished-at.html
finishedAt (val finishedAt: Int? = null): The Unix timestamp (in seconds) for when the fine-tuning job was finished, or null if still running.

new file: openai-core/com.aallam.openai.api.finetuning/-fine-tuning-job/hyperparameters.html
hyperparameters: The hyperparameters used for the fine-tuning job.

new file: openai-core/com.aallam.openai.api.finetuning/-fine-tuning-job/id.html
id: The object identifier, which can be referenced in the API endpoints.
new file: openai-core/com.aallam.openai.api.finetuning/-fine-tuning-job/index.html

FineTuningJob

@Serializable
data class FineTuningJob(val id: FineTuningId, val createdAt: Int, val model: ModelId, val organizationId: OrganizationId, val status: Status, val hyperparameters: Hyperparameters, val trainingFile: FileId, val resultFiles: List<FileId>, val finishedAt: Int? = null, val fineTunedModel: ModelId? = null, val validationFile: FileId? = null, val trainedTokens: Int? = null, val error: ErrorInfo? = null)

A data class representing a fine-tuning job.

Constructors

constructor(id: FineTuningId, createdAt: Int, model: ModelId, organizationId: OrganizationId, status: Status, hyperparameters: Hyperparameters, trainingFile: FileId, resultFiles: List<FileId>, finishedAt: Int? = null, fineTunedModel: ModelId? = null, validationFile: FileId? = null, trainedTokens: Int? = null, error: ErrorInfo? = null)

Properties

createdAt
The Unix timestamp (in seconds) for when the fine-tuning job was created.

error (val error: ErrorInfo? = null)
Contains more information on the cause of failure for failed fine-tuning jobs, or null if not failed.

fineTunedModel (val fineTunedModel: ModelId? = null)
The name of the fine-tuned model that is being created, or null if the fine-tuning job is still running.

finishedAt (val finishedAt: Int? = null)
The Unix timestamp (in seconds) for when the fine-tuning job was finished, or null if still running.

hyperparameters
The hyperparameters used for the fine-tuning job.

id
The object identifier, which can be referenced in the API endpoints.

model
The base model that is being fine-tuned.

organizationId
The organization that owns the fine-tuning job.

resultFiles
The compiled results file ID(s) for the fine-tuning job, retrievable via the Files API.

status
The current status of the fine-tuning job (e.g., Status.ValidatingFiles, Status.Queued, etc.).

trainedTokens (val trainedTokens: Int? = null)
The total number of billable tokens processed by this fine-tuning job, or null if the job is still running.

trainingFile
The file ID used for training, retrievable via the Files API.

validationFile (val validationFile: FileId? = null)
The file ID used for validation, retrievable via the Files API, or null if not available.
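A hedged sketch of inspecting a job using only the properties documented above; the job is assumed to be a FineTuningJob obtained from the API.

import com.aallam.openai.api.finetuning.FineTuningJob

// Summarize the state of a fine-tuning job on standard output.
fun summarize(job: FineTuningJob) {
    println("job ${job.id.id} on base model ${job.model} is ${job.status}")
    job.fineTunedModel?.let { println("fine-tuned model: $it") }
    job.trainedTokens?.let { println("billable tokens: $it") }
    job.error?.let { println("failure: ${it.code}: ${it.message}") }
}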
new file: openai-core/com.aallam.openai.api.finetuning/-fine-tuning-job/model.html
model: The base model that is being fine-tuned.

new file: openai-core/com.aallam.openai.api.finetuning/-fine-tuning-job/organization-id.html
organizationId: The organization that owns the fine-tuning job.

new file: openai-core/com.aallam.openai.api.finetuning/-fine-tuning-job/result-files.html
resultFiles: The compiled results file ID(s) for the fine-tuning job, retrievable via the Files API.

new file: openai-core/com.aallam.openai.api.finetuning/-fine-tuning-job/status.html
status: The current status of the fine-tuning job (e.g., Status.ValidatingFiles, Status.Queued, etc.).

new file: openai-core/com.aallam.openai.api.finetuning/-fine-tuning-job/trained-tokens.html
trainedTokens (val trainedTokens: Int? = null): The total number of billable tokens processed by this fine-tuning job, or null if the job is still running.

new file: openai-core/com.aallam.openai.api.finetuning/-fine-tuning-job/training-file.html
trainingFile: The file ID used for training, retrievable via the Files API.

new file: openai-core/com.aallam.openai.api.finetuning/-fine-tuning-job/validation-file.html
validationFile (val validationFile: FileId? = null): The file ID used for validation, retrievable via the Files API, or null if not available.
new file: openai-core/com.aallam.openai.api.finetuning/-fine-tuning-request-builder/-fine-tuning-request-builder.html
FineTuningRequestBuilder constructor:
constructor()

new file: openai-core/com.aallam.openai.api.finetuning/-fine-tuning-request-builder/build.html
build: Create a new instance of FineTuningRequest.

new file: openai-core/com.aallam.openai.api.finetuning/-fine-tuning-request-builder/hyperparameters.html
hyperparameters: The hyperparameters used for the fine-tuning job (Optional).
new file: openai-core/com.aallam.openai.api.finetuning/-fine-tuning-request-builder/index.html

FineTuningRequestBuilder

Builder of FineTuningRequest instances.

Constructors

constructor()

Properties

hyperparameters
The hyperparameters used for the fine-tuning job (Optional).

model
The name of the model to fine-tune. See /docs/guides/fine-tuning/what-models-can-be-fine-tuned for more details.

suffix
A string of up to 18 characters that will be added to your fine-tuned model name (Optional).

trainingFile
The ID of an uploaded file that contains training data. See /docs/api-reference/files/upload for how to upload a file.

validationFile
The ID of an uploaded file that contains validation data (Optional). The same data should not be present in both train and validation files.

Functions

build
Create a new instance of FineTuningRequest.
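A sketch of the builder used in isolation. It assumes the properties listed above are mutable on the builder and that FileId and ModelId wrap plain string ids (their import locations are assumptions as well); the file id is hypothetical. The library also exposes a fineTuningRequest function, whose exact signature is not shown on these pages.

import com.aallam.openai.api.file.FileId
import com.aallam.openai.api.finetuning.FineTuningRequestBuilder
import com.aallam.openai.api.model.ModelId

// Assemble a FineTuningRequest step by step, then build it.
val builtRequest = FineTuningRequestBuilder().apply {
    trainingFile = FileId("file-abc123")  // hypothetical uploaded file id
    model = ModelId("gpt-3.5-turbo")
    suffix = "my-suffix"
}.build()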
new file: openai-core/com.aallam.openai.api.finetuning/-fine-tuning-request-builder/model.html
model: The name of the model to fine-tune. See /docs/guides/fine-tuning/what-models-can-be-fine-tuned for more details.

new file: openai-core/com.aallam.openai.api.finetuning/-fine-tuning-request-builder/suffix.html
suffix: A string of up to 18 characters that will be added to your fine-tuned model name (Optional).

new file: openai-core/com.aallam.openai.api.finetuning/-fine-tuning-request-builder/training-file.html
trainingFile: The ID of an uploaded file that contains training data. See /docs/api-reference/files/upload for how to upload a file.

new file: openai-core/com.aallam.openai.api.finetuning/-fine-tuning-request-builder/validation-file.html
validationFile: The ID of an uploaded file that contains validation data (Optional). The same data should not be present in both train and validation files.
new file: openai-core/com.aallam.openai.api.finetuning/-fine-tuning-request/-fine-tuning-request.html
FineTuningRequest constructor:
constructor(trainingFile: FileId, model: ModelId, validationFile: FileId? = null, hyperparameters: Hyperparameters? = null, suffix: String? = null)

new file: openai-core/com.aallam.openai.api.finetuning/-fine-tuning-request/hyperparameters.html
hyperparameters: The hyperparameters used for the fine-tuning job.
new file: openai-core/com.aallam.openai.api.finetuning/-fine-tuning-request/index.html

FineTuningRequest

@Serializable
data class FineTuningRequest(val trainingFile: FileId, val model: ModelId, val validationFile: FileId? = null, val hyperparameters: Hyperparameters? = null, val suffix: String? = null)

A data class representing a fine-tuning request.

Constructors

constructor(trainingFile: FileId, model: ModelId, validationFile: FileId? = null, hyperparameters: Hyperparameters? = null, suffix: String? = null)

Properties

hyperparameters
The hyperparameters used for the fine-tuning job.

model
The name of the model to fine-tune. You can select one of the supported models (see /docs/guides/fine-tuning/what-models-can-be-fine-tuned).

suffix (val suffix: String? = null)
A string of up to 18 characters that will be added to your fine-tuned model name. For example, a suffix of "custom-model-name" would produce a model name like ft:gpt-3.5-turbo:openai:custom-model-name:7p4lURel.

trainingFile
The ID of an uploaded file that contains training data. See /docs/api-reference/files/upload for how to upload a file.

validationFile (val validationFile: FileId? = null)
The ID of an uploaded file that contains validation data.
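A minimal sketch of constructing the request above with an explicit epoch count; it assumes FileId and ModelId wrap plain string ids, and the file id is hypothetical.

import com.aallam.openai.api.file.FileId
import com.aallam.openai.api.finetuning.FineTuningRequest
import com.aallam.openai.api.finetuning.Hyperparameters
import com.aallam.openai.api.model.ModelId

// Request fine-tuning of a base model on an uploaded JSONL training file.
val request = FineTuningRequest(
    trainingFile = FileId("file-abc123"),            // hypothetical uploaded file id
    model = ModelId("gpt-3.5-turbo"),
    hyperparameters = Hyperparameters(nEpochs = 4),  // explicit epoch count
    suffix = "custom-model-name",
)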
new file: openai-core/com.aallam.openai.api.finetuning/-fine-tuning-request/model.html
model: The name of the model to fine-tune. You can select one of the supported models (see /docs/guides/fine-tuning/what-models-can-be-fine-tuned).

new file: openai-core/com.aallam.openai.api.finetuning/-fine-tuning-request/suffix.html
suffix (val suffix: String? = null): A string of up to 18 characters that will be added to your fine-tuned model name. For example, a suffix of "custom-model-name" would produce a model name like ft:gpt-3.5-turbo:openai:custom-model-name:7p4lURel.

new file: openai-core/com.aallam.openai.api.finetuning/-fine-tuning-request/training-file.html
trainingFile: The ID of an uploaded file that contains training data. See /docs/api-reference/files/upload for how to upload a file. Your dataset must be formatted as a JSONL file. Additionally, you must upload your file with the purpose fine-tune. See the /docs/guides/fine-tuning for more details.

new file: openai-core/com.aallam.openai.api.finetuning/-fine-tuning-request/validation-file.html
validationFile (val validationFile: FileId? = null): The ID of an uploaded file that contains validation data. If you provide this file, the data is used to generate validation metrics periodically during fine-tuning. These metrics can be viewed in the fine-tuning results file. The same data should not be present in both train and validation files. Your dataset must be formatted as a JSONL file. You must upload your file with the purpose fine-tune. See the /docs/guides/fine-tuning for more details.
new file: openai-core/com.aallam.openai.api.finetuning/-hyperparameters/-hyperparameters.html
Hyperparameters constructors:
constructor(nEpochs: Int)
constructor(nEpochs: String)
constructor(nEpochs: Hyperparameters.NEpochs)

new file: openai-core/com.aallam.openai.api.finetuning/-hyperparameters/-n-epochs/-companion/-auto.html
Auto: A predefined NEpochs instance which indicates automatic determination of epochs.

new file: openai-core/com.aallam.openai.api.finetuning/-hyperparameters/-n-epochs/-companion/index.html

Companion

object Companion

Properties

Auto
A predefined NEpochs instance which indicates automatic determination of epochs.

Functions

invoke
operator fun invoke(value: Int): Hyperparameters.NEpochs
Creates an NEpochs instance holding an Int value.
operator fun invoke(value: String): Hyperparameters.NEpochs
Creates an NEpochs instance holding a String value.

new file: openai-core/com.aallam.openai.api.finetuning/-hyperparameters/-n-epochs/-companion/invoke.html
invoke:
operator fun invoke(value: Int): Hyperparameters.NEpochs
Creates an NEpochs instance holding an Int value.
operator fun invoke(value: String): Hyperparameters.NEpochs
Creates an NEpochs instance holding a String value.
new file: openai-core/com.aallam.openai.api.finetuning/-hyperparameters/-n-epochs/index.html

NEpochs

@Serializable(with = NEpochsSerializer::class)
interface NEpochs

A sealed interface representing a flexible parameter for the number of epochs.

This interface allows the number of epochs to be either a specific Int or a String representing an automatic value selection ("auto").

Types

Companion
object Companion

Properties

value
abstract val value: Any
A value which can be either an Int or a String representing the number of epochs.

new file: openai-core/com.aallam.openai.api.finetuning/-hyperparameters/-n-epochs/value.html
value (abstract val value: Any): A value which can be either an Int or a String representing the number of epochs.
new file: openai-core/com.aallam.openai.api.finetuning/-hyperparameters/index.html

Hyperparameters

@Serializable
data class Hyperparameters(val nEpochs: Hyperparameters.NEpochs)

A data class representing hyperparameters used during the fine-tuning of a model.

This class holds configuration options that guide the training process, and it supports serialization to allow for easy storage and retrieval of the settings.

Constructors

constructor(nEpochs: Int)
constructor(nEpochs: String)
constructor(nEpochs: Hyperparameters.NEpochs)

Types

NEpochs
@Serializable(with = NEpochsSerializer::class)
interface NEpochs
A sealed interface representing a flexible parameter for the number of epochs.

Properties

nEpochs
The number of training epochs.
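A brief sketch exercising the constructor overloads and the NEpochs helpers listed above.

import com.aallam.openai.api.finetuning.Hyperparameters

val fixedEpochs = Hyperparameters(nEpochs = 4)                  // explicit Int
val autoEpochs = Hyperparameters(nEpochs = "auto")              // automatic selection as a String
val fromAuto = Hyperparameters(Hyperparameters.NEpochs.Auto)    // predefined Auto instance
val fromInvoke = Hyperparameters(Hyperparameters.NEpochs(4))    // via the companion invoke operator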
new file: openai-core/com.aallam.openai.api.finetuning/-hyperparameters/n-epochs.html
nEpochs: The number of training epochs. This parameter can either be a specific number or the string "auto", where "auto" implies that the number of epochs will be determined automatically. It uses a sealed interface NEpochs to accept either an integer or a string value.
new file: openai-core/com.aallam.openai.api.finetuning/-level/-companion/-e-r-r-o-r.html
ERROR: An error event.

new file: openai-core/com.aallam.openai.api.finetuning/-level/-companion/-i-n-f-o.html
INFO (val INFO: Level): An informational event.

new file: openai-core/com.aallam.openai.api.finetuning/-level/-companion/-w-a-r-n.html
WARN (val WARN: Level): A warning event.
new file: openai-core/com.aallam.openai.api.finetuning/-level/-companion/index.html

Companion

object Companion

Properties

ERROR
An error event.

INFO (val INFO: Level)
An informational event.

WARN (val WARN: Level)
A warning event.
new file: openai-core/com.aallam.openai.api.finetuning/-level/-level.html
Level constructor:
constructor(value: String)

new file: openai-core/com.aallam.openai.api.finetuning/-level/index.html

Level

@Serializable
value class Level(val value: String)

Represents the severity level of a fine-tuning job event.

Constructors

constructor(value: String)

Types

Companion
object Companion

Properties

value
val value: String
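A small sketch of branching on the severity constants defined in the companion above.

import com.aallam.openai.api.finetuning.FineTuningJobEvent
import com.aallam.openai.api.finetuning.Level

// True for events that likely need attention.
fun needsAttention(event: FineTuningJobEvent): Boolean =
    event.level == Level.ERROR || event.level == Level.WARN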
new file: openai-core/com.aallam.openai.api.finetuning/-level/value.html
value: val value: String

new file: openai-core/com.aallam.openai.api.finetuning/fine-tuning-request.html
fineTuningRequest: Create a Fine-Tuning request.
new file: openai-core/com.aallam.openai.api.finetuning/index.html

Package-level declarations

Types

ErrorInfo
@Serializable
data class ErrorInfo(val message: String, val code: String, val param: String? = null)
For fine-tuning jobs that have failed, this will contain more information on the cause of the failure.

FineTuningId
@Serializable
value class FineTuningId(val id: String)
Fine-tuning identifier.

FineTuningJob
@Serializable
data class FineTuningJob(val id: FineTuningId, val createdAt: Int, val model: ModelId, val organizationId: OrganizationId, val status: Status, val hyperparameters: Hyperparameters, val trainingFile: FileId, val resultFiles: List<FileId>, val finishedAt: Int? = null, val fineTunedModel: ModelId? = null, val validationFile: FileId? = null, val trainedTokens: Int? = null, val error: ErrorInfo? = null)
A data class representing a fine-tuning job.

FineTuningJobEvent
@Serializable
data class FineTuningJobEvent(val id: String, val createdAt: Int, val level: Level, val message: String)
Data class representing a fine-tuning job event.

FineTuningRequest
@Serializable
data class FineTuningRequest(val trainingFile: FileId, val model: ModelId, val validationFile: FileId? = null, val hyperparameters: Hyperparameters? = null, val suffix: String? = null)
A data class representing a fine-tuning request.

FineTuningRequestBuilder
Builder of FineTuningRequest instances.

Hyperparameters
@Serializable
data class Hyperparameters(val nEpochs: Hyperparameters.NEpochs)
A data class representing hyperparameters used during the fine-tuning of a model.

Level
@Serializable
value class Level(val value: String)
Represents the severity level of a fine-tuning job event.

Functions

fineTuningRequest
Create a Fine-Tuning request.
new file: openai-core/com.aallam.openai.api.http/-timeout/-timeout.html
Timeout constructor:
constructor(request: Duration? = null, connect: Duration? = null, socket: Duration? = null)

Parameters
request: time period required to process an HTTP call: from sending a request to receiving a response
connect: time period in which a client should establish a connection with a server
socket: maximum time of inactivity between two data packets when exchanging data with a server

new file: openai-core/com.aallam.openai.api.http/-timeout/connect.html
connect (val connect: Duration? = null): time period in which a client should establish a connection with a server
new file: openai-core/com.aallam.openai.api.http/-timeout/index.html

Timeout

class Timeout(val request: Duration? = null, val connect: Duration? = null, val socket: Duration? = null)

Http operations timeouts.

Parameters
request: time period required to process an HTTP call: from sending a request to receiving a response
connect: time period in which a client should establish a connection with a server
socket: maximum time of inactivity between two data packets when exchanging data with a server

Constructors

constructor(request: Duration? = null, connect: Duration? = null, socket: Duration? = null)

Properties

connect
val connect: Duration? = null

request
val request: Duration? = null

socket
val socket: Duration? = null
+ + + diff --git a/openai-core/com.aallam.openai.api.http/-timeout/request.html b/openai-core/com.aallam.openai.api.http/-timeout/request.html new file mode 100644 index 00000000..5f5c4156 --- /dev/null +++ b/openai-core/com.aallam.openai.api.http/-timeout/request.html @@ -0,0 +1,66 @@ + + + + + request + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

request

+
+
val request: Duration? = null

Parameters

request

time period required to process an HTTP call: from sending a request to receiving a response

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.http/-timeout/socket.html b/openai-core/com.aallam.openai.api.http/-timeout/socket.html new file mode 100644 index 00000000..3b252b1a --- /dev/null +++ b/openai-core/com.aallam.openai.api.http/-timeout/socket.html @@ -0,0 +1,66 @@ + + + + + socket + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

socket

+
+
val socket: Duration? = null

Parameters

socket

maximum time of inactivity between two data packets when exchanging data with a server

+
+ +
+
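A small usage sketch (the durations are illustrative choices, not library defaults):

import com.aallam.openai.api.http.Timeout
import kotlin.time.Duration.Companion.seconds

// Generous request timeout for long calls, tighter connect and socket timeouts.
val timeout = Timeout(
    request = 60.seconds,
    connect = 10.seconds,
    socket = 30.seconds,
)
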
diff --git a/openai-core/com.aallam.openai.api.http/index.html b/openai-core/com.aallam.openai.api.http/index.html new file mode 100644

Package-level declarations

Types

class Timeout(val request: Duration? = null, val connect: Duration? = null, val socket: Duration? = null)
Http operations timeouts.

diff --git a/openai-core/com.aallam.openai.api.image.internal/-image-creation-request/index.html b/openai-core/com.aallam.openai.api.image.internal/-image-creation-request/index.html new file mode 100644

ImageCreationRequest

@Serializable
data class ImageCreationRequest(val prompt: String, val n: Int? = null, val size: ImageSize? = null, val user: String? = null, val responseFormat: ImageResponseFormat)

Image generation request. Results are expected as URLs.

Constructors

constructor(prompt: String, n: Int? = null, size: ImageSize? = null, user: String? = null, responseFormat: ImageResponseFormat)

Properties

val prompt: String
val n: Int? = null
val size: ImageSize? = null
val user: String? = null
val responseFormat: ImageResponseFormat

diff --git a/openai-core/com.aallam.openai.api.image.internal/-image-response-format/index.html b/openai-core/com.aallam.openai.api.image.internal/-image-response-format/index.html new file mode 100644

ImageResponseFormat

@Serializable
value class ImageResponseFormat(val format: String)

The format in which the generated images are returned.

Constructors

constructor(format: String)

Properties

val format: String

Companion

object Companion

base64Json
Response format as base 64 json.

url
Response format as url.

diff --git a/openai-core/com.aallam.openai.api.image.internal/index.html b/openai-core/com.aallam.openai.api.image.internal/index.html new file mode 100644

Package-level declarations

Types

@Serializable
data class ImageCreationRequest(val prompt: String, val n: Int? = null, val size: ImageSize? = null, val user: String? = null, val responseFormat: ImageResponseFormat)
Image generation request. Results are expected as URLs.

@Serializable
value class ImageResponseFormat(val format: String)
The format in which the generated images are returned.

diff --git a/openai-core/com.aallam.openai.api.image/-image-creation-builder/index.html b/openai-core/com.aallam.openai.api.image/-image-creation-builder/index.html new file mode 100644

ImageCreationBuilder

Builder of ImageCreation instances.

Constructors

constructor()

Properties

var n: Int?
The number of images to generate. Must be between 1 and 10.

prompt
A text description of the desired image(s). The maximum length is 1000 characters.

size
The size of the generated images.

var user: String?
The format in which the generated images are returned. Must be one of url or b64_json.

Functions

build
Creates the ImageCreation instance

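A sketch of building an ImageCreation through this builder (prompt text and count are illustrative):

import com.aallam.openai.api.image.ImageCreationBuilder
import com.aallam.openai.api.image.ImageSize

// Configure the mutable builder, then materialize an immutable ImageCreation.
val creation = ImageCreationBuilder().apply {
    prompt = "A watercolor painting of a lighthouse"  // illustrative prompt
    n = 2
    size = ImageSize.is512x512
}.build()
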
diff --git a/openai-core/com.aallam.openai.api.image/-image-creation/index.html b/openai-core/com.aallam.openai.api.image/-image-creation/index.html new file mode 100644

ImageCreation

class ImageCreation(val prompt: String, val n: Int? = null, val size: ImageSize? = null, val user: String? = null)

Image generation request.

Constructors

constructor(prompt: String, n: Int? = null, size: ImageSize? = null, user: String? = null)

Properties

val n: Int? = null
The number of images to generate. Must be between 1 and 10.

val prompt: String
A text description of the desired image(s). The maximum length is 1000 characters.

val size: ImageSize? = null
The size of the generated images.

val user: String? = null
The format in which the generated images are returned. Must be one of url or b64_json.

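Direct construction is equivalent to the builder above (values are illustrative):

import com.aallam.openai.api.image.ImageCreation
import com.aallam.openai.api.image.ImageSize

// Request two 256x256 images for the given prompt.
val imageCreation = ImageCreation(
    prompt = "A cubist portrait of a cat",
    n = 2,
    size = ImageSize.is256x256,
)
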
diff --git a/openai-core/com.aallam.openai.api.image/-image-edit-builder/index.html b/openai-core/com.aallam.openai.api.image/-image-edit-builder/index.html new file mode 100644

ImageEditBuilder

Builder of ImageEdit instances.

Constructors

constructor()

Properties

image
The image to edit. Must be a valid PNG file, less than 4MB, and square. If mask is not provided, image must have transparency, which will be used as the mask.

mask
An additional image whose fully transparent areas (e.g. where alpha is zero) indicate where image should be edited. Must be a valid PNG file, less than 4MB, and have the same dimensions as image.

var n: Int?
The number of images to generate. Must be between 1 and 10.

prompt
A text description of the desired image(s). The maximum length is 1000 characters.

size
The size of the generated images.

var user: String?
A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse.

Functions

build
Creates the ImageEdit instance

diff --git a/openai-core/com.aallam.openai.api.image/-image-edit/index.html b/openai-core/com.aallam.openai.api.image/-image-edit/index.html new file mode 100644

ImageEdit

class ImageEdit(val image: FileSource, val mask: FileSource, val prompt: String, val n: Int? = null, val size: ImageSize? = null, val user: String? = null)

Image edit request.

Constructors

constructor(image: FileSource, mask: FileSource, prompt: String, n: Int? = null, size: ImageSize? = null, user: String? = null)

Properties

val image: FileSource
The image to edit. Must be a valid PNG file, less than 4MB, and square. If mask is not provided, image must have transparency, which will be used as the mask.

val mask: FileSource
An additional image whose fully transparent areas (e.g. where alpha is zero) indicate where image should be edited. Must be a valid PNG file, less than 4MB, and have the same dimensions as image.

val n: Int? = null
The number of images to generate. Must be between 1 and 10.

val prompt: String
A text description of the desired image(s). The maximum length is 1000 characters.

val size: ImageSize? = null
The size of the generated images.

val user: String? = null
A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse.

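A construction sketch; loading a PNG as a FileSource is not covered in this section, so the image and mask are left as explicit placeholders, and the FileSource import path is assumed from the library's package layout:

import com.aallam.openai.api.file.FileSource
import com.aallam.openai.api.image.ImageEdit
import com.aallam.openai.api.image.ImageSize

// Placeholders: load a square PNG (less than 4MB) and a mask with the same dimensions.
val sourceImage: FileSource = TODO("load the PNG to edit as a FileSource")
val editMask: FileSource = TODO("load the mask PNG as a FileSource")

val imageEdit = ImageEdit(
    image = sourceImage,
    mask = editMask,
    prompt = "Add a red balloon in the sky",  // illustrative prompt
    size = ImageSize.is1024x1024,
)
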
diff --git a/openai-core/com.aallam.openai.api.image/-image-j-s-o-n/index.html b/openai-core/com.aallam.openai.api.image/-image-j-s-o-n/index.html new file mode 100644

ImageJSON

@Serializable
data class ImageJSON(val b64JSON: String)

Generated image JSON (base 64).

Constructors

constructor(b64JSON: String)

Properties

val b64JSON: String
Image url string.

diff --git a/openai-core/com.aallam.openai.api.image/-image-size/index.html b/openai-core/com.aallam.openai.api.image/-image-size/index.html new file mode 100644

ImageSize

@Serializable
value class ImageSize(val size: String)

The size of the generated images.

Constructors

constructor(size: String)

Properties

val size: String

Companion

object Companion

is1024x1024
Size image of dimension 1024x1024.

is256x256
Size image of dimension 256x256.

is512x512
Size image of dimension 512x512.

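The companion constants cover the documented dimensions; the raw string can also be wrapped directly (the custom value below is illustrative):

import com.aallam.openai.api.image.ImageSize

val small = ImageSize.is256x256      // 256x256
val medium = ImageSize.is512x512     // 512x512
val large = ImageSize.is1024x1024    // 1024x1024

// The value class simply wraps the raw size string.
val custom = ImageSize("1024x1024")
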
diff --git a/openai-core/com.aallam.openai.api.image/-image-u-r-l/index.html b/openai-core/com.aallam.openai.api.image/-image-u-r-l/index.html new file mode 100644

ImageURL

@Serializable
data class ImageURL(val url: String)

Generated image URL.

Constructors

constructor(url: String)

Properties

val url: String
Image url string.

diff --git a/openai-core/com.aallam.openai.api.image/-image-variation-builder/index.html b/openai-core/com.aallam.openai.api.image/-image-variation-builder/index.html new file mode 100644

ImageVariationBuilder

Builder of ImageVariation instances.

Constructors

constructor()

Properties

image
The image to use as the basis for the variation(s). Must be a valid PNG file, less than 4MB, and square.

var n: Int?
The number of images to generate. Must be between 1 and 10.

size
The size of the generated images.

var user: String?
A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse.

Functions

build
Creates the ImageVariation instance

diff --git a/openai-core/com.aallam.openai.api.image/-image-variation/index.html b/openai-core/com.aallam.openai.api.image/-image-variation/index.html new file mode 100644

ImageVariation

class ImageVariation(val image: FileSource, val n: Int? = null, val size: ImageSize? = null, val user: String? = null)

Image variant request.

Constructors

constructor(image: FileSource, n: Int? = null, size: ImageSize? = null, user: String? = null)

Properties

val image: FileSource
The image to use as the basis for the variation(s). Must be a valid PNG file, less than 4MB, and square.

val n: Int? = null
The number of images to generate. Must be between 1 and 10.

val size: ImageSize? = null
The size of the generated images.

val user: String? = null
A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse.

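A sketch using the documented constructor; obtaining the FileSource is out of scope in this section, so it stays a placeholder (its import path is assumed):

import com.aallam.openai.api.file.FileSource
import com.aallam.openai.api.image.ImageSize
import com.aallam.openai.api.image.ImageVariation

val baseImage: FileSource = TODO("load a square PNG (less than 4MB) as a FileSource")

// Ask for three 512x512 variations of the base image.
val variation = ImageVariation(
    image = baseImage,
    n = 3,
    size = ImageSize.is512x512,
)
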
diff --git a/openai-core/com.aallam.openai.api.image/index.html b/openai-core/com.aallam.openai.api.image/index.html new file mode 100644

Package-level declarations

Types

class ImageCreation(val prompt: String, val n: Int? = null, val size: ImageSize? = null, val user: String? = null)
Image generation request.

ImageCreationBuilder
Builder of ImageCreation instances.

class ImageEdit(val image: FileSource, val mask: FileSource, val prompt: String, val n: Int? = null, val size: ImageSize? = null, val user: String? = null)
Image edit request.

ImageEditBuilder
Builder of ImageEdit instances.

@Serializable
data class ImageJSON(val b64JSON: String)
Generated image JSON (base 64).

@Serializable
value class ImageSize(val size: String)
The size of the generated images.

@Serializable
data class ImageURL(val url: String)
Generated image URL.

class ImageVariation(val image: FileSource, val n: Int? = null, val size: ImageSize? = null, val user: String? = null)
Image variant request.

ImageVariationBuilder
Builder of ImageVariation instances.

Functions

imageCreation
Image generation request.

imageEdit
Image edit request.

imageVariation
Image variant request.

diff --git a/openai-core/com.aallam.openai.api.logging/-log-level/index.html b/openai-core/com.aallam.openai.api.logging/-log-level/index.html new file mode 100644

LogLevel

Http client logging log level.

Entries

All
Body
Headers
Info
None

Functions

fun valueOf(value: String): LogLevel
Returns the enum constant of this type with the specified name. The string must match exactly an identifier used to declare an enum constant in this type. (Extraneous whitespace characters are not permitted.) Throws an exception if this enum type has no constant with the specified name.

values
Returns an array containing the constants of this enum type, in the order they're declared. This method may be used to iterate over the constants.

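The enum helpers behave like any Kotlin enum; a quick sketch:

import com.aallam.openai.api.logging.LogLevel

// Parse a level by its exact constant name and enumerate all levels.
val level = LogLevel.valueOf("Headers")
val allLevels = LogLevel.values().toList()   // all constants, in declaration order
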
diff --git a/openai-core/com.aallam.openai.api.logging/-logger/index.html b/openai-core/com.aallam.openai.api.logging/-logger/index.html new file mode 100644

Logger

enum Logger : Enum<Logger>

Http client logger.

Entries

Default
Default logger to use.

Simple
Logger using println.

Empty
Empty Logger for test purpose.

Functions

fun valueOf(value: String): Logger
Returns the enum constant of this type with the specified name. The string must match exactly an identifier used to declare an enum constant in this type. (Extraneous whitespace characters are not permitted.) Throws an exception if this enum type has no constant with the specified name.

values
Returns an array containing the constants of this enum type, in the order they're declared. This method may be used to iterate over the constants.

+ + + diff --git a/openai-core/com.aallam.openai.api.logging/index.html b/openai-core/com.aallam.openai.api.logging/index.html new file mode 100644 index 00000000..42c65e54 --- /dev/null +++ b/openai-core/com.aallam.openai.api.logging/index.html @@ -0,0 +1,104 @@ + + + + + com.aallam.openai.api.logging + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

Package-level declarations

+
+
+
+
+
+

Types

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
enum Logger : Enum<Logger>

HTTP client logger.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

HTTP client logging log level.

+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.model/-model-id/-model-id.html b/openai-core/com.aallam.openai.api.model/-model-id/-model-id.html new file mode 100644 index 00000000..c4d6211c --- /dev/null +++ b/openai-core/com.aallam.openai.api.model/-model-id/-model-id.html @@ -0,0 +1,66 @@ + + + + + ModelId + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

ModelId

+
+
constructor(id: String)
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.model/-model-id/id.html b/openai-core/com.aallam.openai.api.model/-model-id/id.html new file mode 100644 index 00000000..4cb49909 --- /dev/null +++ b/openai-core/com.aallam.openai.api.model/-model-id/id.html @@ -0,0 +1,66 @@ + + + + + id + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

id

+
+
val id: String
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.model/-model-id/index.html b/openai-core/com.aallam.openai.api.model/-model-id/index.html new file mode 100644 index 00000000..79bc9cc6 --- /dev/null +++ b/openai-core/com.aallam.openai.api.model/-model-id/index.html @@ -0,0 +1,109 @@ + + + + + ModelId + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

ModelId

+
@Serializable
value class ModelId(val id: String)

Model identifier.
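A minimal sketch of wrapping a raw identifier in the value class shown above; the id string is purely illustrative.

import com.aallam.openai.api.model.ModelId

fun main() {
    val modelId = ModelId("gpt-3.5-turbo") // illustrative identifier string
    println(modelId.id)                    // the raw id carried by the value class
}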

+
+
+
+
+
+

Constructors

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
constructor(id: String)
+
+
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
val id: String
+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.model/-model-permission/-model-permission.html b/openai-core/com.aallam.openai.api.model/-model-permission/-model-permission.html new file mode 100644 index 00000000..0798f01f --- /dev/null +++ b/openai-core/com.aallam.openai.api.model/-model-permission/-model-permission.html @@ -0,0 +1,66 @@ + + + + + ModelPermission + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

ModelPermission

+
+
constructor(id: String, created: Long, allowCreateEngine: Boolean, allowSampling: Boolean, allowLogprobs: Boolean, allowSearchIndices: Boolean, allowView: Boolean, allowFineTuning: Boolean, organization: String, isBlocking: Boolean)
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.model/-model-permission/allow-create-engine.html b/openai-core/com.aallam.openai.api.model/-model-permission/allow-create-engine.html new file mode 100644 index 00000000..5d9ddcc9 --- /dev/null +++ b/openai-core/com.aallam.openai.api.model/-model-permission/allow-create-engine.html @@ -0,0 +1,66 @@ + + + + + allowCreateEngine + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

allowCreateEngine

+
+ +
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.model/-model-permission/allow-fine-tuning.html b/openai-core/com.aallam.openai.api.model/-model-permission/allow-fine-tuning.html new file mode 100644 index 00000000..4c2d88f3 --- /dev/null +++ b/openai-core/com.aallam.openai.api.model/-model-permission/allow-fine-tuning.html @@ -0,0 +1,66 @@ + + + + + allowFineTuning + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

allowFineTuning

+
+ +
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.model/-model-permission/allow-logprobs.html b/openai-core/com.aallam.openai.api.model/-model-permission/allow-logprobs.html new file mode 100644 index 00000000..1b01f8d0 --- /dev/null +++ b/openai-core/com.aallam.openai.api.model/-model-permission/allow-logprobs.html @@ -0,0 +1,66 @@ + + + + + allowLogprobs + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

allowLogprobs

+
+ +
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.model/-model-permission/allow-sampling.html b/openai-core/com.aallam.openai.api.model/-model-permission/allow-sampling.html new file mode 100644 index 00000000..ba6fc400 --- /dev/null +++ b/openai-core/com.aallam.openai.api.model/-model-permission/allow-sampling.html @@ -0,0 +1,66 @@ + + + + + allowSampling + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

allowSampling

+
+ +
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.model/-model-permission/allow-search-indices.html b/openai-core/com.aallam.openai.api.model/-model-permission/allow-search-indices.html new file mode 100644 index 00000000..3de7b947 --- /dev/null +++ b/openai-core/com.aallam.openai.api.model/-model-permission/allow-search-indices.html @@ -0,0 +1,66 @@ + + + + + allowSearchIndices + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

allowSearchIndices

+
+ +
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.model/-model-permission/allow-view.html b/openai-core/com.aallam.openai.api.model/-model-permission/allow-view.html new file mode 100644 index 00000000..3d73e090 --- /dev/null +++ b/openai-core/com.aallam.openai.api.model/-model-permission/allow-view.html @@ -0,0 +1,66 @@ + + + + + allowView + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

allowView

+
+ +
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.model/-model-permission/created.html b/openai-core/com.aallam.openai.api.model/-model-permission/created.html new file mode 100644 index 00000000..affdddfc --- /dev/null +++ b/openai-core/com.aallam.openai.api.model/-model-permission/created.html @@ -0,0 +1,66 @@ + + + + + created + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

created

+
+ +
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.model/-model-permission/id.html b/openai-core/com.aallam.openai.api.model/-model-permission/id.html new file mode 100644 index 00000000..9aa7c5f1 --- /dev/null +++ b/openai-core/com.aallam.openai.api.model/-model-permission/id.html @@ -0,0 +1,66 @@ + + + + + id + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

id

+
+
val id: String
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.model/-model-permission/index.html b/openai-core/com.aallam.openai.api.model/-model-permission/index.html new file mode 100644 index 00000000..b65388a3 --- /dev/null +++ b/openai-core/com.aallam.openai.api.model/-model-permission/index.html @@ -0,0 +1,244 @@ + + + + + ModelPermission + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

ModelPermission

+
@Serializable
data class ModelPermission(val id: String, val created: Long, val allowCreateEngine: Boolean, val allowSampling: Boolean, val allowLogprobs: Boolean, val allowSearchIndices: Boolean, val allowView: Boolean, val allowFineTuning: Boolean, val organization: String, val isBlocking: Boolean)

Model permission details.
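A small sketch of reading the flags documented below from a returned permission entry; the helper name is hypothetical.

import com.aallam.openai.api.model.ModelPermission

// Hypothetical helper: fine-tuning is only possible when the flag is set and the entry is not blocking.
fun canFineTune(permission: ModelPermission): Boolean =
    permission.allowFineTuning && !permission.isBlocking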

+
+
+
+
+
+

Constructors

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
constructor(id: String, created: Long, allowCreateEngine: Boolean, allowSampling: Boolean, allowLogprobs: Boolean, allowSearchIndices: Boolean, allowView: Boolean, allowFineTuning: Boolean, organization: String, isBlocking: Boolean)
+
+
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+ +
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+ +
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+ +
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+ +
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+ +
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+ +
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+ +
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
val id: String
+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+ +
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+ +
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.model/-model-permission/is-blocking.html b/openai-core/com.aallam.openai.api.model/-model-permission/is-blocking.html new file mode 100644 index 00000000..a1b39395 --- /dev/null +++ b/openai-core/com.aallam.openai.api.model/-model-permission/is-blocking.html @@ -0,0 +1,66 @@ + + + + + isBlocking + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

isBlocking

+
+ +
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.model/-model-permission/organization.html b/openai-core/com.aallam.openai.api.model/-model-permission/organization.html new file mode 100644 index 00000000..bf91719b --- /dev/null +++ b/openai-core/com.aallam.openai.api.model/-model-permission/organization.html @@ -0,0 +1,66 @@ + + + + + organization + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

organization

+
+ +
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.model/-model/-model.html b/openai-core/com.aallam.openai.api.model/-model/-model.html new file mode 100644 index 00000000..00d9db1b --- /dev/null +++ b/openai-core/com.aallam.openai.api.model/-model/-model.html @@ -0,0 +1,66 @@ + + + + + Model + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

Model

+
+
constructor(id: ModelId, created: Long, ownedBy: String, permission: List<ModelPermission>)
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.model/-model/created.html b/openai-core/com.aallam.openai.api.model/-model/created.html new file mode 100644 index 00000000..ee53cc81 --- /dev/null +++ b/openai-core/com.aallam.openai.api.model/-model/created.html @@ -0,0 +1,66 @@ + + + + + created + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

created

+
+ +
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.model/-model/id.html b/openai-core/com.aallam.openai.api.model/-model/id.html new file mode 100644 index 00000000..5bc254bf --- /dev/null +++ b/openai-core/com.aallam.openai.api.model/-model/id.html @@ -0,0 +1,66 @@ + + + + + id + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

id

+
+
val id: ModelId
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.model/-model/index.html b/openai-core/com.aallam.openai.api.model/-model/index.html new file mode 100644 index 00000000..c6f5e102 --- /dev/null +++ b/openai-core/com.aallam.openai.api.model/-model/index.html @@ -0,0 +1,154 @@ + + + + + Model + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

Model

+
@Serializable
data class Model(val id: ModelId, val created: Long, val ownedBy: String, val permission: List<ModelPermission>)

OpenAI's Model.
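A short sketch that summarises a Model using the properties listed below; the helper and its output format are made up for illustration.

import com.aallam.openai.api.model.Model

// Hypothetical helper describing a model returned by the API.
fun describe(model: Model): String =
    "${model.id.id} owned by ${model.ownedBy} (created=${model.created}, permissions=${model.permission.size})"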

+
+
+
+
+
+

Constructors

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
constructor(id: ModelId, created: Long, ownedBy: String, permission: List<ModelPermission>)
+
+
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+ +
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
val id: ModelId
+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+ +
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+ +
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.model/-model/owned-by.html b/openai-core/com.aallam.openai.api.model/-model/owned-by.html new file mode 100644 index 00000000..27007900 --- /dev/null +++ b/openai-core/com.aallam.openai.api.model/-model/owned-by.html @@ -0,0 +1,66 @@ + + + + + ownedBy + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

ownedBy

+
+ +
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.model/-model/permission.html b/openai-core/com.aallam.openai.api.model/-model/permission.html new file mode 100644 index 00000000..bd5da4b5 --- /dev/null +++ b/openai-core/com.aallam.openai.api.model/-model/permission.html @@ -0,0 +1,66 @@ + + + + + permission + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

permission

+
+ +
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.model/index.html b/openai-core/com.aallam.openai.api.model/index.html new file mode 100644 index 00000000..eeceb67d --- /dev/null +++ b/openai-core/com.aallam.openai.api.model/index.html @@ -0,0 +1,119 @@ + + + + + com.aallam.openai.api.model + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

Package-level declarations

+
+
+
+
+
+

Types

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
@Serializable
data class Model(val id: ModelId, val created: Long, val ownedBy: String, val permission: List<ModelPermission>)

OpenAI's Model.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
@Serializable
value class ModelId(val id: String)

Model identifier.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
@Serializable
data class ModelPermission(val id: String, val created: Long, val allowCreateEngine: Boolean, val allowSampling: Boolean, val allowLogprobs: Boolean, val allowSearchIndices: Boolean, val allowView: Boolean, val allowFineTuning: Boolean, val organization: String, val isBlocking: Boolean)

Model permission details.

+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.moderation/-categories/-categories.html b/openai-core/com.aallam.openai.api.moderation/-categories/-categories.html new file mode 100644 index 00000000..48d63039 --- /dev/null +++ b/openai-core/com.aallam.openai.api.moderation/-categories/-categories.html @@ -0,0 +1,66 @@ + + + + + Categories + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

Categories

+
+
constructor(hate: Boolean, hateThreatening: Boolean, selfHarm: Boolean, sexual: Boolean, sexualMinors: Boolean, violence: Boolean, violenceGraphic: Boolean, harassment: Boolean, harassmentThreatening: Boolean, selfHarmIntent: Boolean, selfHarmInstructions: Boolean)
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.moderation/-categories/harassment-threatening.html b/openai-core/com.aallam.openai.api.moderation/-categories/harassment-threatening.html new file mode 100644 index 00000000..9abab2e6 --- /dev/null +++ b/openai-core/com.aallam.openai.api.moderation/-categories/harassment-threatening.html @@ -0,0 +1,66 @@ + + + + + harassmentThreatening + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

harassmentThreatening

+
+

Harassment content that also includes violence or serious harm towards any target.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.moderation/-categories/harassment.html b/openai-core/com.aallam.openai.api.moderation/-categories/harassment.html new file mode 100644 index 00000000..7791f78b --- /dev/null +++ b/openai-core/com.aallam.openai.api.moderation/-categories/harassment.html @@ -0,0 +1,66 @@ + + + + + harassment + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

harassment

+
+

Content that expresses, incites, or promotes harassing language towards any target.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.moderation/-categories/hate-threatening.html b/openai-core/com.aallam.openai.api.moderation/-categories/hate-threatening.html new file mode 100644 index 00000000..7f7827f9 --- /dev/null +++ b/openai-core/com.aallam.openai.api.moderation/-categories/hate-threatening.html @@ -0,0 +1,66 @@ + + + + + hateThreatening + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

hateThreatening

+
+

Hateful content that also includes violence or serious harm towards the targeted group.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.moderation/-categories/hate.html b/openai-core/com.aallam.openai.api.moderation/-categories/hate.html new file mode 100644 index 00000000..661f83df --- /dev/null +++ b/openai-core/com.aallam.openai.api.moderation/-categories/hate.html @@ -0,0 +1,66 @@ + + + + + hate + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

hate

+
+

Content that expresses, incites, or promotes hate based on race, gender, ethnicity, religion, nationality, sexual orientation, disability status, or caste.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.moderation/-categories/index.html b/openai-core/com.aallam.openai.api.moderation/-categories/index.html new file mode 100644 index 00000000..adeb86c9 --- /dev/null +++ b/openai-core/com.aallam.openai.api.moderation/-categories/index.html @@ -0,0 +1,259 @@ + + + + + Categories + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

Categories

+
@Serializable
class Categories(val hate: Boolean, val hateThreatening: Boolean, val selfHarm: Boolean, val sexual: Boolean, val sexualMinors: Boolean, val violence: Boolean, val violenceGraphic: Boolean, val harassment: Boolean, val harassmentThreatening: Boolean, val selfHarmIntent: Boolean, val selfHarmInstructions: Boolean)
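A sketch (not part of the generated page) that turns the boolean flags listed under Properties below into a list of flagged category names; the label strings mirror the category names used by the score fields.

import com.aallam.openai.api.moderation.Categories

// Hypothetical helper collecting the names of the flagged categories.
fun flaggedCategories(c: Categories): List<String> = buildList {
    if (c.hate) add("hate")
    if (c.hateThreatening) add("hate/threatening")
    if (c.harassment) add("harassment")
    if (c.harassmentThreatening) add("harassment/threatening")
    if (c.selfHarm) add("self-harm")
    if (c.selfHarmIntent) add("self-harm/intent")
    if (c.selfHarmInstructions) add("self-harm/instructions")
    if (c.sexual) add("sexual")
    if (c.sexualMinors) add("sexual/minors")
    if (c.violence) add("violence")
    if (c.violenceGraphic) add("violence/graphic")
}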
+
+
+
+
+
+

Constructors

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
constructor(hate: Boolean, hateThreatening: Boolean, selfHarm: Boolean, sexual: Boolean, sexualMinors: Boolean, violence: Boolean, violenceGraphic: Boolean, harassment: Boolean, harassmentThreatening: Boolean, selfHarmIntent: Boolean, selfHarmInstructions: Boolean)
+
+
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+

Content that expresses, incites, or promotes harassing language towards any target.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

Harassment content that also includes violence or serious harm towards any target.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

Content that expresses, incites, or promotes hate based on race, gender, ethnicity, religion, nationality, sexual orientation, disability status, or caste.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

Hateful content that also includes violence or serious harm towards the targeted group.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

Content that promotes, encourages, or depicts acts of self-harm, such as suicide, cutting, and eating disorders.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

Content that encourages performing acts of self-harm, such as suicide, cutting, and eating disorders, or that gives instructions or advice on how to commit such acts.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

Content where the speaker expresses that they are engaging or intend to engage in acts of self-harm, such as suicide, cutting, and eating disorders.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

Content meant to arouse sexual excitement, such as the description of sexual activity, or that promotes sexual services (excluding sex education and wellness).

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

Sexual content that includes an individual who is under 18 years old.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

Content that promotes or glorifies violence or celebrates the suffering or humiliation of others.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

Violent content that depicts death, violence, or serious physical injury in extreme graphic detail.

+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.moderation/-categories/self-harm-instructions.html b/openai-core/com.aallam.openai.api.moderation/-categories/self-harm-instructions.html new file mode 100644 index 00000000..797f2fa0 --- /dev/null +++ b/openai-core/com.aallam.openai.api.moderation/-categories/self-harm-instructions.html @@ -0,0 +1,66 @@ + + + + + selfHarmInstructions + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

selfHarmInstructions

+
+

Content that encourages performing acts of self-harm, such as suicide, cutting, and eating disorders, or that gives instructions or advice on how to commit such acts.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.moderation/-categories/self-harm-intent.html b/openai-core/com.aallam.openai.api.moderation/-categories/self-harm-intent.html new file mode 100644 index 00000000..96cfcb31 --- /dev/null +++ b/openai-core/com.aallam.openai.api.moderation/-categories/self-harm-intent.html @@ -0,0 +1,66 @@ + + + + + selfHarmIntent + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

selfHarmIntent

+
+

Content where the speaker expresses that they are engaging or intend to engage in acts of self-harm, such as suicide, cutting, and eating disorders.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.moderation/-categories/self-harm.html b/openai-core/com.aallam.openai.api.moderation/-categories/self-harm.html new file mode 100644 index 00000000..9d6e1613 --- /dev/null +++ b/openai-core/com.aallam.openai.api.moderation/-categories/self-harm.html @@ -0,0 +1,66 @@ + + + + + selfHarm + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

selfHarm

+
+

Content that promotes, encourages, or depicts acts of self-harm, such as suicide, cutting, and eating disorders.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.moderation/-categories/sexual-minors.html b/openai-core/com.aallam.openai.api.moderation/-categories/sexual-minors.html new file mode 100644 index 00000000..2007f321 --- /dev/null +++ b/openai-core/com.aallam.openai.api.moderation/-categories/sexual-minors.html @@ -0,0 +1,66 @@ + + + + + sexualMinors + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

sexualMinors

+
+

Sexual content that includes an individual who is under 18 years old.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.moderation/-categories/sexual.html b/openai-core/com.aallam.openai.api.moderation/-categories/sexual.html new file mode 100644 index 00000000..4d6ba3cd --- /dev/null +++ b/openai-core/com.aallam.openai.api.moderation/-categories/sexual.html @@ -0,0 +1,66 @@ + + + + + sexual + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

sexual

+
+

Content meant to arouse sexual excitement, such as the description of sexual activity, or that promotes sexual services (excluding sex education and wellness).

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.moderation/-categories/violence-graphic.html b/openai-core/com.aallam.openai.api.moderation/-categories/violence-graphic.html new file mode 100644 index 00000000..a3a8e88f --- /dev/null +++ b/openai-core/com.aallam.openai.api.moderation/-categories/violence-graphic.html @@ -0,0 +1,66 @@ + + + + + violenceGraphic + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

violenceGraphic

+
+

Violent content that depicts death, violence, or serious physical injury in extreme graphic detail.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.moderation/-categories/violence.html b/openai-core/com.aallam.openai.api.moderation/-categories/violence.html new file mode 100644 index 00000000..9da98b94 --- /dev/null +++ b/openai-core/com.aallam.openai.api.moderation/-categories/violence.html @@ -0,0 +1,66 @@ + + + + + violence + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

violence

+
+

Content that promotes or glorifies violence or celebrates the suffering or humiliation of others.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.moderation/-category-scores/-category-scores.html b/openai-core/com.aallam.openai.api.moderation/-category-scores/-category-scores.html new file mode 100644 index 00000000..109775a5 --- /dev/null +++ b/openai-core/com.aallam.openai.api.moderation/-category-scores/-category-scores.html @@ -0,0 +1,66 @@ + + + + + CategoryScores + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

CategoryScores

+
+
constructor(hate: Double, hateThreatening: Double, selfHarm: Double, sexual: Double, sexualMinors: Double, violence: Double, violenceGraphic: Double, harassment: Double, harassmentThreatening: Double, selfHarmIntent: Double, selfHarmInstructions: Double)
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.moderation/-category-scores/harassment-threatening.html b/openai-core/com.aallam.openai.api.moderation/-category-scores/harassment-threatening.html new file mode 100644 index 00000000..30359dde --- /dev/null +++ b/openai-core/com.aallam.openai.api.moderation/-category-scores/harassment-threatening.html @@ -0,0 +1,66 @@ + + + + + harassmentThreatening + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

harassmentThreatening

+
+

The score for the category 'harassment/threatening'.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.moderation/-category-scores/harassment.html b/openai-core/com.aallam.openai.api.moderation/-category-scores/harassment.html new file mode 100644 index 00000000..7f2878f3 --- /dev/null +++ b/openai-core/com.aallam.openai.api.moderation/-category-scores/harassment.html @@ -0,0 +1,66 @@ + + + + + harassment + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

harassment

+
+

The score for the category 'harassment'.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.moderation/-category-scores/hate-threatening.html b/openai-core/com.aallam.openai.api.moderation/-category-scores/hate-threatening.html new file mode 100644 index 00000000..66c4b5b1 --- /dev/null +++ b/openai-core/com.aallam.openai.api.moderation/-category-scores/hate-threatening.html @@ -0,0 +1,66 @@ + + + + + hateThreatening + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

hateThreatening

+
+

Hateful content that also includes violence or serious harm towards the targeted group.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.moderation/-category-scores/hate.html b/openai-core/com.aallam.openai.api.moderation/-category-scores/hate.html new file mode 100644 index 00000000..951d35b2 --- /dev/null +++ b/openai-core/com.aallam.openai.api.moderation/-category-scores/hate.html @@ -0,0 +1,66 @@ + + + + + hate + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

hate

+
+

Content that expresses, incites, or promotes hate based on race, gender, ethnicity, religion, nationality, sexual orientation, disability status, or caste.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.moderation/-category-scores/index.html b/openai-core/com.aallam.openai.api.moderation/-category-scores/index.html new file mode 100644 index 00000000..78b8ad47 --- /dev/null +++ b/openai-core/com.aallam.openai.api.moderation/-category-scores/index.html @@ -0,0 +1,259 @@ + + + + + CategoryScores + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

CategoryScores

+
@Serializable
class CategoryScores(val hate: Double, val hateThreatening: Double, val selfHarm: Double, val sexual: Double, val sexualMinors: Double, val violence: Double, val violenceGraphic: Double, val harassment: Double, val harassmentThreatening: Double, val selfHarmIntent: Double, val selfHarmInstructions: Double)
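In the same spirit, a sketch that picks the highest of the score fields listed below; the helper is hypothetical and the labels reuse the category names from the per-field descriptions.

import com.aallam.openai.api.moderation.CategoryScores

// Hypothetical helper returning the label and value of the highest-scoring category.
fun topCategory(s: CategoryScores): Pair<String, Double> =
    listOf(
        "hate" to s.hate,
        "hate/threatening" to s.hateThreatening,
        "harassment" to s.harassment,
        "harassment/threatening" to s.harassmentThreatening,
        "self-harm" to s.selfHarm,
        "self-harm/intent" to s.selfHarmIntent,
        "self-harm/instructions" to s.selfHarmInstructions,
        "sexual" to s.sexual,
        "sexual/minors" to s.sexualMinors,
        "violence" to s.violence,
        "violence/graphic" to s.violenceGraphic,
    ).maxByOrNull { it.second }!! // the list is never empty, so !! cannot fail here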
+
+
+
+
+
+

Constructors

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
constructor(hate: Double, hateThreatening: Double, selfHarm: Double, sexual: Double, sexualMinors: Double, violence: Double, violenceGraphic: Double, harassment: Double, harassmentThreatening: Double, selfHarmIntent: Double, selfHarmInstructions: Double)
+
+
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+

The score for the category 'harassment'.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

The score for the category 'harassment/threatening'.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

Content that expresses, incites, or promotes hate based on race, gender, ethnicity, religion, nationality, sexual orientation, disability status, or caste.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

Hateful content that also includes violence or serious harm towards the targeted group.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

Content that promotes, encourages, or depicts acts of self-harm, such as suicide, cutting, and eating disorders.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

The score for the category 'self-harm/instructions'.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

The score for the category 'self-harm/intent'.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

Content meant to arouse sexual excitement, such as the description of sexual activity, or that promotes sexual services (excluding sex education and wellness).

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

Sexual content that includes an individual who is under 18 years old.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

Content that promotes or glorifies violence or celebrates the suffering or humiliation of others.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

Violent content that depicts death, violence, or serious physical injury in extreme graphic detail.

+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.moderation/-category-scores/self-harm-instructions.html b/openai-core/com.aallam.openai.api.moderation/-category-scores/self-harm-instructions.html new file mode 100644 index 00000000..9860e8f1 --- /dev/null +++ b/openai-core/com.aallam.openai.api.moderation/-category-scores/self-harm-instructions.html @@ -0,0 +1,66 @@ + + + + + selfHarmInstructions + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

selfHarmInstructions

+
+

The score for the category 'self-harm/instructions'.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.moderation/-category-scores/self-harm-intent.html b/openai-core/com.aallam.openai.api.moderation/-category-scores/self-harm-intent.html new file mode 100644 index 00000000..7342df32 --- /dev/null +++ b/openai-core/com.aallam.openai.api.moderation/-category-scores/self-harm-intent.html @@ -0,0 +1,66 @@ + + + + + selfHarmIntent + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

selfHarmIntent

+
+

The score for the category 'self-harm/intent'.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.moderation/-category-scores/self-harm.html b/openai-core/com.aallam.openai.api.moderation/-category-scores/self-harm.html new file mode 100644 index 00000000..3be54348 --- /dev/null +++ b/openai-core/com.aallam.openai.api.moderation/-category-scores/self-harm.html @@ -0,0 +1,66 @@ + + + + + selfHarm + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

selfHarm

+
+

Content that promotes, encourages, or depicts acts of self-harm, such as suicide, cutting, and eating disorders.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.moderation/-category-scores/sexual-minors.html b/openai-core/com.aallam.openai.api.moderation/-category-scores/sexual-minors.html new file mode 100644 index 00000000..541882aa --- /dev/null +++ b/openai-core/com.aallam.openai.api.moderation/-category-scores/sexual-minors.html @@ -0,0 +1,66 @@ + + + + + sexualMinors + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

sexualMinors

+
+

Sexual content that includes an individual who is under 18 years old.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.moderation/-category-scores/sexual.html b/openai-core/com.aallam.openai.api.moderation/-category-scores/sexual.html new file mode 100644 index 00000000..740a21ec --- /dev/null +++ b/openai-core/com.aallam.openai.api.moderation/-category-scores/sexual.html @@ -0,0 +1,66 @@ + + + + + sexual + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

sexual

+
+

Content meant to arouse sexual excitement, such as the description of sexual activity, or that promotes sexual services (excluding sex education and wellness).

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.moderation/-category-scores/violence-graphic.html b/openai-core/com.aallam.openai.api.moderation/-category-scores/violence-graphic.html new file mode 100644 index 00000000..190c7bb5 --- /dev/null +++ b/openai-core/com.aallam.openai.api.moderation/-category-scores/violence-graphic.html @@ -0,0 +1,66 @@ + + + + + violenceGraphic + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

violenceGraphic

+
+

Violent content that depicts death, violence, or serious physical injury in extreme graphic detail.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.moderation/-category-scores/violence.html b/openai-core/com.aallam.openai.api.moderation/-category-scores/violence.html new file mode 100644 index 00000000..02162568 --- /dev/null +++ b/openai-core/com.aallam.openai.api.moderation/-category-scores/violence.html @@ -0,0 +1,66 @@ + + + + + violence + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

violence

+
+

Content that promotes or glorifies violence or celebrates the suffering or humiliation of others.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.moderation/-moderation-model/-companion/-latest.html b/openai-core/com.aallam.openai.api.moderation/-moderation-model/-companion/-latest.html new file mode 100644 index 00000000..c6a906da --- /dev/null +++ b/openai-core/com.aallam.openai.api.moderation/-moderation-model/-companion/-latest.html @@ -0,0 +1,66 @@ + + + + + Latest + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

Latest

+
+

Advance notice is provided before updating this model.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.moderation/-moderation-model/-companion/-stable.html b/openai-core/com.aallam.openai.api.moderation/-moderation-model/-companion/-stable.html new file mode 100644 index 00000000..7ad98218 --- /dev/null +++ b/openai-core/com.aallam.openai.api.moderation/-moderation-model/-companion/-stable.html @@ -0,0 +1,66 @@ + + + + + Stable + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

Stable

+
+

Ensures you are always using the most accurate model.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.moderation/-moderation-model/-companion/index.html b/openai-core/com.aallam.openai.api.moderation/-moderation-model/-companion/index.html new file mode 100644 index 00000000..c4848562 --- /dev/null +++ b/openai-core/com.aallam.openai.api.moderation/-moderation-model/-companion/index.html @@ -0,0 +1,105 @@ + + + + + Companion + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

Companion

+
object Companion
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+

Advance notice is provided before updating this model.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

Ensures you are always using the most accurate model.

+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.moderation/-moderation-model/-moderation-model.html b/openai-core/com.aallam.openai.api.moderation/-moderation-model/-moderation-model.html new file mode 100644 index 00000000..91dfb5e8 --- /dev/null +++ b/openai-core/com.aallam.openai.api.moderation/-moderation-model/-moderation-model.html @@ -0,0 +1,66 @@ + + + + + ModerationModel + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

ModerationModel

+
+
constructor(model: String)
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.moderation/-moderation-model/index.html b/openai-core/com.aallam.openai.api.moderation/-moderation-model/index.html new file mode 100644 index 00000000..62bd10f4 --- /dev/null +++ b/openai-core/com.aallam.openai.api.moderation/-moderation-model/index.html @@ -0,0 +1,128 @@ + + + + + ModerationModel + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

ModerationModel

+
@Serializable
value class ModerationModel(val model: String)

Moderation model.
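A minimal sketch of the two ways to pick a model: one of the companion values documented below, or a raw name wrapped directly (the string literal is illustrative only).

import com.aallam.openai.api.moderation.ModerationModel

val latest: ModerationModel = ModerationModel.Latest // auto-updated; advance notice precedes changes
val custom: ModerationModel = ModerationModel("text-moderation-stable") // illustrative raw model name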

+
+
+
+
+
+

Constructors

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
constructor(model: String)
+
+
+
+
+
+
+
+

Types

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
object Companion
+
+
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+ +
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.moderation/-moderation-model/model.html b/openai-core/com.aallam.openai.api.moderation/-moderation-model/model.html new file mode 100644 index 00000000..84b3c0ae --- /dev/null +++ b/openai-core/com.aallam.openai.api.moderation/-moderation-model/model.html @@ -0,0 +1,66 @@ + + + + + model + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

model

+
+ +
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.moderation/-moderation-request-builder/-moderation-request-builder.html b/openai-core/com.aallam.openai.api.moderation/-moderation-request-builder/-moderation-request-builder.html new file mode 100644 index 00000000..443d90ee --- /dev/null +++ b/openai-core/com.aallam.openai.api.moderation/-moderation-request-builder/-moderation-request-builder.html @@ -0,0 +1,66 @@ + + + + + ModerationRequestBuilder + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

ModerationRequestBuilder

+
+
constructor()
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.moderation/-moderation-request-builder/build.html b/openai-core/com.aallam.openai.api.moderation/-moderation-request-builder/build.html new file mode 100644 index 00000000..a1d89a0e --- /dev/null +++ b/openai-core/com.aallam.openai.api.moderation/-moderation-request-builder/build.html @@ -0,0 +1,66 @@ + + + + + build + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

build

+
+

Creates the ModerationRequest instance.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.moderation/-moderation-request-builder/index.html b/openai-core/com.aallam.openai.api.moderation/-moderation-request-builder/index.html new file mode 100644 index 00000000..8c21e2f6 --- /dev/null +++ b/openai-core/com.aallam.openai.api.moderation/-moderation-request-builder/index.html @@ -0,0 +1,143 @@ + + + + + ModerationRequestBuilder + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

ModerationRequestBuilder

+

Builder for creating a ModerationRequest instance.

+
+
+
+
+
+

Constructors

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
constructor()
+
+
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+

The input text to classify.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

Moderation model. Defaults to ModerationModel.Latest.

+
+
+
+
+
+
+
+

Functions

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+

Creates the ModerationRequest instance.

+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.moderation/-moderation-request-builder/input.html b/openai-core/com.aallam.openai.api.moderation/-moderation-request-builder/input.html new file mode 100644 index 00000000..81573c7c --- /dev/null +++ b/openai-core/com.aallam.openai.api.moderation/-moderation-request-builder/input.html @@ -0,0 +1,66 @@ + + + + + input + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

input

+
+

The input text to classify.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.moderation/-moderation-request-builder/model.html b/openai-core/com.aallam.openai.api.moderation/-moderation-request-builder/model.html new file mode 100644 index 00000000..a565acd8 --- /dev/null +++ b/openai-core/com.aallam.openai.api.moderation/-moderation-request-builder/model.html @@ -0,0 +1,66 @@ + + + + + model + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

model

+
+

Moderation model. Defaults to ModerationModel.Latest.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.moderation/-moderation-request/-moderation-request.html b/openai-core/com.aallam.openai.api.moderation/-moderation-request/-moderation-request.html new file mode 100644 index 00000000..31591fd1 --- /dev/null +++ b/openai-core/com.aallam.openai.api.moderation/-moderation-request/-moderation-request.html @@ -0,0 +1,66 @@ + + + + + ModerationRequest + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

ModerationRequest

+
+
constructor(input: List<String>, model: ModerationModel? = null)
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.moderation/-moderation-request/index.html b/openai-core/com.aallam.openai.api.moderation/-moderation-request/index.html new file mode 100644 index 00000000..f473db3e --- /dev/null +++ b/openai-core/com.aallam.openai.api.moderation/-moderation-request/index.html @@ -0,0 +1,124 @@ + + + + + ModerationRequest + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

ModerationRequest

+
@Serializable
class ModerationRequest(val input: List<String>, val model: ModerationModel? = null)

Request to classify if text violates OpenAI's Content Policy.
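A minimal sketch of constructing the request directly from the signature above; the input text is only an example, and model can be omitted because it defaults to null.

import com.aallam.openai.api.moderation.ModerationModel
import com.aallam.openai.api.moderation.ModerationRequest

val request = ModerationRequest(
    input = listOf("I want to kill them."), // illustrative input to classify
    model = ModerationModel.Latest,         // optional; omit to let the service use its default
)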

+
+
+
+
+
+

Constructors

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
constructor(input: List<String>, model: ModerationModel? = null)
+
+
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+

The input text to classify.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
val model: ModerationModel? = null

Moderation model. Defaults to ModerationModel.Latest.

+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.moderation/-moderation-request/input.html b/openai-core/com.aallam.openai.api.moderation/-moderation-request/input.html new file mode 100644 index 00000000..ffa03679 --- /dev/null +++ b/openai-core/com.aallam.openai.api.moderation/-moderation-request/input.html @@ -0,0 +1,66 @@ + + + + + input + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

input

+
+

The input text to classify.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.moderation/-moderation-request/model.html b/openai-core/com.aallam.openai.api.moderation/-moderation-request/model.html new file mode 100644 index 00000000..1a3f284a --- /dev/null +++ b/openai-core/com.aallam.openai.api.moderation/-moderation-request/model.html @@ -0,0 +1,66 @@ + + + + + model + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

model

+
+
val model: ModerationModel? = null

Moderation model. Defaults to ModerationModel.Latest.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.moderation/-moderation-result/-moderation-result.html b/openai-core/com.aallam.openai.api.moderation/-moderation-result/-moderation-result.html new file mode 100644 index 00000000..dc18975d --- /dev/null +++ b/openai-core/com.aallam.openai.api.moderation/-moderation-result/-moderation-result.html @@ -0,0 +1,66 @@ + + + + + ModerationResult + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

ModerationResult

+
+
constructor(categories: Categories, categoryScores: CategoryScores, flagged: Boolean)
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.moderation/-moderation-result/categories.html b/openai-core/com.aallam.openai.api.moderation/-moderation-result/categories.html new file mode 100644 index 00000000..2b681a14 --- /dev/null +++ b/openai-core/com.aallam.openai.api.moderation/-moderation-result/categories.html @@ -0,0 +1,66 @@ + + + + + categories + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

categories

+
+

Per-category binary content policy violation flags.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.moderation/-moderation-result/category-scores.html b/openai-core/com.aallam.openai.api.moderation/-moderation-result/category-scores.html new file mode 100644 index 00000000..7a7952f4 --- /dev/null +++ b/openai-core/com.aallam.openai.api.moderation/-moderation-result/category-scores.html @@ -0,0 +1,66 @@ + + + + + categoryScores + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

categoryScores

+
+

Per-category raw scores output by the model, denoting the model's confidence that the input violates OpenAI's policy for the category. The value is between 0 and 1, where higher values denote higher confidence. The scores should not be interpreted as probabilities.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.moderation/-moderation-result/flagged.html b/openai-core/com.aallam.openai.api.moderation/-moderation-result/flagged.html new file mode 100644 index 00000000..5993424a --- /dev/null +++ b/openai-core/com.aallam.openai.api.moderation/-moderation-result/flagged.html @@ -0,0 +1,66 @@ + + + + + flagged + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

flagged

+
+

Set to true if the model classifies the content as violating OpenAI's content policy, false otherwise.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.moderation/-moderation-result/index.html b/openai-core/com.aallam.openai.api.moderation/-moderation-result/index.html new file mode 100644 index 00000000..7a6bf0a5 --- /dev/null +++ b/openai-core/com.aallam.openai.api.moderation/-moderation-result/index.html @@ -0,0 +1,139 @@ + + + + + ModerationResult + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

ModerationResult

+
@Serializable
class ModerationResult(val categories: Categories, val categoryScores: CategoryScores, val flagged: Boolean)
+
+
+
+
+
+

Constructors

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
constructor(categories: Categories, categoryScores: CategoryScores, flagged: Boolean)
+
+
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+

Per-category binary content policy violation flags.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

Per-category raw scores output by the model, denoting the model's confidence that the input violates OpenAI's policy for the category. The value is between 0 and 1, where higher values denote higher confidence. The scores should not be interpreted as probabilities.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

Set to true if the model classifies the content as violating OpenAI's content policy, false otherwise.

+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.moderation/-text-moderation/-text-moderation.html b/openai-core/com.aallam.openai.api.moderation/-text-moderation/-text-moderation.html new file mode 100644 index 00000000..8d03a061 --- /dev/null +++ b/openai-core/com.aallam.openai.api.moderation/-text-moderation/-text-moderation.html @@ -0,0 +1,66 @@ + + + + + TextModeration + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

TextModeration

+
+
constructor(id: String, model: ModerationModel, results: List<ModerationResult>)
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.moderation/-text-moderation/id.html b/openai-core/com.aallam.openai.api.moderation/-text-moderation/id.html new file mode 100644 index 00000000..d40205ef --- /dev/null +++ b/openai-core/com.aallam.openai.api.moderation/-text-moderation/id.html @@ -0,0 +1,66 @@ + + + + + id + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

id

+
+
val id: String

Moderation response id.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.moderation/-text-moderation/index.html b/openai-core/com.aallam.openai.api.moderation/-text-moderation/index.html new file mode 100644 index 00000000..9f332eea --- /dev/null +++ b/openai-core/com.aallam.openai.api.moderation/-text-moderation/index.html @@ -0,0 +1,139 @@ + + + + + TextModeration + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

TextModeration

+
@Serializable
class TextModeration(val id: String, val model: ModerationModel, val results: List<ModerationResult>)

Moderation response.
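A sketch of walking a TextModeration response through the properties listed below; the helper and its output are made up for illustration.

import com.aallam.openai.api.moderation.TextModeration

// Hypothetical helper printing one line per flagged result.
fun reportFlags(moderation: TextModeration) {
    moderation.results.forEachIndexed { index, result ->
        if (result.flagged) {
            println("result #$index flagged (violence score=${result.categoryScores.violence})")
        }
    }
}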

+
+
+
+
+
+

Constructors

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
constructor(id: String, model: ModerationModel, results: List<ModerationResult>)
+
+
+
+
+
+
+
+

Properties

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
val id: String

Moderation response id.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

Model used for moderation response.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

Moderation results.

+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.moderation/-text-moderation/model.html b/openai-core/com.aallam.openai.api.moderation/-text-moderation/model.html new file mode 100644 index 00000000..d74cc15a --- /dev/null +++ b/openai-core/com.aallam.openai.api.moderation/-text-moderation/model.html @@ -0,0 +1,66 @@ + + + + + model + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

model

+
+

Model used for moderation response.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.moderation/-text-moderation/results.html b/openai-core/com.aallam.openai.api.moderation/-text-moderation/results.html new file mode 100644 index 00000000..6d551248 --- /dev/null +++ b/openai-core/com.aallam.openai.api.moderation/-text-moderation/results.html @@ -0,0 +1,66 @@ + + + + + results + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

results

+
+

Moderation results.

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.moderation/index.html b/openai-core/com.aallam.openai.api.moderation/index.html new file mode 100644 index 00000000..1f40d041 --- /dev/null +++ b/openai-core/com.aallam.openai.api.moderation/index.html @@ -0,0 +1,198 @@ + + + + + com.aallam.openai.api.moderation + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

Package-level declarations

+
+
+
+
+
+

Types

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
@Serializable
class Categories(val hate: Boolean, val hateThreatening: Boolean, val selfHarm: Boolean, val sexual: Boolean, val sexualMinors: Boolean, val violence: Boolean, val violenceGraphic: Boolean, val harassment: Boolean, val harassmentThreatening: Boolean, val selfHarmIntent: Boolean, val selfHarmInstructions: Boolean)
+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
@Serializable
class CategoryScores(val hate: Double, val hateThreatening: Double, val selfHarm: Double, val sexual: Double, val sexualMinors: Double, val violence: Double, val violenceGraphic: Double, val harassment: Double, val harassmentThreatening: Double, val selfHarmIntent: Double, val selfHarmInstructions: Double)
+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
@Serializable
value class ModerationModel(val model: String)

Moderation model.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
@Serializable
class ModerationRequest(val input: List<String>, val model: ModerationModel? = null)

Request to classify if text violates OpenAI's Content Policy.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+

Builder for creating a ModerationRequest instance.

+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
@Serializable
class ModerationResult(val categories: Categories, val categoryScores: CategoryScores, val flagged: Boolean)
+
+
+
+
+ +
+
+
+ + +
Link copied to clipboard
+
+
+
+
@Serializable
class TextModeration(val id: String, val model: ModerationModel, val results: List<ModerationResult>)

Moderation response.

+
+
+
+
+
+
+
+

Functions

+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+

Request to classify if text violates OpenAI's Content Policy.

+
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api.moderation/moderation-request.html b/openai-core/com.aallam.openai.api.moderation/moderation-request.html new file mode 100644 index 00000000..2f455434 --- /dev/null +++ b/openai-core/com.aallam.openai.api.moderation/moderation-request.html @@ -0,0 +1,66 @@ + + + + + moderationRequest + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

moderationRequest

+
+

Request to classify if text violates OpenAI's Content Policy.
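A sketch of the builder-style call, assuming moderationRequest accepts a ModerationRequestBuilder lambda (the usual shape of the DSL helpers in this API) and that the builder's input property takes a List<String>; the input text is illustrative.

import com.aallam.openai.api.moderation.ModerationModel
import com.aallam.openai.api.moderation.moderationRequest

val request = moderationRequest {
    input = listOf("I want to kill them.") // assumed List<String> builder property
    model = ModerationModel.Latest
}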

+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api/-beta-open-a-i/index.html b/openai-core/com.aallam.openai.api/-beta-open-a-i/index.html new file mode 100644 index 00000000..eed8ad92 --- /dev/null +++ b/openai-core/com.aallam.openai.api/-beta-open-a-i/index.html @@ -0,0 +1,70 @@ + + + + + BetaOpenAI + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

BetaOpenAI

+

This annotation marks a library API as beta.

Any usage of a declaration annotated with @BetaOpenAI must be accepted either by annotating that usage with the OptIn annotation, e.g. @OptIn(BetaOpenAI::class), or by using the compiler argument -Xopt-in=com.aallam.openai.api.BetaOpenAI.
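A minimal sketch of the opt-in described above; the function is a placeholder.

import com.aallam.openai.api.BetaOpenAI

@OptIn(BetaOpenAI::class)
fun useBetaApis() {
    // calls to @BetaOpenAI-annotated declarations are accepted inside this function
}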

+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api/-experimental-open-a-i/index.html b/openai-core/com.aallam.openai.api/-experimental-open-a-i/index.html new file mode 100644 index 00000000..a388be29 --- /dev/null +++ b/openai-core/com.aallam.openai.api/-experimental-open-a-i/index.html @@ -0,0 +1,70 @@ + + + + + ExperimentalOpenAI + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

ExperimentalOpenAI

+

This annotation marks a library API as experimental.

Any usage of a declaration annotated with @ExperimentalOpenAI must be accepted either by annotating that usage with the OptIn annotation, e.g. @OptIn(ExperimentalOpenAI::class), or by using the compiler argument -Xopt-in=com.aallam.openai.api.ExperimentalOpenAI.

+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api/-internal-open-a-i/index.html b/openai-core/com.aallam.openai.api/-internal-open-a-i/index.html new file mode 100644 index 00000000..ae26b239 --- /dev/null +++ b/openai-core/com.aallam.openai.api/-internal-open-a-i/index.html @@ -0,0 +1,70 @@ + + + + + InternalOpenAI + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

InternalOpenAI

+

API marked with this annotation is internal, and it is not intended to be used outside the OpenAI API client. It could be modified or removed without any notice. Using it could cause undefined behaviour and/or unexpected effects.

+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api/-legacy-open-a-i/index.html b/openai-core/com.aallam.openai.api/-legacy-open-a-i/index.html new file mode 100644 index 00000000..3029002e --- /dev/null +++ b/openai-core/com.aallam.openai.api/-legacy-open-a-i/index.html @@ -0,0 +1,70 @@ + + + + + LegacyOpenAI + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

LegacyOpenAI

+

This annotation marks a library API as legacy.

Any usage of a declaration annotated with @LegacyOpenAI must be accepted either by annotating that usage with the OptIn annotation, e.g. @OptIn(LegacyOpenAI::class), or by using the compiler argument -Xopt-in=com.aallam.openai.api.LegacyOpenAI.

+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api/-open-a-i-dsl/index.html b/openai-core/com.aallam.openai.api/-open-a-i-dsl/index.html new file mode 100644 index 00000000..bf008612 --- /dev/null +++ b/openai-core/com.aallam.openai.api/-open-a-i-dsl/index.html @@ -0,0 +1,70 @@ + + + + + OpenAIDsl + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

OpenAIDsl

+ +
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/com.aallam.openai.api/index.html b/openai-core/com.aallam.openai.api/index.html new file mode 100644 index 00000000..d4669c56 --- /dev/null +++ b/openai-core/com.aallam.openai.api/index.html @@ -0,0 +1,149 @@ + + + + + com.aallam.openai.api + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

Package-level declarations

+
+
+
+
+
+

Types

+
+ + + + +
+
+
+ + +
Link copied to clipboard
+
+
+
+

API marked with this annotation is internal, and it is not intended to be used outside the OpenAI API client. It could be modified or removed without any notice. Using it could cause undefined behaviour and/or unexpected effects.

+
+
+
+
+ + + +
+
+
+ + +
Link copied to clipboard
+
+
+
+ +
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/index.html b/openai-core/index.html new file mode 100644 index 00000000..686d8d82 --- /dev/null +++ b/openai-core/index.html @@ -0,0 +1,373 @@ + + + + + openai-core + + + + + + + + + + + + + + + +
+
+
+
+
+
+ +
+

openai-core

+
+

Packages

+
+
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
common
+
+
+
+
+
+
+ +
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
common
+
+
+
+
+
+
+ +
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
common
+
+
+
+
+
+
+ +
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
common
+
+
+
+
+
+
+ +
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
common
+
+
+
+
+
+
+ +
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
common
+
+
+
+
+
+
+ +
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
common
+
+
+
+
+
+
+ +
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
common
+
+
+
+
+
+
+ +
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
common
+
+
+
+
+
+
+ +
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
common
+
+
+
+
+
+
+ +
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
common
+
+
+
+
+
+
+ +
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
common
+
+
+
+
+
+
+ +
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
common
+
+
+
+
+
+
+ +
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
common
+
+
+
+
+
+
+ +
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
common
+
+
+
+
+
+
+ +
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
common
+
+
+
+
+
+
+ +
+
+
+
+ + +
Link copied to clipboard
+
+
+
+
common
+
+
+
+
+
+
+
+
+ +
+
+ + + diff --git a/openai-core/navigation.html b/openai-core/navigation.html new file mode 100644 index 00000000..65cf8e7d --- /dev/null +++ b/openai-core/navigation.html @@ -0,0 +1,967 @@ +
+
+ +
+ +
+
+ Audio +
+
+
+
+ Chat +
+
+
+
+ Closeable +
+
+
+ +
+
+
+ Edits +
+
+
+ +
+
+
+ Files +
+
+
+
+ FineTunes +
+
+
+ +
+
+
+ Images +
+
+
+ +
+
+
+ Models +
+
+
+ +
+
+
+ OpenAI +
+
+
+
+ OpenAI() +
+
+
+ +
+
+ +
+
+ Companion +
+
+
+
+ +
+
+ Http +
+
+
+
+ Socks +
+
+
+
+ +
+
+ +
+
+ +
+ +
+ +
+ +
+ +
+
+ +
+
+
+ OpenAIDsl +
+
+
+ +
+ +
+ +
+
+
+ ChatChunk +
+
+
+ +
+ + + + + + + +
+
+ ChatDelta +
+
+
+ +
+
+ +
+ + +
+
+ ChatRole +
+
+
+ Companion +
+
+
+
+ +
+
+ +
+
+ Companion +
+
+
+
+ Default +
+
+
+
+ Named +
+
+
+ +
+ +
+
+ Companion +
+
+ +
+
+ +
+ +
+ +
+
+ +
+
+ Companion +
+
+
+
+ +
+
+ +
+
+ +
+
+
+ Status +
+
+
+ Companion +
+
+
+
+
+ Usage +
+
+
+
+ +
+
+ Edit +
+
+
+ +
+
+ +
+ +
+ + +
+ +
+
+ File +
+
+
+
+ FileId +
+
+
+ +
+
+ +
+ +
+ +
+
+ +
+
+ +
+ +
+
+ Purpose +
+
+
+
+ +
+
+ FineTune +
+
+
+ +
+
+ +
+
+ +
+ + +
+ +
+
+
+ +
+
+ ErrorInfo +
+
+
+ +
+
+ +
+ + + + +
+ +
+
+ NEpochs +
+
+
+ Companion +
+
+
+
+
+
+ Level +
+
+
+ Companion +
+
+
+
+ +
+ +
+ +
+
+ +
+ +
+
+ ImageEdit +
+
+
+ +
+ +
+
+ ImageJSON +
+
+
+
+ ImageSize +
+
+
+ Companion +
+
+
+
+
+ ImageURL +
+
+
+ +
+ + +
+ +
+ +
+
+ Logger +
+
+
+ Default +
+
+
+
+ Simple +
+
+
+
+ Empty +
+
+
+
+
+ LogLevel +
+
+
+ All +
+
+
+
+ Headers +
+
+
+
+ Body +
+
+
+
+ Info +
+
+
+
+ None +
+
+
+
+
+ +
+
+ Model +
+
+
+
+ ModelId +
+
+
+ +
+
+ +
+
diff --git a/package-list b/package-list new file mode 100644 index 00000000..9083a356 --- /dev/null +++ b/package-list @@ -0,0 +1,24 @@ +$dokka.format:html-v1 +$dokka.linkExtension:html + +module:openai-client +com.aallam.openai.client +com.aallam.openai.client.extension +module:openai-core +com.aallam.openai.api +com.aallam.openai.api.audio +com.aallam.openai.api.chat +com.aallam.openai.api.completion +com.aallam.openai.api.core +com.aallam.openai.api.edits +com.aallam.openai.api.embedding +com.aallam.openai.api.exception +com.aallam.openai.api.file +com.aallam.openai.api.finetune +com.aallam.openai.api.finetuning +com.aallam.openai.api.http +com.aallam.openai.api.image +com.aallam.openai.api.image.internal +com.aallam.openai.api.logging +com.aallam.openai.api.model +com.aallam.openai.api.moderation diff --git a/scripts/clipboard.js b/scripts/clipboard.js new file mode 100644 index 00000000..b00ce246 --- /dev/null +++ b/scripts/clipboard.js @@ -0,0 +1,52 @@ +window.addEventListener('load', () => { + document.querySelectorAll('span.copy-icon').forEach(element => { + element.addEventListener('click', (el) => copyElementsContentToClipboard(element)); + }) + + document.querySelectorAll('span.anchor-icon').forEach(element => { + element.addEventListener('click', (el) => { + if(element.hasAttribute('pointing-to')){ + const location = hrefWithoutCurrentlyUsedAnchor() + '#' + element.getAttribute('pointing-to') + copyTextToClipboard(element, location) + } + }); + }) +}) + +const copyElementsContentToClipboard = (element) => { + const selection = window.getSelection(); + const range = document.createRange(); + range.selectNodeContents(element.parentNode.parentNode); + selection.removeAllRanges(); + selection.addRange(range); + + copyAndShowPopup(element, () => selection.removeAllRanges()) +} + +const copyTextToClipboard = (element, text) => { + var textarea = document.createElement("textarea"); + textarea.textContent = text; + textarea.style.position = "fixed"; + document.body.appendChild(textarea); + textarea.select(); + + copyAndShowPopup(element, () => document.body.removeChild(textarea)) +} + +const copyAndShowPopup = (element, after) => { + try { + document.execCommand('copy'); + element.nextElementSibling.classList.add('active-popup'); + setTimeout(() => { + element.nextElementSibling.classList.remove('active-popup'); + }, 1200); + } catch (e) { + console.error('Failed to write to clipboard:', e) + } + finally { + if(after) after() + } +} + +const hrefWithoutCurrentlyUsedAnchor = () => window.location.href.split('#')[0] + diff --git a/scripts/main.js b/scripts/main.js new file mode 100644 index 00000000..e5b54a36 --- /dev/null +++ b/scripts/main.js @@ -0,0 +1,44 @@ +(()=>{var e={8527:e=>{e.exports=''},5570:e=>{e.exports=''},107:e=>{e.exports=''},7224:e=>{e.exports=''},538:e=>{e.exports=''},1924:(e,n,t)=>{"use strict";var r=t(210),o=t(5559),i=o(r("String.prototype.indexOf"));e.exports=function(e,n){var t=r(e,!!n);return"function"==typeof t&&i(e,".prototype.")>-1?o(t):t}},5559:(e,n,t)=>{"use strict";var r=t(8612),o=t(210),i=o("%Function.prototype.apply%"),a=o("%Function.prototype.call%"),l=o("%Reflect.apply%",!0)||r.call(a,i),c=o("%Object.getOwnPropertyDescriptor%",!0),u=o("%Object.defineProperty%",!0),s=o("%Math.max%");if(u)try{u({},"a",{value:1})}catch(e){u=null}e.exports=function(e){var n=l(r,a,arguments);if(c&&u){var t=c(n,"length");t.configurable&&u(n,"length",{value:1+s(0,e.length-(arguments.length-1))})}return n};var f=function(){return 
l(r,i,arguments)};u?u(e.exports,"apply",{value:f}):e.exports.apply=f},4184:(e,n)=>{var t; +/*! + Copyright (c) 2018 Jed Watson. + Licensed under the MIT License (MIT), see + http://jedwatson.github.io/classnames +*/!function(){"use strict";var r={}.hasOwnProperty;function o(){for(var e=[],n=0;n{"use strict";e.exports=function(e,n){var t=this,r=t.constructor;return t.options=Object.assign({storeInstancesGlobally:!0},n||{}),t.callbacks={},t.directMap={},t.sequenceLevels={},t.resetTimer=null,t.ignoreNextKeyup=!1,t.ignoreNextKeypress=!1,t.nextExpectedAction=!1,t.element=e,t.addEvents(),t.options.storeInstancesGlobally&&r.instances.push(t),t},e.exports.prototype.bind=t(2207),e.exports.prototype.bindMultiple=t(3396),e.exports.prototype.unbind=t(9208),e.exports.prototype.trigger=t(9855),e.exports.prototype.reset=t(6214),e.exports.prototype.stopCallback=t(3450),e.exports.prototype.handleKey=t(3067),e.exports.prototype.addEvents=t(718),e.exports.prototype.bindSingle=t(8763),e.exports.prototype.getKeyInfo=t(5825),e.exports.prototype.pickBestAction=t(8608),e.exports.prototype.getReverseMap=t(3956),e.exports.prototype.getMatches=t(3373),e.exports.prototype.resetSequences=t(3346),e.exports.prototype.fireCallback=t(2684),e.exports.prototype.bindSequence=t(7103),e.exports.prototype.resetSequenceTimer=t(7309),e.exports.prototype.detach=t(7554),e.exports.instances=[],e.exports.reset=t(1822),e.exports.REVERSE_MAP=null},718:(e,n,t)=>{"use strict";e.exports=function(){var e=this,n=t(4323),r=e.element;e.eventHandler=t(9646).bind(e),n(r,"keypress",e.eventHandler),n(r,"keydown",e.eventHandler),n(r,"keyup",e.eventHandler)}},2207:e=>{"use strict";e.exports=function(e,n,t){return e=e instanceof Array?e:[e],this.bindMultiple(e,n,t),this}},3396:e=>{"use strict";e.exports=function(e,n,t){for(var r=0;r{"use strict";e.exports=function(e,n,r,o){var i=this;function a(n){return function(){i.nextExpectedAction=n,++i.sequenceLevels[e],i.resetSequenceTimer()}}function l(n){var a;i.fireCallback(r,n,e),"keyup"!==o&&(a=t(6770),i.ignoreNextKeyup=a(n)),setTimeout((function(){i.resetSequences()}),10)}i.sequenceLevels[e]=0;for(var c=0;c{"use strict";e.exports=function(e,n,t,r,o){var i=this;i.directMap[e+":"+t]=n;var a,l=(e=e.replace(/\s+/g," ")).split(" ");l.length>1?i.bindSequence(e,l,n,t):(a=i.getKeyInfo(e,t),i.callbacks[a.key]=i.callbacks[a.key]||[],i.getMatches(a.key,a.modifiers,{type:a.action},r,e,o),i.callbacks[a.key][r?"unshift":"push"]({callback:n,modifiers:a.modifiers,action:a.action,seq:r,level:o,combo:e}))}},7554:(e,n,t)=>{var r=t(4323).off;e.exports=function(){var e=this,n=e.element;r(n,"keypress",e.eventHandler),r(n,"keydown",e.eventHandler),r(n,"keyup",e.eventHandler)}},4323:e=>{function n(e,n,t,r){return!e.addEventListener&&(n="on"+n),(e.addEventListener||e.attachEvent).call(e,n,t,r),t}e.exports=n,e.exports.on=n,e.exports.off=function(e,n,t,r){return!e.removeEventListener&&(n="on"+n),(e.removeEventListener||e.detachEvent).call(e,n,t,r),t}},2684:(e,n,t)=>{"use strict";e.exports=function(e,n,r,o){this.stopCallback(n,n.target||n.srcElement,r,o)||!1===e(n,r)&&(t(1350)(n),t(6103)(n))}},5825:(e,n,t)=>{"use strict";e.exports=function(e,n){var r,o,i,a,l,c,u=[];for(r=t(4520)(e),a=t(7549),l=t(5355),c=t(8581),i=0;i{"use strict";e.exports=function(e,n,r,o,i,a){var l,c,u,s,f=this,p=[],d=r.type;"keypress"!==d||r.code&&"Arrow"===r.code.slice(0,5)||(f.callbacks["any-character"]||[]).forEach((function(e){p.push(e)}));if(!f.callbacks[e])return p;for(u=t(8581),"keyup"===d&&u(e)&&(n=[e]),l=0;l{"use strict";e.exports=function(){var 
e,n=this.constructor;if(!n.REVERSE_MAP)for(var r in n.REVERSE_MAP={},e=t(4766))r>95&&r<112||e.hasOwnProperty(r)&&(n.REVERSE_MAP[e[r]]=r);return n.REVERSE_MAP}},3067:(e,n,t)=>{"use strict";e.exports=function(e,n,r){var o,i,a,l,c=this,u={},s=0,f=!1;for(o=c.getMatches(e,n,r),i=0;i{"use strict";e.exports=function(e){var n,r=this;"number"!=typeof e.which&&(e.which=e.keyCode);var o=t(6770)(e);void 0!==o&&("keyup"!==e.type||r.ignoreNextKeyup!==o?(n=t(4610),r.handleKey(o,n(e),e)):r.ignoreNextKeyup=!1)}},5532:e=>{"use strict";e.exports=function(e,n){return e.sort().join(",")===n.sort().join(",")}},8608:e=>{"use strict";e.exports=function(e,n,t){return t||(t=this.getReverseMap()[e]?"keydown":"keypress"),"keypress"===t&&n.length&&(t="keydown"),t}},6214:e=>{"use strict";e.exports=function(){return this.callbacks={},this.directMap={},this}},7309:e=>{"use strict";e.exports=function(){var e=this;clearTimeout(e.resetTimer),e.resetTimer=setTimeout((function(){e.resetSequences()}),1e3)}},3346:e=>{"use strict";e.exports=function(e){var n=this;e=e||{};var t,r=!1;for(t in n.sequenceLevels)e[t]?r=!0:n.sequenceLevels[t]=0;r||(n.nextExpectedAction=!1)}},3450:e=>{"use strict";e.exports=function(e,n){if((" "+n.className+" ").indexOf(" combokeys ")>-1)return!1;var t=n.tagName.toLowerCase();return"input"===t||"select"===t||"textarea"===t||n.isContentEditable}},9855:e=>{"use strict";e.exports=function(e,n){return this.directMap[e+":"+n]&&this.directMap[e+":"+n]({},e),this}},9208:e=>{"use strict";e.exports=function(e,n){return this.bind(e,(function(){}),n)}},1822:e=>{"use strict";e.exports=function(){this.instances.forEach((function(e){e.reset()}))}},6770:(e,n,t)=>{"use strict";e.exports=function(e){var n,r;if(n=t(4766),r=t(5295),"keypress"===e.type){var o=String.fromCharCode(e.which);return e.shiftKey||(o=o.toLowerCase()),o}return void 0!==n[e.which]?n[e.which]:void 0!==r[e.which]?r[e.which]:String.fromCharCode(e.which).toLowerCase()}},4610:e=>{"use strict";e.exports=function(e){var n=[];return e.shiftKey&&n.push("shift"),e.altKey&&n.push("alt"),e.ctrlKey&&n.push("ctrl"),e.metaKey&&n.push("meta"),n}},8581:e=>{"use strict";e.exports=function(e){return"shift"===e||"ctrl"===e||"alt"===e||"meta"===e}},4520:e=>{"use strict";e.exports=function(e){return"+"===e?["+"]:e.split("+")}},1350:e=>{"use strict";e.exports=function(e){e.preventDefault?e.preventDefault():e.returnValue=!1}},5355:e=>{"use strict";e.exports={"~":"`","!":"1","@":"2","#":"3",$:"4","%":"5","^":"6","&":"7","*":"8","(":"9",")":"0",_:"-","+":"=",":":";",'"':"'","<":",",">":".","?":"/","|":"\\"}},7549:e=>{"use strict";e.exports={option:"alt",command:"meta",return:"enter",escape:"esc",mod:/Mac|iPod|iPhone|iPad/.test(navigator.platform)?"meta":"ctrl"}},5295:e=>{"use strict";e.exports={106:"*",107:"plus",109:"minus",110:".",111:"/",186:";",187:"=",188:",",189:"-",190:".",191:"/",192:"`",219:"[",220:"\\",221:"]",222:"'"}},4766:e=>{"use strict";e.exports={8:"backspace",9:"tab",13:"enter",16:"shift",17:"ctrl",18:"alt",20:"capslock",27:"esc",32:"space",33:"pageup",34:"pagedown",35:"end",36:"home",37:"left",38:"up",39:"right",40:"down",45:"ins",46:"del",91:"meta",93:"meta",173:"minus",187:"plus",189:"minus",224:"meta"};for(var n=1;n<20;++n)e.exports[111+n]="f"+n;for(n=0;n<=9;++n)e.exports[n+96]=n},6103:e=>{"use strict";e.exports=function(e){e.stopPropagation?e.stopPropagation():e.cancelBubble=!0}},3362:()=>{var e;!function(){var e=Math.PI,n=2*e,t=e/180,r=document.createElement("div");document.head.appendChild(r);var 
o=self.ConicGradient=function(e){o.all.push(this),e=e||{},this.canvas=document.createElement("canvas"),this.context=this.canvas.getContext("2d"),this.repeating=!!e.repeating,this.size=e.size||Math.max(innerWidth,innerHeight),this.canvas.width=this.canvas.height=this.size;var n=e.stops;this.stops=(n||"").split(/\s*,(?![^(]*\))\s*/),this.from=0;for(var t=0;t0){var i=this.stops[0].clone();i.pos=0,this.stops.unshift(i)}if(void 0===this.stops[this.stops.length-1].pos)this.stops[this.stops.length-1].pos=1;else if(!this.repeating&&this.stops[this.stops.length-1].pos<1){var a=this.stops[this.stops.length-1].clone();a.pos=1,this.stops.push(a)}if(this.stops.forEach((function(e,n){if(void 0===e.pos){for(var t=n+1;this[t];t++)if(void 0!==this[t].pos){e.pos=this[n-1].pos+(this[t].pos-this[n-1].pos)/(t-n+1);break}}else n>0&&(e.pos=Math.max(e.pos,this[n-1].pos))}),this.stops),this.repeating){var l=(n=this.stops.slice())[n.length-1].pos-n[0].pos;for(t=0;this.stops[this.stops.length-1].pos<1&&t<1e4;t++)for(var c=0;c'},get png(){return this.canvas.toDataURL()},get r(){return Math.sqrt(2)*this.size/2},paint:function(){var e,n,r,o=this.context,i=this.r,a=this.size/2,l=0,c=this.stops[l];o.translate(this.size/2,this.size/2),o.rotate(-90*t),o.rotate(this.from*t),o.translate(-this.size/2,-this.size/2);for(var u=0;u<360;){if(u/360+1e-5>=c.pos){do{e=c,l++,c=this.stops[l]}while(c&&c!=e&&c.pos===e.pos);if(!c)break;var s=e.color+""==c.color+""&&e!=c;n=e.color.map((function(e,n){return c.color[n]-e}))}r=(u/360-e.pos)/(c.pos-e.pos);var f=s?c.color:n.map((function(n,t){var o=n*r+e.color[t];return t<3?255&o:o}));if(o.fillStyle="rgba("+f.join(",")+")",o.beginPath(),o.moveTo(a,a),s)var p=360*(c.pos-e.pos);else p=.5;var d=u*t,h=(d=Math.min(360*t,d))+p*t;h=Math.min(360*t,h+.02),o.arc(a,a,i,d,h),o.closePath(),o.fill(),u+=p}}},o.ColorStop=function(e,t){if(this.gradient=e,t){var r=t.match(/^(.+?)(?:\s+([\d.]+)(%|deg|turn|grad|rad)?)?(?:\s+([\d.]+)(%|deg|turn|grad|rad)?)?\s*$/);if(this.color=o.ColorStop.colorToRGBA(r[1]),r[2]){var i=r[3];"%"==i||"0"===r[2]&&!i?this.pos=r[2]/100:"turn"==i?this.pos=+r[2]:"deg"==i?this.pos=r[2]/360:"grad"==i?this.pos=r[2]/400:"rad"==i&&(this.pos=r[2]/n)}r[4]&&(this.next=new o.ColorStop(e,r[1]+" "+r[4]+r[5]))}},o.ColorStop.prototype={clone:function(){var e=new o.ColorStop(this.gradient);return e.color=this.color,e.pos=this.pos,e},toString:function(){return"rgba("+this.color.join(", ")+") "+100*this.pos+"%"}},o.ColorStop.colorToRGBA=function(e){if(!Array.isArray(e)&&-1==e.indexOf("from")){r.style.color=e;var n=getComputedStyle(r).color.match(/rgba?\(([\d.]+), ([\d.]+), ([\d.]+)(?:, ([\d.]+))?\)/);return n&&(n.shift(),(n=n.map((function(e){return+e})))[3]=isNaN(n[3])?1:n[3]),n||[0,0,0,0]}return e}}(),self.StyleFix&&((e=document.createElement("p")).style.backgroundImage="conic-gradient(white, black)",e.style.backgroundImage=PrefixFree.prefix+"conic-gradient(white, black)",e.style.backgroundImage||StyleFix.register((function(e,n){return e.indexOf("conic-gradient")>-1&&(e=e.replace(/(?:repeating-)?conic-gradient\(\s*((?:\([^()]+\)|[^;()}])+?)\)/g,(function(e,n){return new ConicGradient({stops:n,repeating:e.indexOf("repeating-")>-1})}))),e})))},9662:(e,n,t)=>{var r=t(7854),o=t(614),i=t(6330),a=r.TypeError;e.exports=function(e){if(o(e))return e;throw a(i(e)+" is not a function")}},9483:(e,n,t)=>{var r=t(7854),o=t(4411),i=t(6330),a=r.TypeError;e.exports=function(e){if(o(e))return e;throw a(i(e)+" is not a constructor")}},6077:(e,n,t)=>{var 
r=t(7854),o=t(614),i=r.String,a=r.TypeError;e.exports=function(e){if("object"==typeof e||o(e))return e;throw a("Can't set "+i(e)+" as a prototype")}},1223:(e,n,t)=>{var r=t(5112),o=t(30),i=t(3070),a=r("unscopables"),l=Array.prototype;null==l[a]&&i.f(l,a,{configurable:!0,value:o(null)}),e.exports=function(e){l[a][e]=!0}},1530:(e,n,t)=>{"use strict";var r=t(8710).charAt;e.exports=function(e,n,t){return n+(t?r(e,n).length:1)}},5787:(e,n,t)=>{var r=t(7854),o=t(7976),i=r.TypeError;e.exports=function(e,n){if(o(n,e))return e;throw i("Incorrect invocation")}},9670:(e,n,t)=>{var r=t(7854),o=t(111),i=r.String,a=r.TypeError;e.exports=function(e){if(o(e))return e;throw a(i(e)+" is not an object")}},7556:(e,n,t)=>{var r=t(7293);e.exports=r((function(){if("function"==typeof ArrayBuffer){var e=new ArrayBuffer(8);Object.isExtensible(e)&&Object.defineProperty(e,"a",{value:8})}}))},8533:(e,n,t)=>{"use strict";var r=t(2092).forEach,o=t(9341)("forEach");e.exports=o?[].forEach:function(e){return r(this,e,arguments.length>1?arguments[1]:void 0)}},8457:(e,n,t)=>{"use strict";var r=t(7854),o=t(9974),i=t(6916),a=t(7908),l=t(3411),c=t(7659),u=t(4411),s=t(6244),f=t(6135),p=t(8554),d=t(1246),h=r.Array;e.exports=function(e){var n=a(e),t=u(this),r=arguments.length,g=r>1?arguments[1]:void 0,v=void 0!==g;v&&(g=o(g,r>2?arguments[2]:void 0));var A,b,m,y,E,_,C=d(n),w=0;if(!C||this==h&&c(C))for(A=s(n),b=t?new this(A):h(A);A>w;w++)_=v?g(n[w],w):n[w],f(b,w,_);else for(E=(y=p(n,C)).next,b=t?new this:[];!(m=i(E,y)).done;w++)_=v?l(y,g,[m.value,w],!0):m.value,f(b,w,_);return b.length=w,b}},1318:(e,n,t)=>{var r=t(5656),o=t(1400),i=t(6244),a=function(e){return function(n,t,a){var l,c=r(n),u=i(c),s=o(a,u);if(e&&t!=t){for(;u>s;)if((l=c[s++])!=l)return!0}else for(;u>s;s++)if((e||s in c)&&c[s]===t)return e||s||0;return!e&&-1}};e.exports={includes:a(!0),indexOf:a(!1)}},2092:(e,n,t)=>{var r=t(9974),o=t(1702),i=t(8361),a=t(7908),l=t(6244),c=t(5417),u=o([].push),s=function(e){var n=1==e,t=2==e,o=3==e,s=4==e,f=6==e,p=7==e,d=5==e||f;return function(h,g,v,A){for(var b,m,y=a(h),E=i(y),_=r(g,v),C=l(E),w=0,x=A||c,k=n?x(h,C):t||p?x(h,0):void 0;C>w;w++)if((d||w in E)&&(m=_(b=E[w],w,y),e))if(n)k[w]=m;else if(m)switch(e){case 3:return!0;case 5:return b;case 6:return w;case 2:u(k,b)}else switch(e){case 4:return!1;case 7:u(k,b)}return f?-1:o||s?s:k}};e.exports={forEach:s(0),map:s(1),filter:s(2),some:s(3),every:s(4),find:s(5),findIndex:s(6),filterReject:s(7)}},1194:(e,n,t)=>{var r=t(7293),o=t(5112),i=t(7392),a=o("species");e.exports=function(e){return i>=51||!r((function(){var n=[];return(n.constructor={})[a]=function(){return{foo:1}},1!==n[e](Boolean).foo}))}},9341:(e,n,t)=>{"use strict";var r=t(7293);e.exports=function(e,n){var t=[][e];return!!t&&r((function(){t.call(null,n||function(){throw 1},1)}))}},3671:(e,n,t)=>{var r=t(7854),o=t(9662),i=t(7908),a=t(8361),l=t(6244),c=r.TypeError,u=function(e){return function(n,t,r,u){o(t);var s=i(n),f=a(s),p=l(s),d=e?p-1:0,h=e?-1:1;if(r<2)for(;;){if(d in f){u=f[d],d+=h;break}if(d+=h,e?d<0:p<=d)throw c("Reduce of empty array with no initial value")}for(;e?d>=0:p>d;d+=h)d in f&&(u=t(u,f[d],d,s));return u}};e.exports={left:u(!1),right:u(!0)}},206:(e,n,t)=>{var r=t(1702);e.exports=r([].slice)},4362:(e,n,t)=>{var r=t(206),o=Math.floor,i=function(e,n){var t=e.length,c=o(t/2);return t<8?a(e,n):l(e,i(r(e,0,c),n),i(r(e,c),n),n)},a=function(e,n){for(var t,r,o=e.length,i=1;i0;)e[r]=e[--r];r!==i++&&(e[r]=t)}return e},l=function(e,n,t,r){for(var o=n.length,i=t.length,a=0,l=0;a{var 
r=t(7854),o=t(3157),i=t(4411),a=t(111),l=t(5112)("species"),c=r.Array;e.exports=function(e){var n;return o(e)&&(n=e.constructor,(i(n)&&(n===c||o(n.prototype))||a(n)&&null===(n=n[l]))&&(n=void 0)),void 0===n?c:n}},5417:(e,n,t)=>{var r=t(7475);e.exports=function(e,n){return new(r(e))(0===n?0:n)}},3411:(e,n,t)=>{var r=t(9670),o=t(9212);e.exports=function(e,n,t,i){try{return i?n(r(t)[0],t[1]):n(t)}catch(n){o(e,"throw",n)}}},7072:(e,n,t)=>{var r=t(5112)("iterator"),o=!1;try{var i=0,a={next:function(){return{done:!!i++}},return:function(){o=!0}};a[r]=function(){return this},Array.from(a,(function(){throw 2}))}catch(e){}e.exports=function(e,n){if(!n&&!o)return!1;var t=!1;try{var i={};i[r]=function(){return{next:function(){return{done:t=!0}}}},e(i)}catch(e){}return t}},4326:(e,n,t)=>{var r=t(1702),o=r({}.toString),i=r("".slice);e.exports=function(e){return i(o(e),8,-1)}},648:(e,n,t)=>{var r=t(7854),o=t(1694),i=t(614),a=t(4326),l=t(5112)("toStringTag"),c=r.Object,u="Arguments"==a(function(){return arguments}());e.exports=o?a:function(e){var n,t,r;return void 0===e?"Undefined":null===e?"Null":"string"==typeof(t=function(e,n){try{return e[n]}catch(e){}}(n=c(e),l))?t:u?a(n):"Object"==(r=a(n))&&i(n.callee)?"Arguments":r}},5631:(e,n,t)=>{"use strict";var r=t(3070).f,o=t(30),i=t(2248),a=t(9974),l=t(5787),c=t(408),u=t(654),s=t(6340),f=t(9781),p=t(2423).fastKey,d=t(9909),h=d.set,g=d.getterFor;e.exports={getConstructor:function(e,n,t,u){var s=e((function(e,r){l(e,d),h(e,{type:n,index:o(null),first:void 0,last:void 0,size:0}),f||(e.size=0),null!=r&&c(r,e[u],{that:e,AS_ENTRIES:t})})),d=s.prototype,v=g(n),A=function(e,n,t){var r,o,i=v(e),a=b(e,n);return a?a.value=t:(i.last=a={index:o=p(n,!0),key:n,value:t,previous:r=i.last,next:void 0,removed:!1},i.first||(i.first=a),r&&(r.next=a),f?i.size++:e.size++,"F"!==o&&(i.index[o]=a)),e},b=function(e,n){var t,r=v(e),o=p(n);if("F"!==o)return r.index[o];for(t=r.first;t;t=t.next)if(t.key==n)return t};return i(d,{clear:function(){for(var e=v(this),n=e.index,t=e.first;t;)t.removed=!0,t.previous&&(t.previous=t.previous.next=void 0),delete n[t.index],t=t.next;e.first=e.last=void 0,f?e.size=0:this.size=0},delete:function(e){var n=this,t=v(n),r=b(n,e);if(r){var o=r.next,i=r.previous;delete t.index[r.index],r.removed=!0,i&&(i.next=o),o&&(o.previous=i),t.first==r&&(t.first=o),t.last==r&&(t.last=i),f?t.size--:n.size--}return!!r},forEach:function(e){for(var n,t=v(this),r=a(e,arguments.length>1?arguments[1]:void 0);n=n?n.next:t.first;)for(r(n.value,n.key,this);n&&n.removed;)n=n.previous},has:function(e){return!!b(this,e)}}),i(d,t?{get:function(e){var n=b(this,e);return n&&n.value},set:function(e,n){return A(this,0===e?0:e,n)}}:{add:function(e){return A(this,e=0===e?0:e,e)}}),f&&r(d,"size",{get:function(){return v(this).size}}),s},setStrong:function(e,n,t){var r=n+" Iterator",o=g(n),i=g(r);u(e,n,(function(e,n){h(this,{type:r,target:e,state:o(e),kind:n,last:void 0})}),(function(){for(var e=i(this),n=e.kind,t=e.last;t&&t.removed;)t=t.previous;return e.target&&(e.last=t=t?t.next:e.state.first)?"keys"==n?{value:t.key,done:!1}:"values"==n?{value:t.value,done:!1}:{value:[t.key,t.value],done:!1}:(e.target=void 0,{value:void 0,done:!0})}),t?"entries":"values",!t,!0),s(n)}}},9320:(e,n,t)=>{"use strict";var r=t(1702),o=t(2248),i=t(2423).getWeakData,a=t(9670),l=t(111),c=t(5787),u=t(408),s=t(2092),f=t(2597),p=t(9909),d=p.set,h=p.getterFor,g=s.find,v=s.findIndex,A=r([].splice),b=0,m=function(e){return e.frozen||(e.frozen=new y)},y=function(){this.entries=[]},E=function(e,n){return 
g(e.entries,(function(e){return e[0]===n}))};y.prototype={get:function(e){var n=E(this,e);if(n)return n[1]},has:function(e){return!!E(this,e)},set:function(e,n){var t=E(this,e);t?t[1]=n:this.entries.push([e,n])},delete:function(e){var n=v(this.entries,(function(n){return n[0]===e}));return~n&&A(this.entries,n,1),!!~n}},e.exports={getConstructor:function(e,n,t,r){var s=e((function(e,o){c(e,p),d(e,{type:n,id:b++,frozen:void 0}),null!=o&&u(o,e[r],{that:e,AS_ENTRIES:t})})),p=s.prototype,g=h(n),v=function(e,n,t){var r=g(e),o=i(a(n),!0);return!0===o?m(r).set(n,t):o[r.id]=t,e};return o(p,{delete:function(e){var n=g(this);if(!l(e))return!1;var t=i(e);return!0===t?m(n).delete(e):t&&f(t,n.id)&&delete t[n.id]},has:function(e){var n=g(this);if(!l(e))return!1;var t=i(e);return!0===t?m(n).has(e):t&&f(t,n.id)}}),o(p,t?{get:function(e){var n=g(this);if(l(e)){var t=i(e);return!0===t?m(n).get(e):t?t[n.id]:void 0}},set:function(e,n){return v(this,e,n)}}:{add:function(e){return v(this,e,!0)}}),s}}},7710:(e,n,t)=>{"use strict";var r=t(2109),o=t(7854),i=t(1702),a=t(4705),l=t(1320),c=t(2423),u=t(408),s=t(5787),f=t(614),p=t(111),d=t(7293),h=t(7072),g=t(8003),v=t(9587);e.exports=function(e,n,t){var A=-1!==e.indexOf("Map"),b=-1!==e.indexOf("Weak"),m=A?"set":"add",y=o[e],E=y&&y.prototype,_=y,C={},w=function(e){var n=i(E[e]);l(E,e,"add"==e?function(e){return n(this,0===e?0:e),this}:"delete"==e?function(e){return!(b&&!p(e))&&n(this,0===e?0:e)}:"get"==e?function(e){return b&&!p(e)?void 0:n(this,0===e?0:e)}:"has"==e?function(e){return!(b&&!p(e))&&n(this,0===e?0:e)}:function(e,t){return n(this,0===e?0:e,t),this})};if(a(e,!f(y)||!(b||E.forEach&&!d((function(){(new y).entries().next()})))))_=t.getConstructor(n,e,A,m),c.enable();else if(a(e,!0)){var x=new _,k=x[m](b?{}:-0,1)!=x,S=d((function(){x.has(1)})),O=h((function(e){new y(e)})),B=!b&&d((function(){for(var e=new y,n=5;n--;)e[m](n,n);return!e.has(-0)}));O||((_=n((function(e,n){s(e,E);var t=v(new y,e,_);return null!=n&&u(n,t[m],{that:t,AS_ENTRIES:A}),t}))).prototype=E,E.constructor=_),(S||B)&&(w("delete"),w("has"),A&&w("get")),(B||k)&&w(m),b&&E.clear&&delete E.clear}return C[e]=_,r({global:!0,forced:_!=y},C),g(_,e),b||t.setStrong(_,e,A),_}},9920:(e,n,t)=>{var r=t(2597),o=t(3887),i=t(1236),a=t(3070);e.exports=function(e,n){for(var t=o(n),l=a.f,c=i.f,u=0;u{var r=t(5112)("match");e.exports=function(e){var n=/./;try{"/./"[e](n)}catch(t){try{return n[r]=!1,"/./"[e](n)}catch(e){}}return!1}},8544:(e,n,t)=>{var r=t(7293);e.exports=!r((function(){function e(){}return e.prototype.constructor=null,Object.getPrototypeOf(new e)!==e.prototype}))},4230:(e,n,t)=>{var r=t(1702),o=t(4488),i=t(1340),a=/"/g,l=r("".replace);e.exports=function(e,n,t,r){var c=i(o(e)),u="<"+n;return""!==t&&(u+=" "+t+'="'+l(i(r),a,""")+'"'),u+">"+c+""}},4994:(e,n,t)=>{"use strict";var r=t(3383).IteratorPrototype,o=t(30),i=t(9114),a=t(8003),l=t(7497),c=function(){return this};e.exports=function(e,n,t){var u=n+" Iterator";return e.prototype=o(r,{next:i(1,t)}),a(e,u,!1,!0),l[u]=c,e}},8880:(e,n,t)=>{var r=t(9781),o=t(3070),i=t(9114);e.exports=r?function(e,n,t){return o.f(e,n,i(1,t))}:function(e,n,t){return e[n]=t,e}},9114:e=>{e.exports=function(e,n){return{enumerable:!(1&e),configurable:!(2&e),writable:!(4&e),value:n}}},6135:(e,n,t)=>{"use strict";var r=t(4948),o=t(3070),i=t(9114);e.exports=function(e,n,t){var a=r(n);a in e?o.f(e,a,i(0,t)):e[a]=t}},8709:(e,n,t)=>{"use strict";var r=t(7854),o=t(9670),i=t(2140),a=r.TypeError;e.exports=function(e){if(o(this),"string"===e||"default"===e)e="string";else 
if("number"!==e)throw a("Incorrect hint");return i(this,e)}},654:(e,n,t)=>{"use strict";var r=t(2109),o=t(6916),i=t(1913),a=t(6530),l=t(614),c=t(4994),u=t(9518),s=t(7674),f=t(8003),p=t(8880),d=t(1320),h=t(5112),g=t(7497),v=t(3383),A=a.PROPER,b=a.CONFIGURABLE,m=v.IteratorPrototype,y=v.BUGGY_SAFARI_ITERATORS,E=h("iterator"),_="keys",C="values",w="entries",x=function(){return this};e.exports=function(e,n,t,a,h,v,k){c(t,n,a);var S,O,B,I=function(e){if(e===h&&R)return R;if(!y&&e in j)return j[e];switch(e){case _:case C:case w:return function(){return new t(this,e)}}return function(){return new t(this)}},T=n+" Iterator",P=!1,j=e.prototype,z=j[E]||j["@@iterator"]||h&&j[h],R=!y&&z||I(h),M="Array"==n&&j.entries||z;if(M&&(S=u(M.call(new e)))!==Object.prototype&&S.next&&(i||u(S)===m||(s?s(S,m):l(S[E])||d(S,E,x)),f(S,T,!0,!0),i&&(g[T]=x)),A&&h==C&&z&&z.name!==C&&(!i&&b?p(j,"name",C):(P=!0,R=function(){return o(z,this)})),h)if(O={values:I(C),keys:v?R:I(_),entries:I(w)},k)for(B in O)(y||P||!(B in j))&&d(j,B,O[B]);else r({target:n,proto:!0,forced:y||P},O);return i&&!k||j[E]===R||d(j,E,R,{name:h}),g[n]=R,O}},7235:(e,n,t)=>{var r=t(857),o=t(2597),i=t(6061),a=t(3070).f;e.exports=function(e){var n=r.Symbol||(r.Symbol={});o(n,e)||a(n,e,{value:i.f(e)})}},9781:(e,n,t)=>{var r=t(7293);e.exports=!r((function(){return 7!=Object.defineProperty({},1,{get:function(){return 7}})[1]}))},317:(e,n,t)=>{var r=t(7854),o=t(111),i=r.document,a=o(i)&&o(i.createElement);e.exports=function(e){return a?i.createElement(e):{}}},8324:e=>{e.exports={CSSRuleList:0,CSSStyleDeclaration:0,CSSValueList:0,ClientRectList:0,DOMRectList:0,DOMStringList:0,DOMTokenList:1,DataTransferItemList:0,FileList:0,HTMLAllCollection:0,HTMLCollection:0,HTMLFormElement:0,HTMLSelectElement:0,MediaList:0,MimeTypeArray:0,NamedNodeMap:0,NodeList:1,PaintRequestList:0,Plugin:0,PluginArray:0,SVGLengthList:0,SVGNumberList:0,SVGPathSegList:0,SVGPointList:0,SVGStringList:0,SVGTransformList:0,SourceBufferList:0,StyleSheetList:0,TextTrackCueList:0,TextTrackList:0,TouchList:0}},8509:(e,n,t)=>{var r=t(317)("span").classList,o=r&&r.constructor&&r.constructor.prototype;e.exports=o===Object.prototype?void 0:o},8886:(e,n,t)=>{var r=t(8113).match(/firefox\/(\d+)/i);e.exports=!!r&&+r[1]},256:(e,n,t)=>{var r=t(8113);e.exports=/MSIE|Trident/.test(r)},5268:(e,n,t)=>{var r=t(4326),o=t(7854);e.exports="process"==r(o.process)},8113:(e,n,t)=>{var r=t(5005);e.exports=r("navigator","userAgent")||""},7392:(e,n,t)=>{var r,o,i=t(7854),a=t(8113),l=i.process,c=i.Deno,u=l&&l.versions||c&&c.version,s=u&&u.v8;s&&(o=(r=s.split("."))[0]>0&&r[0]<4?1:+(r[0]+r[1])),!o&&a&&(!(r=a.match(/Edge\/(\d+)/))||r[1]>=74)&&(r=a.match(/Chrome\/(\d+)/))&&(o=+r[1]),e.exports=o},8008:(e,n,t)=>{var r=t(8113).match(/AppleWebKit\/(\d+)\./);e.exports=!!r&&+r[1]},748:e=>{e.exports=["constructor","hasOwnProperty","isPrototypeOf","propertyIsEnumerable","toLocaleString","toString","valueOf"]},2109:(e,n,t)=>{var r=t(7854),o=t(1236).f,i=t(8880),a=t(1320),l=t(3505),c=t(9920),u=t(4705);e.exports=function(e,n){var t,s,f,p,d,h=e.target,g=e.global,v=e.stat;if(t=g?r:v?r[h]||l(h,{}):(r[h]||{}).prototype)for(s in n){if(p=n[s],f=e.noTargetGet?(d=o(t,s))&&d.value:t[s],!u(g?s:h+(v?".":"#")+s,e.forced)&&void 0!==f){if(typeof p==typeof f)continue;c(p,f)}(e.sham||f&&f.sham)&&i(p,"sham",!0),a(t,s,p,e)}}},7293:e=>{e.exports=function(e){try{return!!e()}catch(e){return!0}}},7007:(e,n,t)=>{"use strict";t(4916);var r=t(1702),o=t(1320),i=t(2261),a=t(7293),l=t(5112),c=t(8880),u=l("species"),s=RegExp.prototype;e.exports=function(e,n,t,f){var 
p=l(e),d=!a((function(){var n={};return n[p]=function(){return 7},7!=""[e](n)})),h=d&&!a((function(){var n=!1,t=/a/;return"split"===e&&((t={}).constructor={},t.constructor[u]=function(){return t},t.flags="",t[p]=/./[p]),t.exec=function(){return n=!0,null},t[p](""),!n}));if(!d||!h||t){var g=r(/./[p]),v=n(p,""[e],(function(e,n,t,o,a){var l=r(e),c=n.exec;return c===i||c===s.exec?d&&!a?{done:!0,value:g(n,t,o)}:{done:!0,value:l(t,n,o)}:{done:!1}}));o(String.prototype,e,v[0]),o(s,p,v[1])}f&&c(s[p],"sham",!0)}},6677:(e,n,t)=>{var r=t(7293);e.exports=!r((function(){return Object.isExtensible(Object.preventExtensions({}))}))},2104:e=>{var n=Function.prototype,t=n.apply,r=n.bind,o=n.call;e.exports="object"==typeof Reflect&&Reflect.apply||(r?o.bind(t):function(){return o.apply(t,arguments)})},9974:(e,n,t)=>{var r=t(1702),o=t(9662),i=r(r.bind);e.exports=function(e,n){return o(e),void 0===n?e:i?i(e,n):function(){return e.apply(n,arguments)}}},7065:(e,n,t)=>{"use strict";var r=t(7854),o=t(1702),i=t(9662),a=t(111),l=t(2597),c=t(206),u=r.Function,s=o([].concat),f=o([].join),p={},d=function(e,n,t){if(!l(p,n)){for(var r=[],o=0;o{var n=Function.prototype.call;e.exports=n.bind?n.bind(n):function(){return n.apply(n,arguments)}},6530:(e,n,t)=>{var r=t(9781),o=t(2597),i=Function.prototype,a=r&&Object.getOwnPropertyDescriptor,l=o(i,"name"),c=l&&"something"===function(){}.name,u=l&&(!r||r&&a(i,"name").configurable);e.exports={EXISTS:l,PROPER:c,CONFIGURABLE:u}},1702:e=>{var n=Function.prototype,t=n.bind,r=n.call,o=t&&t.bind(r);e.exports=t?function(e){return e&&o(r,e)}:function(e){return e&&function(){return r.apply(e,arguments)}}},5005:(e,n,t)=>{var r=t(7854),o=t(614),i=function(e){return o(e)?e:void 0};e.exports=function(e,n){return arguments.length<2?i(r[e]):r[e]&&r[e][n]}},1246:(e,n,t)=>{var r=t(648),o=t(8173),i=t(7497),a=t(5112)("iterator");e.exports=function(e){if(null!=e)return o(e,a)||o(e,"@@iterator")||i[r(e)]}},8554:(e,n,t)=>{var r=t(7854),o=t(6916),i=t(9662),a=t(9670),l=t(6330),c=t(1246),u=r.TypeError;e.exports=function(e,n){var t=arguments.length<2?c(e):n;if(i(t))return a(o(t,e));throw u(l(e)+" is not iterable")}},8173:(e,n,t)=>{var r=t(9662);e.exports=function(e,n){var t=e[n];return null==t?void 0:r(t)}},647:(e,n,t)=>{var r=t(1702),o=t(7908),i=Math.floor,a=r("".charAt),l=r("".replace),c=r("".slice),u=/\$([$&'`]|\d{1,2}|<[^>]*>)/g,s=/\$([$&'`]|\d{1,2})/g;e.exports=function(e,n,t,r,f,p){var d=t+e.length,h=r.length,g=s;return void 0!==f&&(f=o(f),g=u),l(p,g,(function(o,l){var u;switch(a(l,0)){case"$":return"$";case"&":return e;case"`":return c(n,0,t);case"'":return c(n,d);case"<":u=f[c(l,1,-1)];break;default:var s=+l;if(0===s)return o;if(s>h){var p=i(s/10);return 0===p?o:p<=h?void 0===r[p-1]?a(l,1):r[p-1]+a(l,1):o}u=r[s-1]}return void 0===u?"":u}))}},7854:(e,n,t)=>{var r=function(e){return e&&e.Math==Math&&e};e.exports=r("object"==typeof globalThis&&globalThis)||r("object"==typeof window&&window)||r("object"==typeof self&&self)||r("object"==typeof t.g&&t.g)||function(){return this}()||Function("return this")()},2597:(e,n,t)=>{var r=t(1702),o=t(7908),i=r({}.hasOwnProperty);e.exports=Object.hasOwn||function(e,n){return i(o(e),n)}},3501:e=>{e.exports={}},490:(e,n,t)=>{var r=t(5005);e.exports=r("document","documentElement")},4664:(e,n,t)=>{var r=t(9781),o=t(7293),i=t(317);e.exports=!r&&!o((function(){return 7!=Object.defineProperty(i("div"),"a",{get:function(){return 7}}).a}))},8361:(e,n,t)=>{var 
r=t(7854),o=t(1702),i=t(7293),a=t(4326),l=r.Object,c=o("".split);e.exports=i((function(){return!l("z").propertyIsEnumerable(0)}))?function(e){return"String"==a(e)?c(e,""):l(e)}:l},9587:(e,n,t)=>{var r=t(614),o=t(111),i=t(7674);e.exports=function(e,n,t){var a,l;return i&&r(a=n.constructor)&&a!==t&&o(l=a.prototype)&&l!==t.prototype&&i(e,l),e}},2788:(e,n,t)=>{var r=t(1702),o=t(614),i=t(5465),a=r(Function.toString);o(i.inspectSource)||(i.inspectSource=function(e){return a(e)}),e.exports=i.inspectSource},2423:(e,n,t)=>{var r=t(2109),o=t(1702),i=t(3501),a=t(111),l=t(2597),c=t(3070).f,u=t(8006),s=t(1156),f=t(2050),p=t(9711),d=t(6677),h=!1,g=p("meta"),v=0,A=function(e){c(e,g,{value:{objectID:"O"+v++,weakData:{}}})},b=e.exports={enable:function(){b.enable=function(){},h=!0;var e=u.f,n=o([].splice),t={};t[g]=1,e(t).length&&(u.f=function(t){for(var r=e(t),o=0,i=r.length;o{var r,o,i,a=t(8536),l=t(7854),c=t(1702),u=t(111),s=t(8880),f=t(2597),p=t(5465),d=t(6200),h=t(3501),g="Object already initialized",v=l.TypeError,A=l.WeakMap;if(a||p.state){var b=p.state||(p.state=new A),m=c(b.get),y=c(b.has),E=c(b.set);r=function(e,n){if(y(b,e))throw new v(g);return n.facade=e,E(b,e,n),n},o=function(e){return m(b,e)||{}},i=function(e){return y(b,e)}}else{var _=d("state");h[_]=!0,r=function(e,n){if(f(e,_))throw new v(g);return n.facade=e,s(e,_,n),n},o=function(e){return f(e,_)?e[_]:{}},i=function(e){return f(e,_)}}e.exports={set:r,get:o,has:i,enforce:function(e){return i(e)?o(e):r(e,{})},getterFor:function(e){return function(n){var t;if(!u(n)||(t=o(n)).type!==e)throw v("Incompatible receiver, "+e+" required");return t}}}},7659:(e,n,t)=>{var r=t(5112),o=t(7497),i=r("iterator"),a=Array.prototype;e.exports=function(e){return void 0!==e&&(o.Array===e||a[i]===e)}},3157:(e,n,t)=>{var r=t(4326);e.exports=Array.isArray||function(e){return"Array"==r(e)}},614:e=>{e.exports=function(e){return"function"==typeof e}},4411:(e,n,t)=>{var r=t(1702),o=t(7293),i=t(614),a=t(648),l=t(5005),c=t(2788),u=function(){},s=[],f=l("Reflect","construct"),p=/^\s*(?:class|function)\b/,d=r(p.exec),h=!p.exec(u),g=function(e){if(!i(e))return!1;try{return f(u,s,e),!0}catch(e){return!1}};e.exports=!f||o((function(){var e;return g(g.call)||!g(Object)||!g((function(){e=!0}))||e}))?function(e){if(!i(e))return!1;switch(a(e)){case"AsyncFunction":case"GeneratorFunction":case"AsyncGeneratorFunction":return!1}return h||!!d(p,c(e))}:g},4705:(e,n,t)=>{var r=t(7293),o=t(614),i=/#|\.prototype\./,a=function(e,n){var t=c[l(e)];return t==s||t!=u&&(o(n)?r(n):!!n)},l=a.normalize=function(e){return String(e).replace(i,".").toLowerCase()},c=a.data={},u=a.NATIVE="N",s=a.POLYFILL="P";e.exports=a},111:(e,n,t)=>{var r=t(614);e.exports=function(e){return"object"==typeof e?null!==e:r(e)}},1913:e=>{e.exports=!1},7850:(e,n,t)=>{var r=t(111),o=t(4326),i=t(5112)("match");e.exports=function(e){var n;return r(e)&&(void 0!==(n=e[i])?!!n:"RegExp"==o(e))}},2190:(e,n,t)=>{var r=t(7854),o=t(5005),i=t(614),a=t(7976),l=t(3307),c=r.Object;e.exports=l?function(e){return"symbol"==typeof e}:function(e){var n=o("Symbol");return i(n)&&a(n.prototype,c(e))}},408:(e,n,t)=>{var r=t(7854),o=t(9974),i=t(6916),a=t(9670),l=t(6330),c=t(7659),u=t(6244),s=t(7976),f=t(8554),p=t(1246),d=t(9212),h=r.TypeError,g=function(e,n){this.stopped=e,this.result=n},v=g.prototype;e.exports=function(e,n,t){var r,A,b,m,y,E,_,C=t&&t.that,w=!(!t||!t.AS_ENTRIES),x=!(!t||!t.IS_ITERATOR),k=!(!t||!t.INTERRUPTED),S=o(n,C),O=function(e){return r&&d(r,"normal",e),new g(!0,e)},B=function(e){return 
w?(a(e),k?S(e[0],e[1],O):S(e[0],e[1])):k?S(e,O):S(e)};if(x)r=e;else{if(!(A=p(e)))throw h(l(e)+" is not iterable");if(c(A)){for(b=0,m=u(e);m>b;b++)if((y=B(e[b]))&&s(v,y))return y;return new g(!1)}r=f(e,A)}for(E=r.next;!(_=i(E,r)).done;){try{y=B(_.value)}catch(e){d(r,"throw",e)}if("object"==typeof y&&y&&s(v,y))return y}return new g(!1)}},9212:(e,n,t)=>{var r=t(6916),o=t(9670),i=t(8173);e.exports=function(e,n,t){var a,l;o(e);try{if(!(a=i(e,"return"))){if("throw"===n)throw t;return t}a=r(a,e)}catch(e){l=!0,a=e}if("throw"===n)throw t;if(l)throw a;return o(a),t}},3383:(e,n,t)=>{"use strict";var r,o,i,a=t(7293),l=t(614),c=t(30),u=t(9518),s=t(1320),f=t(5112),p=t(1913),d=f("iterator"),h=!1;[].keys&&("next"in(i=[].keys())?(o=u(u(i)))!==Object.prototype&&(r=o):h=!0),null==r||a((function(){var e={};return r[d].call(e)!==e}))?r={}:p&&(r=c(r)),l(r[d])||s(r,d,(function(){return this})),e.exports={IteratorPrototype:r,BUGGY_SAFARI_ITERATORS:h}},7497:e=>{e.exports={}},6244:(e,n,t)=>{var r=t(7466);e.exports=function(e){return r(e.length)}},133:(e,n,t)=>{var r=t(7392),o=t(7293);e.exports=!!Object.getOwnPropertySymbols&&!o((function(){var e=Symbol();return!String(e)||!(Object(e)instanceof Symbol)||!Symbol.sham&&r&&r<41}))},8536:(e,n,t)=>{var r=t(7854),o=t(614),i=t(2788),a=r.WeakMap;e.exports=o(a)&&/native code/.test(i(a))},3929:(e,n,t)=>{var r=t(7854),o=t(7850),i=r.TypeError;e.exports=function(e){if(o(e))throw i("The method doesn't accept regular expressions");return e}},1574:(e,n,t)=>{"use strict";var r=t(9781),o=t(1702),i=t(6916),a=t(7293),l=t(1956),c=t(5181),u=t(5296),s=t(7908),f=t(8361),p=Object.assign,d=Object.defineProperty,h=o([].concat);e.exports=!p||a((function(){if(r&&1!==p({b:1},p(d({},"a",{enumerable:!0,get:function(){d(this,"b",{value:3,enumerable:!1})}}),{b:2})).b)return!0;var e={},n={},t=Symbol(),o="abcdefghijklmnopqrst";return e[t]=7,o.split("").forEach((function(e){n[e]=e})),7!=p({},e)[t]||l(p({},n)).join("")!=o}))?function(e,n){for(var t=s(e),o=arguments.length,a=1,p=c.f,d=u.f;o>a;)for(var g,v=f(arguments[a++]),A=p?h(l(v),p(v)):l(v),b=A.length,m=0;b>m;)g=A[m++],r&&!i(d,v,g)||(t[g]=v[g]);return t}:p},30:(e,n,t)=>{var r,o=t(9670),i=t(6048),a=t(748),l=t(3501),c=t(490),u=t(317),s=t(6200),f=s("IE_PROTO"),p=function(){},d=function(e){return"