Add docstring examples for tokenize() & detokenize()
andreaskoepf committed Nov 30, 2023
1 parent 9dd9557 commit cf33535
Showing 1 changed file with 48 additions and 0 deletions: src/lib.rs
@@ -220,6 +220,31 @@ impl Client {
.await
}

/// Tokenize a prompt for a specific model.
///
/// ```no_run
/// use aleph_alpha_client::{Client, Error, How, TaskTokenization};
///
/// async fn tokenize() -> Result<(), Error> {
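///     // AA_API_TOKEN is a placeholder for your Aleph Alpha API token.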
///     let client = Client::new(AA_API_TOKEN)?;
///
///     // Name of the model for which we want to tokenize text.
///     let model = "luminous-base";
///
///     // Text prompt to be tokenized.
///     let prompt = "An apple a day";
///
///     let task = TaskTokenization {
///         prompt,
///         tokens: true,     // return text tokens
///         token_ids: true,  // return numeric token ids
///     };
///     let response = client.tokenize(&task, model, &How::default()).await?;
///
///     dbg!(&response);
///     Ok(())
/// }
/// ```
pub async fn tokenize(
&self,
task: &TaskTokenization<'_>,
@@ -231,6 +256,29 @@ impl Client {
.await
}

/// Detokenize a list of token ids into a string.
///
/// ```no_run
/// use aleph_alpha_client::{Client, Error, How, TaskDetokenization};
///
/// async fn detokenize() -> Result<(), Error> {
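///     // AA_API_TOKEN is a placeholder for your Aleph Alpha API token.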
///     let client = Client::new(AA_API_TOKEN)?;
///
///     // Name of the model whose tokenizer produced the input token ids.
///     let model = "luminous-base";
///
///     // Token ids to convert back into text.
///     let token_ids: Vec<u32> = vec![556, 48741, 247, 2983];
///
///     let task = TaskDetokenization {
///         token_ids: &token_ids,
///     };
///     let response = client.detokenize(&task, model, &How::default()).await?;
///
///     dbg!(&response);
///     Ok(())
/// }
/// ```
pub async fn detokenize(
&self,
task: &TaskDetokenization<'_>,
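Taken together, the two new docstrings compose into a round trip: tokenize a prompt, then feed the returned ids back into `detokenize`. Below is a minimal sketch of that flow under the same assumptions as the examples above (an `AA_API_TOKEN` placeholder and the `luminous-base` model); the `round_trip` function name is illustrative, and the assumed shape of the tokenization response (an optional `token_ids` field) should be checked against the crate's actual output types.

```rust
use aleph_alpha_client::{Client, Error, How, TaskDetokenization, TaskTokenization};

async fn round_trip() -> Result<(), Error> {
    // AA_API_TOKEN is a placeholder for your Aleph Alpha API token.
    let client = Client::new(AA_API_TOKEN)?;
    let model = "luminous-base";

    // Tokenize the prompt, requesting only numeric token ids.
    let task = TaskTokenization {
        prompt: "An apple a day",
        tokens: false,
        token_ids: true,
    };
    let tokenized = client.tokenize(&task, model, &How::default()).await?;

    // Assumption: `token_ids` is an Option populated because it was
    // requested via the task flags above.
    let token_ids: Vec<u32> = tokenized.token_ids.expect("token ids were requested");

    // Detokenizing the ids should reproduce (approximately) the prompt.
    let task = TaskDetokenization {
        token_ids: &token_ids,
    };
    let detokenized = client.detokenize(&task, model, &How::default()).await?;

    dbg!(&detokenized);
    Ok(())
}
```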
