diff --git a/Cargo.lock b/Cargo.lock
index 4795892..8aad659 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1977,6 +1977,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "98ff100c3f9f45f134fe9e14ac88691004d54eb91391a40c528cfd613e7ab374"
 dependencies = [
  "anyhow",
+ "async-openai",
  "base64 0.21.0",
  "bstr",
  "fancy-regex",
diff --git a/Cargo.toml b/Cargo.toml
index aa8b001..bada6f5 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -35,7 +35,7 @@ simple_logger = "4.1.0"
 strum = "0.24.1"
 strum_macros = "0.24.3"
 tera = { version = "1.18.1", default-features = false }
-tiktoken-rs = "0.4.0"
+tiktoken-rs = { version = "0.4.0", features = ["async-openai"] }
 tokio = { version = "1.27.0", features = ["full"] }
 toml = "0.7.3"
 toml_edit = "0.19.8"
diff --git a/src/llms/openai.rs b/src/llms/openai.rs
index 91047d4..1df8971 100644
--- a/src/llms/openai.rs
+++ b/src/llms/openai.rs
@@ -3,7 +3,7 @@
 use anyhow::{anyhow, bail, Ok, Result};
 use async_trait::async_trait;
 use reqwest::{tls, Proxy};
-use tiktoken_rs::{get_chat_completion_max_tokens, get_completion_max_tokens};
+use tiktoken_rs::{async_openai::get_chat_completion_max_tokens, get_completion_max_tokens};

 use crate::settings::OpenAISettings;
 use async_openai::{
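For context on why the feature flag and the import change go together: with the `async-openai` feature enabled, `tiktoken_rs::async_openai::get_chat_completion_max_tokens` accepts `async_openai::types::ChatCompletionRequestMessage` values directly, so the same messages built for the chat request can be used for token counting without a conversion layer. Below is a minimal sketch, not part of this change; the model name and prompt are illustrative, and the builder calls assume the async-openai 0.10-era message types this project depends on.

```rust
use anyhow::Result;
use async_openai::types::{ChatCompletionRequestMessageArgs, Role};
use tiktoken_rs::async_openai::get_chat_completion_max_tokens;

fn main() -> Result<()> {
    // Build the prompt with async-openai's own message type; the feature-gated
    // helper consumes it directly instead of tiktoken-rs's internal type.
    let messages = vec![ChatCompletionRequestMessageArgs::default()
        .role(Role::User)
        .content("Summarize the staged changes as a commit message.")
        .build()?];

    // Remaining completion budget = model context window - prompt tokens.
    // "gpt-3.5-turbo" is an illustrative model name, not taken from the diff.
    let max_tokens = get_chat_completion_max_tokens("gpt-3.5-turbo", &messages)?;
    println!("max completion tokens: {max_tokens}");
    Ok(())
}
```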