6 Commits

Author SHA1 Message Date
695605977f Functions and structs for working with openai api.
Input with stdout message
2024-07-31 19:37:46 +02:00
865cc6ddb9 Fix version requirement for regex 2024-07-21 19:47:15 +02:00
15de73dad4 Fix cfg for external crate 2024-07-21 19:43:58 +02:00
8cbb2757a5 Fixed version requirement 2024-07-17 13:02:09 +02:00
3389b2264e Removed trim fron inner parenthesized 2024-07-17 12:48:09 +02:00
5cd1c075a5 Read files macro for loading reading files to string at compile-time
Makefile for formatting and linting

Workspace for subcrates.

Moved crates to subdir and moved subcrate configs to workspace.*
2024-07-16 18:29:32 +02:00
31 changed files with 3188 additions and 168 deletions

View File

@ -15,4 +15,4 @@ jobs:
- name: Build - name: Build
run: cargo build --verbose run: cargo build --verbose
- name: Run tests - name: Run tests
run: cargo test --verbose --all-features run: cargo test --verbose --all-features --workspace

11
.idea/lib.iml generated
View File

@ -2,13 +2,20 @@
<module type="EMPTY_MODULE" version="4"> <module type="EMPTY_MODULE" version="4">
<component name="NewModuleRootManager"> <component name="NewModuleRootManager">
<content url="file://$MODULE_DIR$"> <content url="file://$MODULE_DIR$">
<sourceFolder url="file://$MODULE_DIR$/derive/src" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/src" isTestSource="false" /> <sourceFolder url="file://$MODULE_DIR$/src" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/examples" isTestSource="false" /> <sourceFolder url="file://$MODULE_DIR$/examples" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/examples/multipart_file/src" isTestSource="false" /> <sourceFolder url="file://$MODULE_DIR$/examples/multipart_file/src" isTestSource="false" />
<excludeFolder url="file://$MODULE_DIR$/derive/target" /> <sourceFolder url="file://$MODULE_DIR$/crates/into_response_derive/src" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/crates/read_files/src" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/crates/read_files/tests" isTestSource="true" />
<sourceFolder url="file://$MODULE_DIR$/crates/read_files/tests" isTestSource="true" />
<sourceFolder url="file://$MODULE_DIR$/examples/openai-assistant/src" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/tests" isTestSource="true" />
<excludeFolder url="file://$MODULE_DIR$/target" /> <excludeFolder url="file://$MODULE_DIR$/target" />
<excludeFolder url="file://$MODULE_DIR$/examples/multipart_file/target" /> <excludeFolder url="file://$MODULE_DIR$/examples/multipart_file/target" />
<excludeFolder url="file://$MODULE_DIR$/crates/into_response_derive/target" />
<excludeFolder url="file://$MODULE_DIR$/crates/read_files/target" />
<excludeFolder url="file://$MODULE_DIR$/examples/openai-assistant/target" />
</content> </content>
<orderEntry type="inheritedJdk" /> <orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" /> <orderEntry type="sourceFolder" forTests="false" />

View File

@ -1,12 +1,12 @@
<component name="ProjectRunConfigurationManager"> <component name="ProjectRunConfigurationManager">
<configuration default="false" name="Test" type="CargoCommandRunConfiguration" factoryName="Cargo Command"> <configuration default="false" name="All Tests" type="CargoCommandRunConfiguration" factoryName="Cargo Command">
<option name="command" value="test --package lib --lib tests --all-features" /> <option name="command" value="test --workspace" />
<option name="workingDirectory" value="file://$PROJECT_DIR$" /> <option name="workingDirectory" value="file://$PROJECT_DIR$" />
<envs /> <envs />
<option name="emulateTerminal" value="true" /> <option name="emulateTerminal" value="true" />
<option name="channel" value="DEFAULT" /> <option name="channel" value="DEFAULT" />
<option name="requiredFeatures" value="true" /> <option name="requiredFeatures" value="true" />
<option name="allFeatures" value="false" /> <option name="allFeatures" value="true" />
<option name="withSudo" value="false" /> <option name="withSudo" value="false" />
<option name="buildTarget" value="REMOTE" /> <option name="buildTarget" value="REMOTE" />
<option name="backtrace" value="SHORT" /> <option name="backtrace" value="SHORT" />

19
.idea/runConfigurations/Release.xml generated Normal file
View File

@ -0,0 +1,19 @@
<component name="ProjectRunConfigurationManager">
<configuration default="false" name="Release" type="CargoCommandRunConfiguration" factoryName="Cargo Command">
<option name="command" value="build --release --all-features" />
<option name="workingDirectory" value="file://$PROJECT_DIR$" />
<envs />
<option name="emulateTerminal" value="true" />
<option name="channel" value="DEFAULT" />
<option name="requiredFeatures" value="true" />
<option name="allFeatures" value="true" />
<option name="withSudo" value="false" />
<option name="buildTarget" value="REMOTE" />
<option name="backtrace" value="SHORT" />
<option name="isRedirectInput" value="false" />
<option name="redirectInputPath" value="" />
<method v="2">
<option name="CARGO.BUILD_TASK_PROVIDER" enabled="true" />
</method>
</configuration>
</component>

6
.idea/rust.xml generated Normal file
View File

@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="RsVcsConfiguration">
<option name="rustFmt" value="true" />
</component>
</project>

1010
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@ -1,31 +1,54 @@
[package] [workspace]
name = "lib" members = ["crates/*"]
version = "1.3.5" exclude = ["examples"]
[workspace.package]
edition = "2021" edition = "2021"
rust-version = "1.80.0"
authors = ["Martin Berg Alstad"] authors = ["Martin Berg Alstad"]
homepage = "emberal.github.io" homepage = "emberal.github.io"
[package]
name = "lib"
version = "1.4.1-hotfix-hotfix-2"
description = "A library with utilities and helper functions."
edition = { workspace = true }
rust-version = { workspace = true }
authors = { workspace = true }
homepage = { workspace = true }
[lib] [lib]
[dependencies] [dependencies]
# Api # Api
axum = { version = "0.7.5", optional = true, features = ["multipart"] } axum = { version = "0.7", optional = true, features = ["multipart"] }
tower = { version = "0.4.13", optional = true } tower = { version = "0.4", optional = true }
tower-http = { version = "0.5.2", optional = true, features = ["trace", "cors", "normalize-path"] } tower-http = { version = "0.5", optional = true, features = ["trace", "cors", "normalize-path"] }
# Async # Async
tokio = { version = "1.38.0", optional = true, features = ["fs"] } tokio = { version = "1.38", optional = true, features = ["fs"] }
tokio-util = { version = "0.7.11", optional = true, features = ["io"] } tokio-util = { version = "0.7", optional = true, features = ["io"] }
async-stream = { version = "0.3", optional = true }
futures = { version = "0.3", optional = true }
# Error handling # Error handling
thiserror = { version = "1.0.61", optional = true } thiserror = { version = "1.0", optional = true }
# LLM
async-openai = { version = "0.23", optional = true }
# Logging # Logging
tracing = { version = "0.1.40", optional = true } tracing = { version = "0.1", optional = true }
tracing-subscriber = { version = "0.3.18", optional = true } tracing-subscriber = { version = "0.3", optional = true }
# Parsing # Parsing
nom = { version = "7.1.3", optional = true } nom = { version = "7.1", optional = true }
# Procedural macros
into-response-derive = { path = "crates/into_response_derive", optional = true }
read-files = { path = "crates/read_files", optional = true }
# Serialization / Deserialization # Serialization / Deserialization
serde = { version = "1.0.203", optional = true, features = ["derive"] } serde = { version = "1.0", optional = true, features = ["derive"] }
# Derive macros # Utils
derive = { path = "derive", optional = true } cfg-if = "1.0.0"
[workspace.dependencies]
syn = "2.0"
quote = "1.0"
[features] [features]
axum = ["dep:axum", "dep:tower", "dep:tower-http", "dep:thiserror", "dep:tracing", "dep:tracing-subscriber", "dep:tokio"] axum = ["dep:axum", "dep:tower", "dep:tower-http", "dep:thiserror", "dep:tracing", "dep:tracing-subscriber", "dep:tokio"]
@ -33,4 +56,6 @@ io = ["dep:tokio", "dep:tokio-util"]
iter = [] iter = []
nom = ["dep:nom"] nom = ["dep:nom"]
serde = ["dep:serde"] serde = ["dep:serde"]
derive = ["dep:derive", "axum", "serde"] derive = ["dep:into-response-derive", "axum", "serde"]
read-files = ["dep:read-files"]
openai = ["dep:async-openai", "dep:async-stream", "dep:futures"]

3
Makefile Normal file
View File

@ -0,0 +1,3 @@
fmt:
cargo clippy --all-targets --all-features
cargo fmt

3
README.md Normal file
View File

@ -0,0 +1,3 @@
# Lib
-_-

View File

@ -0,0 +1,12 @@
[package]
name = "into-response-derive"
version = "1.1.0"
edition = { workspace = true }
rust-version = { workspace = true }
[lib]
proc-macro = true
[dependencies]
syn = { workspace = true }
quote = { workspace = true }

View File

@ -1,16 +1,9 @@
extern crate proc_macro;
use proc_macro::TokenStream; use proc_macro::TokenStream;
use quote::quote; use quote::quote;
use syn::{parse_macro_input, DeriveInput}; use syn::DeriveInput;
#[proc_macro_derive(IntoResponse)] pub fn into_response_derive_impl(input: DeriveInput) -> TokenStream {
pub fn into_response_derive(input: TokenStream) -> TokenStream {
let input = parse_macro_input!(input as DeriveInput);
into_response_derive_impl(input)
}
fn into_response_derive_impl(input: DeriveInput) -> TokenStream {
let name = &input.ident; let name = &input.ident;
let expanded = quote! { let expanded = quote! {

View File

@ -0,0 +1,13 @@
extern crate proc_macro;
use {
    proc_macro::TokenStream,
    syn::{parse_macro_input, DeriveInput},
};
mod derive;
/// Derives an axum `IntoResponse` implementation for the annotated type.
///
/// Parsing happens here at the macro boundary; code generation is
/// delegated to `derive::into_response_derive_impl`.
#[proc_macro_derive(IntoResponse)]
pub fn into_response_derive(input: TokenStream) -> TokenStream {
    // Convert the raw token stream into a typed AST before delegating.
    let input = parse_macro_input!(input as DeriveInput);
    derive::into_response_derive_impl(input)
}

View File

@ -0,0 +1,13 @@
[package]
name = "read-files"
version = "0.1.0"
edition = { workspace = true }
rust-version = { workspace = true }
[lib]
proc-macro = true
[dependencies]
syn = { workspace = true }
quote = { workspace = true }
regex = "1.10"

View File

@ -0,0 +1,34 @@
extern crate proc_macro;
use proc_macro::TokenStream;
use syn::parse_macro_input;
use crate::read_files::read_files_to_string_impl;
mod read_files;
/// Read files from a directory into a HashMap.
/// The key is the file path relative to the root directory.
/// The value is the file contents as a string.
/// # Arguments
/// * `path` - The directory to search for files, relative to the root directory.
/// * `pattern` - The regex pattern to match files against. If missing, all files are matched.
/// # Returns
/// A HashMap containing the file paths and contents.
/// # Example
/// ```
/// use read_files::read_files_to_string;
///
/// let files = read_files_to_string!("./src", ".rs$");
/// assert!(!files.is_empty());
/// ```
/// # Panics
/// If the path is empty. \
/// If the pattern is invalid. \
/// If the path does not exist. \
/// If there are unexpected tokens. \
#[proc_macro]
pub fn read_files_to_string(input: TokenStream) -> TokenStream {
    // Parse the `"path"[, "pattern"]` arguments, then expand to a block
    // expression that builds the HashMap at compile time.
    let args = parse_macro_input!(input as read_files::Args);
    read_files_to_string_impl(args)
}

View File

@ -0,0 +1,124 @@
extern crate proc_macro;
use proc_macro::TokenStream;
use std::{
collections::HashMap,
fs::{metadata, read_dir, read_to_string},
io,
path::{Path, PathBuf},
};
use quote::quote;
use syn::{
parse::{Parse, ParseStream},
LitStr, Token,
};
pub fn read_files_to_string_impl(args: Args) -> TokenStream {
let (keys, values) = split_hashmap(args);
let expanded = quote! {
{
let keys = vec![#( #keys, )*];
let values = vec![#( #values, )*];
keys.into_iter()
.zip(values.into_iter())
.collect::<std::collections::HashMap<&'static str, &'static str>>()
}
};
expanded.into()
}
/// The fully-parsed arguments of the `read_files_to_string!` macro.
pub struct Args {
    // Directory to search, relative to the crate root.
    pub path: String,
    // Regex the file names must match; empty (pattern omitted) matches all.
    pub pattern: String,
}
// Token-level form of the macro input, before lowering into `Args`.
struct Syntax {
    path: LitStr,
    /* Comma separates `path` from the optional `pattern` */
    pattern: Option<LitStr>,
}
impl From<Syntax> for Args {
fn from(syntax: Syntax) -> Self {
Self {
path: syntax.path.value(),
pattern: syntax
.pattern
.map(|pattern| pattern.value())
.unwrap_or_default(),
}
}
}
impl Parse for Args {
fn parse(stream: ParseStream) -> syn::Result<Self> {
if stream.is_empty() {
panic!("Expected path argument");
}
let path: LitStr = stream.parse()?;
if path.value().is_empty() {
panic!("Path must not be empty");
}
let pattern = if stream.peek(Token![,]) {
stream.parse::<Token![,]>()?;
Some(stream.parse()?)
} else {
None
};
let syntax = Syntax { path, pattern };
if !stream.is_empty() {
panic!("Expected end of input");
}
Ok(syntax.into())
}
}
/// Reads every matching file under `args.path` and returns the paths and
/// contents as two parallel vectors: (keys, values).
///
/// Panics when the directory cannot be read or the pattern is invalid,
/// which surfaces as an error from the proc macro at compile time.
pub fn split_hashmap(args: Args) -> (Vec<String>, Vec<String>) {
    let files = read_files_to_string(Path::new(&args.path), &args.pattern).unwrap();
    files
        .into_iter()
        .map(|(path, contents)| (path.to_string_lossy().into_owned(), contents))
        .unzip()
}
/// Find files within a directory and load them into a HashMap.
/// The key is the file path relative to the root directory.
/// The value is the file contents as a string.
/// # Arguments
/// * `path` - The directory to search for files.
/// * `extension` - The pattern to match files against.
/// # Returns
/// A HashMap containing the file paths and contents.
pub fn read_files_to_string(
path: &Path,
pattern: &str,
) -> Result<HashMap<PathBuf, String>, io::Error> {
use regex::Regex;
let mut files: HashMap<PathBuf, String> = HashMap::new();
let dir = read_dir(path)?;
for entry in dir {
let entry = entry?;
let path = entry.path();
let file_name = entry.file_name();
let file_name = file_name.to_string_lossy();
let metadata = metadata(&path)?;
let regex =
Regex::new(pattern).map_err(|e| io::Error::new(io::ErrorKind::InvalidInput, e))?;
if metadata.is_file() && regex.is_match(file_name.as_ref()) {
let file = read_to_string(&path)?;
files.insert(path, file);
} else if metadata.is_dir() {
files.extend(read_files_to_string(&path, pattern)?);
}
}
Ok(files)
}

View File

@ -0,0 +1,13 @@
use read_files::read_files_to_string;
#[test]
fn test_load_files() {
    // With a pattern: only `.rs` files under ./src should be embedded.
    let files = read_files_to_string!("./src", ".rs$");
    assert!(!files.is_empty());
}
#[test]
fn test_load_all_files() {
    // Without a pattern: every file under ./src is embedded.
    let files = read_files_to_string!("./src");
    assert!(!files.is_empty());
}

46
derive/Cargo.lock generated
View File

@ -1,46 +0,0 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3
[[package]]
name = "derive"
version = "1.0.0"
dependencies = [
"quote",
"syn",
]
[[package]]
name = "proc-macro2"
version = "1.0.85"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "22244ce15aa966053a896d1accb3a6e68469b97c7f33f284b99f0d576879fc23"
dependencies = [
"unicode-ident",
]
[[package]]
name = "quote"
version = "1.0.36"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0fa76aaf39101c457836aec0ce2316dbdc3ab723cdda1c6bd4e6ad4208acaca7"
dependencies = [
"proc-macro2",
]
[[package]]
name = "syn"
version = "2.0.67"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ff8655ed1d86f3af4ee3fd3263786bc14245ad17c4c7e85ba7187fb3ae028c90"
dependencies = [
"proc-macro2",
"quote",
"unicode-ident",
]
[[package]]
name = "unicode-ident"
version = "1.0.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b"

View File

@ -1,12 +0,0 @@
[package]
name = "derive"
version = "1.0.0"
edition = "2021"
authors = ["Martin Berg Alstad"]
[lib]
proc-macro = true
[dependencies]
syn = "2.0.66"
quote = "1.0.36"

View File

@ -286,9 +286,10 @@ checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe"
[[package]] [[package]]
name = "lib" name = "lib"
version = "1.3.5" version = "1.4.1-hotfix-hotfix-2"
dependencies = [ dependencies = [
"axum", "axum",
"cfg-if",
"thiserror", "thiserror",
"tokio", "tokio",
"tower", "tower",

1554
examples/openai-assistant/Cargo.lock generated Normal file

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,10 @@
[package]
name = "openai-assistant"
version = "0.1.0"
edition = "2021"
[dependencies]
lib = { path = "../..", features = ["openai", "io"] }
tokio = { version = "1.38.0", features = ["rt-multi-thread"] }
futures = "0.3.0"
async-openai = "0.23.0"

View File

@ -0,0 +1,32 @@
use futures::StreamExt;
use lib::{
openai::{assistants::Assistant, streams::TokenStream},
prompt_read_line,
};
/// Expects the OPENAI_API_KEY environment variable to be set
#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // NOTE(review): the lib's `Assistant::new` also takes a client as its
    // first argument — confirm this example matches the published version.
    let assistant = Assistant::new("gpt-4o-mini", "Be a helpful assistant").await?;
    let thread = assistant.create_thread().await?;
    // Chat loop: ends when the prompt yields no input.
    while let Some(input) = get_user_input() {
        let mut stream: TokenStream = thread.run_stream(&input).await?.into();
        // Print tokens as they stream in; per-token errors are skipped.
        while let Some(result) = stream.next().await {
            if let Ok(text) = result {
                print!("{}", text);
            }
        }
        println!();
    }
    // Clean up the server-side assistant before exiting.
    assistant.delete().await?;
    Ok(())
}
/// Prompts the user and returns the trimmed input line.
///
/// Returns `None` when reading fails or the user submits an empty line,
/// which lets the caller's chat loop terminate.
fn get_user_input() -> Option<String> {
    // `read_line` keeps the trailing newline, so the raw input is never
    // empty — trim it first, otherwise the empty-line exit can never fire.
    prompt_read_line!("> ")
        .ok()
        .map(|input| input.trim_end().to_string())
        .filter(|input| !input.is_empty())
}

31
src/io/console.rs Normal file
View File

@ -0,0 +1,31 @@
/// Flushes stdout (so a preceding `print!` prompt becomes visible), then
/// reads one line from stdin. Expands to an `std::io::Result<String>`
/// containing the raw line, trailing newline included.
#[macro_export]
macro_rules! _read_line {
    () => {
        std::io::Write::flush(&mut std::io::stdout()).and_then(|_| {
            let mut input = String::new();
            std::io::Stdin::read_line(&mut std::io::stdin(), &mut input).map(|_| input)
        })
    };
}
/// Prints a prompt with `print!` (no trailing newline), then reads one
/// line from stdin. Expands to an `std::io::Result<String>`.
#[macro_export]
macro_rules! prompt_read_line {
    ($($expr:expr),*) => {{
        print!($($expr),*);
        $crate::_read_line!()
    }};
}
/// Prints a prompt with `println!` (prompt on its own line), then reads
/// one line from stdin. Expands to an `std::io::Result<String>`.
#[macro_export]
macro_rules! promptln_read_line {
    ($($expr:expr),*) => {{
        println!($($expr),*);
        $crate::_read_line!()
    }};
}

View File

@ -1 +1,2 @@
pub mod console;
pub mod file; pub mod file;

View File

@ -1,16 +1,20 @@
#![allow(dead_code)] #![allow(dead_code)]
#[cfg(all(feature = "derive", feature = "serde"))]
pub extern crate into_response_derive;
#[cfg(feature = "read-files")]
pub extern crate read_files;
#[cfg(feature = "axum")] #[cfg(feature = "axum")]
pub mod axum; pub mod axum;
#[cfg(feature = "io")] #[cfg(feature = "io")]
pub mod io; pub mod io;
#[cfg(feature = "nom")] #[cfg(feature = "nom")]
pub mod nom; pub mod nom;
#[cfg(feature = "openai")]
pub mod openai;
#[cfg(feature = "serde")] #[cfg(feature = "serde")]
pub mod serde; pub mod serde;
pub mod traits; pub mod traits;
#[cfg(feature = "iter")] #[cfg(feature = "iter")]
pub mod vector; pub mod vector;
#[cfg(all(feature = "derive", feature = "serde"))]
pub extern crate derive;

View File

@ -31,8 +31,7 @@ pub fn parenthesized<'a, Parser, R>(inner: Parser) -> impl FnMut(&'a str) -> IRe
where where
Parser: FnMut(&'a str) -> IResult<&'a str, R>, Parser: FnMut(&'a str) -> IResult<&'a str, R>,
{ {
// TODO move trim out of here delimited(char('('), inner, char(')'))
delimited(char('('), trim(inner), char(')'))
} }
/// Take where the predicate is true and the length is exactly `n` /// Take where the predicate is true and the length is exactly `n`
@ -57,9 +56,10 @@ where
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*;
use nom::{bytes::complete::take_while, sequence::tuple}; use nom::{bytes::complete::take_while, sequence::tuple};
use super::*;
#[test] #[test]
fn test_trim_both_sides() { fn test_trim_both_sides() {
let input = " test "; let input = " test ";

125
src/openai/assistants.rs Normal file
View File

@ -0,0 +1,125 @@
use async_openai::{
types::{
AssistantEventStream, AssistantObject, CreateAssistantRequest, CreateMessageRequest,
CreateRunRequest, CreateThreadRequest, DeleteAssistantResponse, DeleteThreadResponse,
MessageObject, MessageRole, ThreadObject,
},
Client,
};
use crate::openai::types::{OpenAIClient, OpenAIResult};
/// A handle to a server-side OpenAI assistant, owning its own client.
#[derive(Clone, Debug)]
pub struct Assistant {
    client: OpenAIClient,
    assistant_object: AssistantObject,
}
/// A conversation thread tied to an assistant; borrows the client, so it
/// cannot outlive the `Assistant` (or client) it was created from.
#[derive(Clone, Debug)]
pub struct Thread<'client> {
    client: &'client OpenAIClient,
    assistant_id: String,
    thread_object: ThreadObject,
}
impl Assistant {
    /// Creates a new assistant on the server with the given model and
    /// instructions, cloning the client into the returned handle.
    pub async fn new(
        client: &OpenAIClient,
        model: impl Into<String>,
        instructions: impl Into<String>,
    ) -> OpenAIResult<Self> {
        let request = CreateAssistantRequest {
            model: model.into(),
            instructions: Some(instructions.into()),
            ..Default::default()
        };
        let assistant_object = client.assistants().create(request).await?;
        Ok(Self {
            client: client.clone(),
            assistant_object,
        })
    }

    /// Fetches an existing assistant by id, using a default-configured
    /// client rather than a caller-supplied one.
    pub async fn from_id(id: impl AsRef<str>) -> OpenAIResult<Self> {
        let client = Client::new();
        let assistant_object = client.assistants().retrieve(id.as_ref()).await?;
        Ok(Self {
            client,
            assistant_object,
        })
    }

    /// Starts a fresh conversation thread owned by this assistant.
    pub async fn create_thread(&self) -> OpenAIResult<Thread> {
        Thread::new(&self.client, self.id()).await
    }

    /// Deletes the assistant on the server, consuming this handle.
    pub async fn delete(self) -> OpenAIResult<DeleteAssistantResponse> {
        self.client.assistants().delete(self.id()).await
    }

    /// The server-side assistant id.
    pub fn id(&self) -> &str {
        &self.assistant_object.id
    }
}
impl<'client> Thread<'client> {
    /// Creates a new, empty thread for the given assistant.
    pub async fn new(
        client: &'client OpenAIClient,
        assistant_id: impl Into<String>,
    ) -> OpenAIResult<Self> {
        let thread_object = client
            .threads()
            .create(CreateThreadRequest::default())
            .await?;
        Ok(Self {
            client,
            assistant_id: assistant_id.into(),
            thread_object,
        })
    }

    /// Attaches to an existing thread by id.
    pub async fn from_id(
        client: &'client OpenAIClient,
        assistant_id: impl Into<String>,
        thread_id: impl AsRef<str>,
    ) -> OpenAIResult<Self> {
        let thread_object = client.threads().retrieve(thread_id.as_ref()).await?;
        Ok(Self {
            client,
            assistant_id: assistant_id.into(),
            thread_object,
        })
    }

    /// Posts `prompt` as a user message, then starts a streaming run on
    /// this thread and returns the event stream.
    pub async fn run_stream(&self, prompt: impl AsRef<str>) -> OpenAIResult<AssistantEventStream> {
        self.create_message(prompt.as_ref()).await?;
        let request = CreateRunRequest {
            assistant_id: self.assistant_id.clone(),
            ..Default::default()
        };
        self.client
            .threads()
            .runs(self.id())
            .create_stream(request)
            .await
    }

    /// The server-side thread id.
    pub fn id(&self) -> &str {
        &self.thread_object.id
    }

    /// Appends a user message to this thread.
    async fn create_message(&self, prompt: &str) -> OpenAIResult<MessageObject> {
        let request = CreateMessageRequest {
            role: MessageRole::User,
            content: prompt.into(),
            ..Default::default()
        };
        self.client
            .threads()
            .messages(&self.thread_object.id)
            .create(request)
            .await
    }

    /// Deletes the thread on the server.
    // NOTE(review): never called within this file — confirm whether it
    // should be `pub` or removed.
    async fn delete(&self) -> OpenAIResult<DeleteThreadResponse> {
        self.client.threads().delete(self.id()).await
    }
}

50
src/openai/chat.rs Normal file
View File

@ -0,0 +1,50 @@
use async_openai::types::{
ChatCompletionRequestMessage, ChatCompletionRequestUserMessage,
ChatCompletionRequestUserMessageContent, ChatCompletionResponseStream,
CreateChatCompletionRequest,
};
use crate::openai::types::{OpenAIClient, OpenAIResult};
/// Sends a single-user-message chat completion and returns the reply text.
///
/// # Arguments
/// * `client` - The OpenAI client to send the request with.
/// * `model` - The model name, e.g. "gpt-4o-mini".
/// * `prompt` - The user's message content.
///
/// # Returns
/// The assistant's reply, or an empty string when the response contains no
/// choices or no message content. (Previously an empty `choices` array
/// would panic via `choices[0]`.)
pub async fn chat(
    client: &OpenAIClient,
    model: impl Into<String>,
    prompt: impl Into<String>,
) -> OpenAIResult<String> {
    let response = client
        .chat()
        .create(CreateChatCompletionRequest {
            model: model.into(),
            messages: vec![create_user_message(prompt)],
            ..Default::default()
        })
        .await?;
    // `.first()` avoids the panic that indexing an empty `choices` causes.
    Ok(response
        .choices
        .first()
        .and_then(|choice| choice.message.content.clone())
        .unwrap_or_default())
}
/// Starts a streaming chat completion for a single user prompt.
///
/// Returns the raw response stream; callers can convert it (e.g. into a
/// `TokenStream`) to consume plain text deltas.
pub async fn chat_stream(
    client: &OpenAIClient,
    model: impl Into<String>,
    prompt: impl Into<String>,
) -> OpenAIResult<ChatCompletionResponseStream> {
    let request = CreateChatCompletionRequest {
        model: model.into(),
        stream: Some(true),
        messages: vec![create_user_message(prompt)],
        ..Default::default()
    };
    client.chat().create_stream(request).await
}
/// Builds a user-role chat message from the given prompt text.
fn create_user_message(prompt: impl Into<String>) -> ChatCompletionRequestMessage {
    ChatCompletionRequestMessage::User(ChatCompletionRequestUserMessage {
        content: ChatCompletionRequestUserMessageContent::from(prompt.into()),
        name: None,
    })
}

4
src/openai/mod.rs Normal file
View File

@ -0,0 +1,4 @@
pub mod assistants;
pub mod chat;
pub mod streams;
pub mod types;

129
src/openai/streams.rs Normal file
View File

@ -0,0 +1,129 @@
use std::{
pin::Pin,
task::{Context, Poll},
};
use async_openai::types::ChatCompletionResponseStream;
use async_openai::{
error::OpenAIError,
types::{AssistantEventStream, AssistantStreamEvent, MessageDeltaContent, MessageDeltaObject},
};
use async_stream::try_stream;
use futures::{Stream, StreamExt};
use crate::openai::types::OpenAIResult;
/// A boxed, pinned stream of text tokens produced by an OpenAI response
/// stream, so callers only deal with `String` items.
pub struct TokenStream(Pin<Box<dyn Stream<Item = OpenAIResult<String>> + Send + 'static>>);
impl TokenStream {
    /// Boxes and pins any compatible stream.
    pub fn new(stream: impl Stream<Item = OpenAIResult<String>> + Send + 'static) -> Self {
        Self(Box::pin(stream))
    }
}
impl Stream for TokenStream {
    type Item = OpenAIResult<String>;
    // Delegate polling straight to the boxed inner stream.
    fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
        self.0.as_mut().poll_next(cx)
    }
}
impl From<AssistantEventStream> for TokenStream {
    /// Converts an assistant event stream into a token stream, yielding
    /// only the text of `ThreadMessageDelta` events; other event kinds
    /// and per-event errors are silently skipped.
    fn from(mut value: AssistantEventStream) -> Self {
        Self::new(try_stream! {
            while let Some(event) = value.next().await {
                if let Ok(AssistantStreamEvent::ThreadMessageDelta(message)) = event {
                    if let Ok(text) = get_message(message) {
                        yield text;
                    };
                }
            }
        })
    }
}
impl From<ChatCompletionResponseStream> for TokenStream {
    /// Converts a chat-completion stream into a token stream, yielding
    /// each content delta as it arrives; per-event errors are skipped.
    fn from(mut value: ChatCompletionResponseStream) -> Self {
        Self::new(try_stream! {
            while let Some(event) = value.next().await {
                if let Ok(event) = event {
                    // `.first()` guards against an empty `choices` array,
                    // which `choices[0]` would panic on.
                    if let Some(text) =
                        event.choices.first().and_then(|choice| choice.delta.content.clone())
                    {
                        yield text;
                    }
                }
            }
        })
    }
}
cfg_if::cfg_if! {
    if #[cfg(feature = "axum")] {
        use axum::response::sse::Event;

        /// A boxed stream of axum SSE [`Event`]s, convertible from the
        /// OpenAI assistant and chat-completion streams. Only compiled
        /// when the `axum` feature is enabled.
        pub struct EventStream<E>(Pin<Box<dyn Stream<Item = Result<Event, E>> + Send + 'static>>);

        impl<E> EventStream<E> {
            /// Boxes and pins any compatible stream.
            pub fn new(stream: impl Stream<Item = Result<Event, E>> + Send + 'static) -> Self {
                Self(Box::pin(stream))
            }
        }

        impl<E> Stream for EventStream<E> {
            type Item = Result<Event, E>;

            // Delegate polling straight to the boxed inner stream.
            fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
                self.0.as_mut().poll_next(cx)
            }
        }

        impl<E> From<AssistantEventStream> for EventStream<E>
        where
            E: Send + 'static,
        {
            /// Converts assistant message deltas into SSE data events;
            /// other event kinds and per-event errors are skipped.
            fn from(mut value: AssistantEventStream) -> Self {
                Self::new(try_stream! {
                    while let Some(event) = value.next().await {
                        if let Ok(AssistantStreamEvent::ThreadMessageDelta(message)) = event {
                            if let Ok(text) = get_message(message) {
                                yield Event::default().data(text);
                            };
                        }
                    }
                })
            }
        }

        impl<E> From<ChatCompletionResponseStream> for EventStream<E>
        where
            E: Send + 'static,
        {
            /// Converts chat-completion content deltas into SSE data events.
            fn from(mut value: ChatCompletionResponseStream) -> Self {
                Self::new(try_stream! {
                    while let Some(event) = value.next().await {
                        if let Ok(event) = event {
                            // `.first()` guards against an empty `choices`
                            // array, which `choices[0]` would panic on.
                            if let Some(text) =
                                event.choices.first().and_then(|choice| choice.delta.content.clone())
                            {
                                yield Event::default().data(text);
                            }
                        }
                    }
                })
            }
        }
    }
}
/// Extracts the text payload from a message delta.
///
/// Fails with `OpenAIError::StreamError("Expected content")` when the
/// delta carries no content at all, and with
/// `OpenAIError::StreamError("Expected text message")` when the content
/// is present but is not (complete) text.
fn get_message(message: MessageDeltaObject) -> OpenAIResult<String> {
    let first_content = message.delta.content.and_then(|content| content.first().cloned());
    match first_content {
        None => Err(OpenAIError::StreamError("Expected content".into())),
        Some(MessageDeltaContent::Text(content)) => content
            .text
            .and_then(|text| text.value)
            .ok_or(OpenAIError::StreamError("Expected text message".into())),
        Some(_) => Err(OpenAIError::StreamError("Expected text message".into())),
    }
}

4
src/openai/types.rs Normal file
View File

@ -0,0 +1,4 @@
use async_openai::{config::OpenAIConfig, error::OpenAIError, Client};
/// OpenAI client specialized with the default configuration type.
pub type OpenAIClient = Client<OpenAIConfig>;
/// Result alias for operations that can fail with an [`OpenAIError`].
pub type OpenAIResult<T> = Result<T, OpenAIError>;