Compare commits

3 commits:

  cd558eebfa
  347314460a
  ee487540ac
.idea/lib.iml (generated, 2 changes)

@@ -9,13 +9,11 @@
       <sourceFolder url="file://$MODULE_DIR$/crates/read_files/src" isTestSource="false" />
       <sourceFolder url="file://$MODULE_DIR$/crates/read_files/tests" isTestSource="true" />
       <sourceFolder url="file://$MODULE_DIR$/crates/read_files/tests" isTestSource="true" />
-      <sourceFolder url="file://$MODULE_DIR$/examples/openai-assistant/src" isTestSource="false" />
       <sourceFolder url="file://$MODULE_DIR$/tests" isTestSource="true" />
       <excludeFolder url="file://$MODULE_DIR$/target" />
       <excludeFolder url="file://$MODULE_DIR$/examples/multipart_file/target" />
       <excludeFolder url="file://$MODULE_DIR$/crates/into_response_derive/target" />
       <excludeFolder url="file://$MODULE_DIR$/crates/read_files/target" />
-      <excludeFolder url="file://$MODULE_DIR$/examples/openai-assistant/target" />
     </content>
     <orderEntry type="inheritedJdk" />
     <orderEntry type="sourceFolder" forTests="false" />
Cargo.lock (generated, 838 changes)

File diff suppressed because it is too large
Cargo.toml

@@ -10,7 +10,7 @@ homepage = "emberal.github.io"
 
 [package]
 name = "lib"
-version = "1.4.1-hotfix-hotfix-2"
+version = "1.4.3"
 description = "A library with utilities and helper fuctions."
 edition = { workspace = true }
 rust-version = { workspace = true }
@@ -27,12 +27,8 @@ tower-http = { version = "0.5", optional = true, features = ["trace", "cors", "n
 # Async
 tokio = { version = "1.38", optional = true, features = ["fs"] }
 tokio-util = { version = "0.7", optional = true, features = ["io"] }
-async-stream = { version = "0.3", optional = true }
-futures = { version = "0.3", optional = true }
 # Error handling
 thiserror = { version = "1.0", optional = true }
-# LLM
-async-openai = { version = "0.23", optional = true }
 # Logging
 tracing = { version = "0.1", optional = true }
 tracing-subscriber = { version = "0.3", optional = true }
@@ -43,8 +39,6 @@ into-response-derive = { path = "crates/into_response_derive", optional = true }
read-files = { path = "crates/read_files", optional = true }
# Serialization / Deserialization
serde = { version = "1.0", optional = true, features = ["derive"] }
# Utils
cfg-if = "1.0.0"

[workspace.dependencies]
syn = "2.0"
@@ -58,4 +52,3 @@ nom = ["dep:nom"]
 serde = ["dep:serde"]
 derive = ["dep:into-response-derive", "axum", "serde"]
 read-files = ["dep:read-files"]
-openai = ["dep:async-openai", "dep:async-stream", "dep:futures"]
@@ -7,8 +7,8 @@ pub fn into_response_derive_impl(input: DeriveInput) -> TokenStream {
     let name = &input.ident;
 
     let expanded = quote! {
-        impl IntoResponse for #name {
-            fn into_response(self) -> Response {
+        impl axum::response::IntoResponse for #name {
+            fn into_response(self) -> axum::response::Response {
                 let version = env!("CARGO_PKG_VERSION");
                 lib::serde::response::BaseResponse::new(version, self)
                     .into_response()
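The two rewritten lines qualify the trait and its return type with full paths, so the generated impl no longer depends on what the caller has imported. A sketch of the resulting expansion for a hypothetical Health type (illustrative only, not part of this diff):

// Hypothetical input type; any type deriving Serialize works the same way.
#[derive(serde::Serialize)]
struct Health {
    status: &'static str,
}

// What `#[derive(IntoResponse)]` now expands to for `Health`: fully
// qualified paths mean no `use axum::response::{IntoResponse, Response};`
// is needed at the derive site.
impl axum::response::IntoResponse for Health {
    fn into_response(self) -> axum::response::Response {
        let version = env!("CARGO_PKG_VERSION");
        lib::serde::response::BaseResponse::new(version, self).into_response()
    }
}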
examples/multipart_file/Cargo.lock (generated, 3 changes)

@@ -286,10 +286,9 @@ checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe"
 
 [[package]]
 name = "lib"
-version = "1.4.1-hotfix-hotfix-2"
+version = "1.4.3"
 dependencies = [
  "axum",
  "cfg-if",
  "thiserror",
  "tokio",
  "tower",
examples/openai-assistant/Cargo.lock (generated, 1554 changes)

File diff suppressed because it is too large
examples/openai-assistant/Cargo.toml (deleted)

@@ -1,10 +0,0 @@
[package]
name = "openai-assistant"
version = "0.1.0"
edition = "2021"

[dependencies]
lib = { path = "../..", features = ["openai", "io"] }
tokio = { version = "1.38.0", features = ["rt-multi-thread"] }
futures = "0.3.0"
async-openai = "0.23.0"
examples/openai-assistant/src/main.rs (deleted)

@@ -1,32 +0,0 @@
use futures::StreamExt;

use lib::{
    openai::{assistants::Assistant, streams::TokenStream},
    prompt_read_line,
};

/// Expects the OPENAI_API_KEY environment variable to be set
#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    let assistant = Assistant::new("gpt-4o-mini", "Be a helpful assistant").await?;
    let thread = assistant.create_thread().await?;

    while let Some(input) = get_user_input() {
        let mut stream: TokenStream = thread.run_stream(&input).await?.into();
        while let Some(result) = stream.next().await {
            if let Ok(text) = result {
                print!("{}", text);
            }
        }
        println!();
    }
    assistant.delete().await?;

    Ok(())
}

fn get_user_input() -> Option<String> {
    prompt_read_line!("> ")
        .ok()
        .take_if(|input| !input.is_empty())
}
src/io/console.rs (deleted)

@@ -1,31 +0,0 @@
#[macro_export]
macro_rules! _read_line {
    () => {
        match std::io::Write::flush(&mut std::io::stdout()) {
            Ok(_) => {
                let mut input = String::new();
                match std::io::Stdin::read_line(&mut std::io::stdin(), &mut input) {
                    Ok(_) => Ok::<String, std::io::Error>(input),
                    Err(error) => Err(error),
                }
            }
            Err(error) => Err(error),
        }
    };
}

#[macro_export]
macro_rules! prompt_read_line {
    ($($expr:expr),*) => {{
        print!($($expr),*);
        $crate::_read_line!()
    }};
}

#[macro_export]
macro_rules! promptln_read_line {
    ($($expr:expr),*) => {{
        println!($($expr),*);
        $crate::_read_line!()
    }};
}
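A usage sketch for the deleted prompt macros (assumes a lib version before this change, where the console module still exists). _read_line! flushes stdout before blocking on stdin, which is why a print!-style prompt actually appears on the same line before input is read:

use lib::prompt_read_line;

fn main() -> std::io::Result<()> {
    // Prints "name: " without a newline, flushes, then reads a line.
    let name = prompt_read_line!("name: ")?;
    println!("hello, {}", name.trim());
    Ok(())
}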
src/io/mod.rs

@@ -1,2 +1 @@
-pub mod console;
 pub mod file;
src/lib.rs

@@ -11,8 +11,6 @@ pub mod axum;
 pub mod io;
 #[cfg(feature = "nom")]
 pub mod nom;
-#[cfg(feature = "openai")]
-pub mod openai;
 #[cfg(feature = "serde")]
 pub mod serde;
 pub mod traits;
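The cfg gate and the Cargo feature have to leave together; a short sketch of why (general Rust behavior, not stated in the diff):

// If src/openai/ were deleted but this gate kept, any build with
// `--features openai` would fail on an unresolved module. Conversely,
// keeping the cfg while dropping `openai` from [features] trips the
// `unexpected_cfgs` lint on Rust 1.80+, which checks feature names.
#[cfg(feature = "openai")]
pub mod openai;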
src/openai/assistants.rs (deleted)

@@ -1,125 +0,0 @@
use async_openai::{
    types::{
        AssistantEventStream, AssistantObject, CreateAssistantRequest, CreateMessageRequest,
        CreateRunRequest, CreateThreadRequest, DeleteAssistantResponse, DeleteThreadResponse,
        MessageObject, MessageRole, ThreadObject,
    },
    Client,
};

use crate::openai::types::{OpenAIClient, OpenAIResult};

#[derive(Clone, Debug)]
pub struct Assistant {
    client: OpenAIClient,
    assistant_object: AssistantObject,
}

#[derive(Clone, Debug)]
pub struct Thread<'client> {
    client: &'client OpenAIClient,
    assistant_id: String,
    thread_object: ThreadObject,
}

impl Assistant {
    pub async fn new(
        client: &OpenAIClient,
        model: impl Into<String>,
        instructions: impl Into<String>,
    ) -> OpenAIResult<Self> {
        let assistant_object = client
            .assistants()
            .create(CreateAssistantRequest {
                model: model.into(),
                instructions: Some(instructions.into()),
                ..Default::default()
            })
            .await?;
        Ok(Self {
            client: client.clone(),
            assistant_object,
        })
    }

    pub async fn from_id(id: impl AsRef<str>) -> OpenAIResult<Self> {
        let client = Client::new();
        let assistant_object = client.assistants().retrieve(id.as_ref()).await?;
        Ok(Self {
            client,
            assistant_object,
        })
    }

    pub async fn create_thread(&self) -> OpenAIResult<Thread> {
        Thread::new(&self.client, self.id()).await
    }

    pub async fn delete(self) -> OpenAIResult<DeleteAssistantResponse> {
        self.client.assistants().delete(self.id()).await
    }

    pub fn id(&self) -> &str {
        &self.assistant_object.id
    }
}

impl<'client> Thread<'client> {
    pub async fn new(
        client: &'client OpenAIClient,
        assistant_id: impl Into<String>,
    ) -> OpenAIResult<Self> {
        Ok(Self {
            client,
            assistant_id: assistant_id.into(),
            thread_object: client
                .threads()
                .create(CreateThreadRequest::default())
                .await?,
        })
    }

    pub async fn from_id(
        client: &'client OpenAIClient,
        assistant_id: impl Into<String>,
        thread_id: impl AsRef<str>,
    ) -> OpenAIResult<Self> {
        Ok(Self {
            client,
            assistant_id: assistant_id.into(),
            thread_object: client.threads().retrieve(thread_id.as_ref()).await?,
        })
    }

    pub async fn run_stream(&self, prompt: impl AsRef<str>) -> OpenAIResult<AssistantEventStream> {
        self.create_message(prompt.as_ref()).await?;
        self.client
            .threads()
            .runs(self.id())
            .create_stream(CreateRunRequest {
                assistant_id: self.assistant_id.clone(),
                ..Default::default()
            })
            .await
    }

    pub fn id(&self) -> &str {
        &self.thread_object.id
    }

    async fn create_message(&self, prompt: &str) -> OpenAIResult<MessageObject> {
        self.client
            .threads()
            .messages(&self.thread_object.id)
            .create(CreateMessageRequest {
                role: MessageRole::User,
                content: prompt.into(),
                ..Default::default()
            })
            .await
    }

    async fn delete(&self) -> OpenAIResult<DeleteThreadResponse> {
        self.client.threads().delete(self.id()).await
    }
}
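A usage sketch for the deleted assistant API, following the signatures shown above (assumes a lib version before this change with the openai feature enabled and OPENAI_API_KEY set; the model name is illustrative). TokenStream is defined in src/openai/streams.rs below:

use async_openai::{config::OpenAIConfig, Client};
use futures::StreamExt;
use lib::openai::{assistants::Assistant, streams::TokenStream};

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    let client: Client<OpenAIConfig> = Client::new();
    // `new` here takes an explicit client, per the signature above.
    let assistant = Assistant::new(&client, "gpt-4o-mini", "Be brief").await?;
    let thread = assistant.create_thread().await?;

    // The assistant event stream converts into a plain stream of text tokens.
    let mut tokens: TokenStream = thread.run_stream("Say hi").await?.into();
    while let Some(token) = tokens.next().await {
        print!("{}", token?);
    }
    assistant.delete().await?;
    Ok(())
}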
src/openai/chat.rs (deleted)

@@ -1,50 +0,0 @@
use async_openai::types::{
    ChatCompletionRequestMessage, ChatCompletionRequestUserMessage,
    ChatCompletionRequestUserMessageContent, ChatCompletionResponseStream,
    CreateChatCompletionRequest,
};

use crate::openai::types::{OpenAIClient, OpenAIResult};

pub async fn chat(
    client: &OpenAIClient,
    model: impl Into<String>,
    prompt: impl Into<String>,
) -> OpenAIResult<String> {
    Ok(client
        .chat()
        .create(CreateChatCompletionRequest {
            model: model.into(),
            messages: vec![create_user_message(prompt)],
            ..Default::default()
        })
        .await?
        .choices[0]
        .message
        .content
        .clone()
        .unwrap_or_default())
}

pub async fn chat_stream(
    client: &OpenAIClient,
    model: impl Into<String>,
    prompt: impl Into<String>,
) -> OpenAIResult<ChatCompletionResponseStream> {
    client
        .chat()
        .create_stream(CreateChatCompletionRequest {
            model: model.into(),
            stream: Some(true),
            messages: vec![create_user_message(prompt)],
            ..Default::default()
        })
        .await
}

fn create_user_message(prompt: impl Into<String>) -> ChatCompletionRequestMessage {
    ChatCompletionRequestMessage::User(ChatCompletionRequestUserMessage {
        content: ChatCompletionRequestUserMessageContent::from(prompt.into()),
        name: None,
    })
}
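A sketch of how the deleted one-shot helper was called (same assumptions as above; model name illustrative):

use lib::openai::{chat::chat, types::OpenAIClient};

// Returns the first choice's text, or an empty String if the
// response carried no content (per the unwrap_or_default above).
async fn ask(client: &OpenAIClient) -> Result<String, Box<dyn std::error::Error>> {
    Ok(chat(client, "gpt-4o-mini", "What is 2 + 2?").await?)
}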
src/openai/mod.rs (deleted)

@@ -1,4 +0,0 @@
pub mod assistants;
pub mod chat;
pub mod streams;
pub mod types;
src/openai/streams.rs (deleted)

@@ -1,129 +0,0 @@
use std::{
    pin::Pin,
    task::{Context, Poll},
};

use async_openai::types::ChatCompletionResponseStream;
use async_openai::{
    error::OpenAIError,
    types::{AssistantEventStream, AssistantStreamEvent, MessageDeltaContent, MessageDeltaObject},
};
use async_stream::try_stream;
use futures::{Stream, StreamExt};

use crate::openai::types::OpenAIResult;

pub struct TokenStream(Pin<Box<dyn Stream<Item = OpenAIResult<String>> + Send + 'static>>);

impl TokenStream {
    pub fn new(stream: impl Stream<Item = OpenAIResult<String>> + Send + 'static) -> Self {
        Self(Box::pin(stream))
    }
}

impl Stream for TokenStream {
    type Item = OpenAIResult<String>;
    fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
        self.0.as_mut().poll_next(cx)
    }
}

impl From<AssistantEventStream> for TokenStream {
    fn from(mut value: AssistantEventStream) -> Self {
        Self::new(try_stream! {
            while let Some(event) = value.next().await {
                if let Ok(AssistantStreamEvent::ThreadMessageDelta(message)) = event {
                    if let Ok(text) = get_message(message) {
                        yield text;
                    };
                }
            }
        })
    }
}

impl From<ChatCompletionResponseStream> for TokenStream {
    fn from(mut value: ChatCompletionResponseStream) -> Self {
        Self::new(try_stream! {
            while let Some(event) = value.next().await {
                if let Ok(event) = event {
                    if let Some(text) = event.choices[0].delta.content.clone() {
                        yield text;
                    };
                }
            }
        })
    }
}

cfg_if::cfg_if! {
    if #[cfg(feature = "axum")] {
        use axum::response::sse::Event;

        pub struct EventStream<E>(Pin<Box<dyn Stream<Item = Result<Event, E>> + Send + 'static>>);

        impl<E> EventStream<E> {
            pub fn new(stream: impl Stream<Item = Result<Event, E>> + Send + 'static) -> Self {
                Self(Box::pin(stream))
            }
        }

        impl<E> Stream for EventStream<E> {
            type Item = Result<Event, E>;
            fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
                self.0.as_mut().poll_next(cx)
            }
        }

        impl<E> From<AssistantEventStream> for EventStream<E>
        where
            E: Send + 'static,
        {
            fn from(mut value: AssistantEventStream) -> Self {
                Self::new(try_stream! {
                    while let Some(event) = value.next().await {
                        if let Ok(AssistantStreamEvent::ThreadMessageDelta(message)) = event {
                            if let Ok(text) = get_message(message) {
                                yield Event::default().data(text);
                            };
                        }
                    }
                })
            }
        }

        impl<E> From<ChatCompletionResponseStream> for EventStream<E>
        where
            E: Send + 'static,
        {
            fn from(mut value: ChatCompletionResponseStream) -> Self {
                Self::new(try_stream! {
                    while let Some(event) = value.next().await {
                        if let Ok(event) = event {
                            if let Some(text) = event.choices[0].delta.content.clone() {
                                yield Event::default().data(text);
                            };
                        }
                    }
                })
            }
        }
    }
}

fn get_message(message: MessageDeltaObject) -> OpenAIResult<String> {
    let content = message
        .delta
        .content
        .and_then(|content| content.first().cloned())
        .ok_or(OpenAIError::StreamError("Expected content".into()))?;

    if let MessageDeltaContent::Text(content) = content {
        content
            .text
            .and_then(|text| text.value)
            .ok_or(OpenAIError::StreamError("Expected text message".into()))
    } else {
        Err(OpenAIError::StreamError("Expected text message".into()))
    }
}
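A sketch of what the axum-gated EventStream enabled: bridging a completion stream into a server-sent-events response. Hypothetical endpoint body, assuming a lib version before this change with the axum and openai features plus axum 0.7:

use async_openai::error::OpenAIError;
use axum::response::sse::Sse;
use lib::openai::{chat::chat_stream, streams::EventStream, types::OpenAIClient};

async fn chat_sse(client: &OpenAIClient, prompt: String) -> Sse<EventStream<OpenAIError>> {
    let stream = chat_stream(client, "gpt-4o-mini", prompt)
        .await
        .expect("failed to start completion stream");
    // The From impl above wraps each text delta in an SSE `Event`.
    Sse::new(stream.into())
}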
src/openai/types.rs (deleted)

@@ -1,4 +0,0 @@
use async_openai::{config::OpenAIConfig, error::OpenAIError, Client};

pub type OpenAIClient = Client<OpenAIConfig>;
pub type OpenAIResult<T> = Result<T, OpenAIError>;
src/serde/response.rs

@@ -16,6 +16,12 @@ impl<T: Serialize> BaseResponse<T> {
     }
 }
 
+impl<T: Serialize> From<T> for BaseResponse<T> {
+    fn from(body: T) -> Self {
+        Self::new(env!("CARGO_PKG_VERSION"), body)
+    }
+}
+
 #[cfg(test)]
 mod tests {
     use super::*;
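The added From impl lets callers wrap a payload with .into() instead of threading the version string through every call site. A sketch (the Health type is illustrative):

#[derive(serde::Serialize)]
struct Health {
    status: &'static str,
}

fn respond() -> lib::serde::response::BaseResponse<Health> {
    // Equivalent to BaseResponse::new(<lib's CARGO_PKG_VERSION>, body),
    // since env! expands inside the library crate.
    Health { status: "ok" }.into()
}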