2 Commits

Author SHA1 Message Date
de3882c8b1 🧹 Removed unused dependencies
Checks failed: Build & test / build (push), failing after 4s
Signed-off-by: Martin Berg Alstad <600878@stud.hvl.no>
2024-12-22 16:30:08 +01:00
61da72936e Changed path to name of git repository, to work when cloning
Signed-off-by: Martin Berg Alstad <600878@stud.hvl.no>
2024-12-22 16:24:23 +01:00
50 changed files with 928 additions and 2016 deletions

View File

@ -12,10 +12,7 @@ jobs:
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
- uses: actions-rust-lang/setup-rust-toolchain@v1
- name: Build - name: Build
run: cargo build --verbose run: cargo build --verbose
- name: Run tests - name: Run tests
run: cargo test --verbose --all-features --workspace run: cargo test --verbose --all-features --workspace

View File

@ -1,12 +0,0 @@
<component name="InspectionProjectProfileManager">
<profile version="1.0">
<option name="myName" value="Project Default" />
<inspection_tool class="GrazieInspection" enabled="false" level="GRAMMAR_ERROR" enabled_by_default="false" />
<inspection_tool class="LanguageDetectionInspection" enabled="false" level="WARNING" enabled_by_default="false" />
<inspection_tool class="SpellCheckingInspection" enabled="false" level="TYPO" enabled_by_default="false">
<option name="processCode" value="true" />
<option name="processLiterals" value="true" />
<option name="processComments" value="true" />
</inspection_tool>
</profile>
</component>

Cargo.lock (generated, 1791 lines changed)

File diff suppressed because it is too large.

View File

@ -3,14 +3,14 @@ members = ["crates/*"]
exclude = ["examples"] exclude = ["examples"]
[workspace.package] [workspace.package]
edition = "2024" edition = "2021"
rust-version = "1.89" rust-version = "1.80.1"
authors = ["Martin Berg Alstad"] authors = ["Martin Berg Alstad"]
homepage = "martials.no" homepage = "emberal.github.io"
[package] [package]
name = "lib" name = "lib"
version = "2.0.1" version = "1.4.3"
description = "A library with utilities and helper fuctions." description = "A library with utilities and helper fuctions."
edition = { workspace = true } edition = { workspace = true }
rust-version = { workspace = true } rust-version = { workspace = true }
@ -21,19 +21,18 @@ homepage = { workspace = true }
[dependencies] [dependencies]
# Api # Api
axum = { version = "0.8", optional = true, features = ["multipart"] } axum = { version = "0.7", optional = true, features = ["multipart"] }
tower = { version = "0.5", optional = true } tower = { version = "0.5", optional = true }
tower-http = { version = "0.6", optional = true, features = ["trace", "cors", "normalize-path"] } tower-http = { version = "0.5", optional = true, features = ["trace", "cors", "normalize-path"] }
mime = { version = "0.3", optional = true } mime = { version = "0.3", optional = true }
# Async # Async
tokio = { workspace = true, optional = true, features = ["fs", "rt-multi-thread"] } tokio = { workspace = true, optional = true, features = ["fs", "rt-multi-thread"] }
tokio-util = { version = "0.7", optional = true, features = ["io"] } tokio-util = { version = "0.7", optional = true, features = ["io"] }
# Database # Database
diesel = { workspace = true, optional = true, features = ["postgres"] } diesel = { workspace = true, optional = true, features = ["postgres"] }
diesel-async = { workspace = true, optional = true, features = ["postgres", "deadpool", "async-connection-wrapper"] } diesel-async = { workspace = true, optional = true, features = ["postgres", "deadpool"] }
diesel-crud-derive = { path = "crates/diesel_crud_derive", optional = true } diesel-crud-derive = { path = "crates/diesel_crud_derive", optional = true }
diesel-crud-trait = { path = "crates/diesel_crud_trait", optional = true } diesel-crud-trait = { path = "crates/diesel_crud_trait", optional = true }
diesel_migrations = { workspace = true, optional = true }
deadpool-diesel = { workspace = true, optional = true, features = ["postgres"] } deadpool-diesel = { workspace = true, optional = true, features = ["postgres"] }
# Error handling # Error handling
thiserror = { workspace = true, optional = true } thiserror = { workspace = true, optional = true }
@ -41,15 +40,12 @@ thiserror = { workspace = true, optional = true }
tracing = { version = "0.1", optional = true } tracing = { version = "0.1", optional = true }
tracing-subscriber = { version = "0.3", optional = true } tracing-subscriber = { version = "0.3", optional = true }
# Parsing # Parsing
nom = { version = "8.0", optional = true } nom = { version = "7.1", optional = true }
# Procedural macros # Procedural macros
into-response-derive = { path = "crates/into_response_derive", optional = true } into-response-derive = { path = "crates/into_response_derive", optional = true }
read-files = { path = "crates/read_files", optional = true } read-files = { path = "crates/read_files", optional = true }
# Serialization / Deserialization # Serialization / Deserialization
serde = { version = "1.0", optional = true, features = ["derive"] } serde = { version = "1.0", optional = true, features = ["derive"] }
serde_json = { version = "1.0", optional = true }
# Test
testcontainers-modules = { workspace = true, features = ["postgres"], optional = true }
# Time # Time
chrono = { version = "0.4", optional = true, features = ["serde"] } chrono = { version = "0.4", optional = true, features = ["serde"] }
# Utils # Utils
@ -57,33 +53,28 @@ derive_more = { workspace = true, features = ["from", "constructor"] }
[workspace.dependencies] [workspace.dependencies]
# Async # Async
tokio = "1.47" tokio = "1.40"
# Database # Database
diesel = "2.2" diesel = "2.2"
diesel-async = "0.6" diesel-async = "0.5"
diesel_migrations = "2.2"
deadpool-diesel = "0.6" deadpool-diesel = "0.6"
# Error handling # Error handling
thiserror = "2.0" thiserror = "1.0"
# Procedural macros # Procedural macros
syn = "2.0" syn = "2.0"
quote = "1.0" quote = "1.0"
deluxe = "0.5" deluxe = "0.5"
proc-macro2 = "1.0" proc-macro2 = "1.0"
# Test
testcontainers-modules = "0.13"
# Utils # Utils
derive_more = "2.0" derive_more = "1.0"
regex = "1.11"
[features] [features]
axum = ["dep:axum", "dep:tower", "dep:serde", "dep:tower-http", "dep:thiserror", "dep:tracing", "dep:tracing-subscriber", "dep:tokio", "dep:mime"] axum = ["dep:axum", "dep:tower", "dep:tower-http", "dep:thiserror", "dep:tracing", "dep:tracing-subscriber", "dep:tokio", "dep:mime"]
diesel = ["dep:diesel-crud-trait", "dep:diesel", "dep:diesel-async", "dep:deadpool-diesel", "dep:diesel_migrations"] diesel = ["dep:diesel-crud-trait", "dep:diesel", "dep:diesel-async", "dep:deadpool-diesel"]
io = ["dep:tokio", "dep:tokio-util"] io = ["dep:tokio", "dep:tokio-util"]
iter = [] iter = []
nom = ["dep:nom"] nom = ["dep:nom"]
serde = ["dep:serde", "dep:serde_json"] serde = ["dep:serde"]
derive = ["dep:into-response-derive", "dep:diesel-crud-derive"] derive = ["dep:into-response-derive", "dep:diesel-crud-derive"]
read-files = ["dep:read-files"] read-files = ["dep:read-files"]
time = ["dep:chrono"] time = ["dep:chrono"]
test = ["dep:testcontainers-modules"]

Makefile.toml (new file, 15 lines)
View File

@ -0,0 +1,15 @@
[tasks.clippy]
command = "cargo"
args = ["clippy", "--all-targets", "--all-features", "--", "-D", "warnings"]
[tasks.fmt]
command = "cargo"
args = ["fmt", "--all"]
[tasks.test]
command = "cargo"
args = ["test", "--all-features"]
[tasks.coverage]
command = "cargo"
args = ["llvm-cov", "--all-features"]

View File

@ -1,3 +1,3 @@
# Rust Lib 🦀 # Lib
Rust library for common code, traits and utility functions -_-

View File

@ -1,5 +1,5 @@
use crate::common::PrimaryKey; use crate::common::PrimaryKey;
use deluxe::{ExtractAttributes, extract_attributes}; use deluxe::{extract_attributes, ExtractAttributes};
use proc_macro2::Ident; use proc_macro2::Ident;
use quote::quote; use quote::quote;
use std::collections::HashMap; use std::collections::HashMap;

View File

@ -9,6 +9,6 @@ pub(crate) struct PrimaryKey {
pub(crate) fn return_type(output: proc_macro2::TokenStream) -> proc_macro2::TokenStream { pub(crate) fn return_type(output: proc_macro2::TokenStream) -> proc_macro2::TokenStream {
quote! { quote! {
Result<#output, lib::diesel_crud_trait::CrudError> std::pin::Pin<Box<dyn core::future::Future<Output = Result<#output, lib::diesel_crud_trait::CrudError>> + Send + 'async_trait>>
} }
} }

View File

@ -1,4 +1,4 @@
use crate::{Attributes, common}; use crate::{common, Attributes};
use quote::quote; use quote::quote;
pub(crate) fn derive_diesel_crud_create_impl( pub(crate) fn derive_diesel_crud_create_impl(
@ -16,7 +16,12 @@ pub(crate) fn derive_diesel_crud_create_impl(
#[automatically_derived] #[automatically_derived]
impl lib::diesel_crud_trait::DieselCrudCreate<#table::table> for #struct_ident { impl lib::diesel_crud_trait::DieselCrudCreate<#table::table> for #struct_ident {
type Insert = #insert; type Insert = #insert;
async fn insert(insert: Self::Insert, conn: &mut diesel_async::AsyncPgConnection) -> #return_type { fn insert<'a, 'async_trait>(insert: Self::Insert, conn: &'a mut diesel_async::AsyncPgConnection) -> #return_type
where
Self: Sized + Sync + 'a,
'a: 'async_trait,
{
Box::pin(async move {
use diesel::associations::HasTable; use diesel::associations::HasTable;
diesel_async::RunQueryDsl::get_result( diesel_async::RunQueryDsl::get_result(
diesel::dsl::insert_into(#table::table::table()).values(insert), diesel::dsl::insert_into(#table::table::table()).values(insert),
@ -24,9 +29,16 @@ pub(crate) fn derive_diesel_crud_create_impl(
) )
.await .await
.map_err(Into::into) .map_err(Into::into)
})
} }
async fn insert_many(insert: &[Self::Insert], conn: &mut diesel_async::AsyncPgConnection) -> #many_return_type { fn insert_many<'a, 'b, 'async_trait>(insert: &'a [Self::Insert], conn: &'b mut diesel_async::AsyncPgConnection) -> #many_return_type
where
Self: Sized + Sync + 'async_trait,
'a: 'async_trait,
'b: 'async_trait,
{
Box::pin(async move {
use diesel::associations::HasTable; use diesel::associations::HasTable;
diesel_async::RunQueryDsl::get_results( diesel_async::RunQueryDsl::get_results(
diesel::dsl::insert_into(#table::table::table()).values(insert), diesel::dsl::insert_into(#table::table::table()).values(insert),
@ -34,6 +46,7 @@ pub(crate) fn derive_diesel_crud_create_impl(
) )
.await .await
.map_err(Into::into) .map_err(Into::into)
})
} }
} }
} }

View File

@ -1,4 +1,4 @@
use crate::{Attributes, PrimaryKey, common}; use crate::{common, Attributes, PrimaryKey};
use quote::quote; use quote::quote;
pub(crate) fn derive_diesel_crud_delete_impl( pub(crate) fn derive_diesel_crud_delete_impl(
@ -22,7 +22,12 @@ pub(crate) fn derive_diesel_crud_delete_impl(
#[automatically_derived] #[automatically_derived]
impl lib::diesel_crud_trait::DieselCrudDelete for #struct_ident { impl lib::diesel_crud_trait::DieselCrudDelete for #struct_ident {
type PK = #pk_type; type PK = #pk_type;
async fn delete(pk: Self::PK, conn: &mut diesel_async::AsyncPgConnection) -> #return_type { fn delete<'a, 'async_trait>(pk: Self::PK, conn: &'a mut diesel_async::AsyncPgConnection) -> #return_type
where
Self: Sized + Sync + 'a,
'a: 'async_trait,
{
Box::pin(async move {
use diesel::QueryDsl; use diesel::QueryDsl;
use diesel::associations::HasTable; use diesel::associations::HasTable;
diesel_async::RunQueryDsl::get_result( diesel_async::RunQueryDsl::get_result(
@ -34,6 +39,7 @@ pub(crate) fn derive_diesel_crud_delete_impl(
) )
.await .await
.map_err(Into::into) .map_err(Into::into)
})
} }
} }
} }

View File

@ -1,6 +1,6 @@
extern crate proc_macro; extern crate proc_macro;
use crate::attributes::{Attributes, extract_attrs}; use crate::attributes::{extract_attrs, Attributes};
use crate::common::PrimaryKey; use crate::common::PrimaryKey;
use crate::create::derive_diesel_crud_create_impl; use crate::create::derive_diesel_crud_create_impl;
use crate::delete::derive_diesel_crud_delete_impl; use crate::delete::derive_diesel_crud_delete_impl;
@ -8,7 +8,7 @@ use crate::list::derive_diesel_crud_list_impl;
use crate::read::derive_diesel_crud_read_impl; use crate::read::derive_diesel_crud_read_impl;
use crate::update::derive_diesel_crud_update_impl; use crate::update::derive_diesel_crud_update_impl;
use quote::quote; use quote::quote;
use syn::{DeriveInput, parse_macro_input}; use syn::{parse_macro_input, DeriveInput};
mod attributes; mod attributes;
mod common; mod common;

View File

@ -1,4 +1,4 @@
use crate::{Attributes, common}; use crate::{common, Attributes};
use quote::quote; use quote::quote;
pub(crate) fn derive_diesel_crud_list_impl( pub(crate) fn derive_diesel_crud_list_impl(
@ -13,9 +13,15 @@ pub(crate) fn derive_diesel_crud_list_impl(
quote! { quote! {
#[automatically_derived] #[automatically_derived]
impl lib::diesel_crud_trait::DieselCrudList for #struct_ident { impl lib::diesel_crud_trait::DieselCrudList for #struct_ident {
async fn list(conn: &mut diesel_async::AsyncPgConnection) -> #return_type { fn list<'a, 'async_trait>(conn: &'a mut diesel_async::AsyncPgConnection) -> #return_type
where
Self: Sized + Sync + 'a,
'a: 'async_trait
{
Box::pin(async move {
use diesel::associations::HasTable; use diesel::associations::HasTable;
diesel_async::RunQueryDsl::get_results(#table::table::table(), conn).await.map_err(Into::into) diesel_async::RunQueryDsl::get_results(#table::table::table(), conn).await.map_err(Into::into)
})
} }
} }
} }

View File

@ -1,5 +1,5 @@
use crate::common::PrimaryKey; use crate::common::PrimaryKey;
use crate::{Attributes, common}; use crate::{common, Attributes};
use quote::quote; use quote::quote;
pub(crate) fn derive_diesel_crud_read_impl( pub(crate) fn derive_diesel_crud_read_impl(
@ -20,7 +20,12 @@ pub(crate) fn derive_diesel_crud_read_impl(
#[automatically_derived] #[automatically_derived]
impl lib::diesel_crud_trait::DieselCrudRead for #struct_ident { impl lib::diesel_crud_trait::DieselCrudRead for #struct_ident {
type PK = #pk_type; type PK = #pk_type;
async fn read(pk: Self::PK, conn: &mut diesel_async::AsyncPgConnection) -> #return_type { fn read<'a, 'async_trait>(pk: Self::PK, conn: &'a mut diesel_async::AsyncPgConnection) -> #return_type
where
Self: Sized + Sync + 'a,
'a: 'async_trait
{
Box::pin(async move {
use diesel::associations::HasTable; use diesel::associations::HasTable;
diesel_async::RunQueryDsl::get_result( diesel_async::RunQueryDsl::get_result(
diesel::QueryDsl::find(#table::table::table(), pk), diesel::QueryDsl::find(#table::table::table(), pk),
@ -28,6 +33,7 @@ pub(crate) fn derive_diesel_crud_read_impl(
) )
.await .await
.map_err(Into::into) .map_err(Into::into)
})
} }
} }
} }

View File

@ -1,4 +1,4 @@
use crate::{Attributes, common}; use crate::{common, Attributes};
use quote::quote; use quote::quote;
pub(crate) fn derive_diesel_crud_update_impl( pub(crate) fn derive_diesel_crud_update_impl(
@ -15,7 +15,12 @@ pub(crate) fn derive_diesel_crud_update_impl(
#[automatically_derived] #[automatically_derived]
impl lib::diesel_crud_trait::DieselCrudUpdate for #struct_ident { impl lib::diesel_crud_trait::DieselCrudUpdate for #struct_ident {
type Update = #update; type Update = #update;
async fn update(update: Self::Update, conn: &mut diesel_async::AsyncPgConnection) -> #return_type { fn update<'a, 'async_trait>(update: Self::Update, conn: &'a mut diesel_async::AsyncPgConnection) -> #return_type
where
Self: Sized + Sync + 'a,
'a: 'async_trait,
{
Box::pin(async move {
use diesel::associations::HasTable; use diesel::associations::HasTable;
diesel_async::RunQueryDsl::get_result( diesel_async::RunQueryDsl::get_result(
diesel::dsl::update(#table::table::table()).set(update), diesel::dsl::update(#table::table::table()).set(update),
@ -23,6 +28,7 @@ pub(crate) fn derive_diesel_crud_update_impl(
) )
.await .await
.map_err(Into::into) .map_err(Into::into)
})
} }
} }
} }

View File

@ -7,4 +7,6 @@ rust-version = { workspace = true }
[dependencies] [dependencies]
diesel = { workspace = true, features = ["postgres"] } diesel = { workspace = true, features = ["postgres"] }
diesel-async = { workspace = true, features = ["postgres", "deadpool"] } diesel-async = { workspace = true, features = ["postgres", "deadpool"] }
thiserror = { workspace = true } async-trait = "0.1"
deadpool-diesel = { version = "0.6", features = ["postgres"] }
thiserror = "1.0"

View File

@ -1,5 +1,6 @@
mod error; mod error;
use async_trait::async_trait;
use diesel::{AsChangeset, Insertable}; use diesel::{AsChangeset, Insertable};
use diesel_async::AsyncPgConnection; use diesel_async::AsyncPgConnection;
pub use error::CrudError; pub use error::CrudError;
@ -27,19 +28,17 @@ pub trait DieselCrud<Table>:
/// - `conn` - The database connection /// - `conn` - The database connection
/// # Returns /// # Returns
/// A result containing the inserted entity or a `CrudError` /// A result containing the inserted entity or a `CrudError`
#[async_trait]
pub trait DieselCrudCreate<Table> pub trait DieselCrudCreate<Table>
where where
Self: Sized, Self: Sized,
{ {
type Insert: Insertable<Table>; type Insert: Insertable<Table>;
fn insert( async fn insert(insert: Self::Insert, conn: &mut AsyncPgConnection) -> Result<Self, CrudError>;
insert: Self::Insert, async fn insert_many(
conn: &mut AsyncPgConnection,
) -> impl Future<Output = Result<Self, CrudError>>;
fn insert_many(
insert: &[Self::Insert], insert: &[Self::Insert],
conn: &mut AsyncPgConnection, conn: &mut AsyncPgConnection,
) -> impl Future<Output = Result<Vec<Self>, CrudError>>; ) -> Result<Vec<Self>, CrudError>;
} }
/// Gets an entity from the database /// Gets an entity from the database
@ -53,15 +52,13 @@ where
/// # Returns /// # Returns
/// A result containing the entity or a `CrudError`. /// A result containing the entity or a `CrudError`.
/// If the entity is not found, the error should be `CrudError::NotFound`. /// If the entity is not found, the error should be `CrudError::NotFound`.
#[async_trait]
pub trait DieselCrudRead pub trait DieselCrudRead
where where
Self: Sized, Self: Sized,
{ {
type PK; type PK;
fn read( async fn read(pk: Self::PK, conn: &mut AsyncPgConnection) -> Result<Self, CrudError>;
pk: Self::PK,
conn: &mut AsyncPgConnection,
) -> impl Future<Output = Result<Self, CrudError>>;
} }
/// Updates an entity in the database /// Updates an entity in the database
@ -76,15 +73,13 @@ where
/// # Returns /// # Returns
/// A result containing the old entry of the entity if successful or a `CrudError`. /// A result containing the old entry of the entity if successful or a `CrudError`.
/// If the entity is not found, the error should be `CrudError::NotFound`. /// If the entity is not found, the error should be `CrudError::NotFound`.
#[async_trait]
pub trait DieselCrudUpdate pub trait DieselCrudUpdate
where where
Self: Sized, Self: Sized,
{ {
type Update: AsChangeset; type Update: AsChangeset;
fn update( async fn update(update: Self::Update, conn: &mut AsyncPgConnection) -> Result<Self, CrudError>;
update: Self::Update,
conn: &mut AsyncPgConnection,
) -> impl Future<Output = Result<Self, CrudError>>;
} }
/// Deletes an entity from the database /// Deletes an entity from the database
@ -98,15 +93,13 @@ where
/// # Returns /// # Returns
/// A result containing the deleted entity or a `CrudError`. /// A result containing the deleted entity or a `CrudError`.
/// If the entity is not found, the error should be `CrudError::NotFound`. /// If the entity is not found, the error should be `CrudError::NotFound`.
#[async_trait]
pub trait DieselCrudDelete pub trait DieselCrudDelete
where where
Self: Sized, Self: Sized,
{ {
type PK; type PK;
fn delete( async fn delete(pk: Self::PK, conn: &mut AsyncPgConnection) -> Result<Self, CrudError>;
pk: Self::PK,
conn: &mut AsyncPgConnection,
) -> impl Future<Output = Result<Self, CrudError>>;
} }
/// Lists all entities in the table /// Lists all entities in the table
@ -116,9 +109,10 @@ where
/// - `conn` - The database connection /// - `conn` - The database connection
/// # Returns /// # Returns
/// A result containing a Vec of entities or a `CrudError`. /// A result containing a Vec of entities or a `CrudError`.
#[async_trait]
pub trait DieselCrudList pub trait DieselCrudList
where where
Self: Sized, Self: Sized,
{ {
fn list(conn: &mut AsyncPgConnection) -> impl Future<Output = Result<Vec<Self>, CrudError>>; async fn list(conn: &mut AsyncPgConnection) -> Result<Vec<Self>, CrudError>;
} }
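For orientation, here is a minimal usage sketch of the CRUD traits above, closely mirroring the tests crate that appears later in this diff; the derive list and attribute names are assumed from that test file, and the call sites read the same whether the traits use `#[async_trait]` or return `impl Future`, since both are awaited.

```rust
use diesel::prelude::*;
use diesel_async::AsyncPgConnection;
// Derive macros (macro namespace) and the traits they implement (type namespace)
// may share names, as in the tests crate later in this diff.
use lib::diesel_crud_derive::{DieselCrudCreate, DieselCrudRead};
use lib::diesel_crud_trait::{CrudError, DieselCrudCreate, DieselCrudRead};

diesel::table! {
    user (email) {
        #[max_length = 255]
        email -> Varchar,
    }
}

// Abbreviated derive list; the tests crate derives the full CRUD set.
#[derive(Debug, Clone, Queryable, DieselCrudCreate, DieselCrudRead)]
#[diesel_crud(insert = InsertUser)]
#[diesel(table_name = user)]
struct User {
    #[diesel_crud(pk)]
    email: String,
}

#[derive(Clone, Insertable)]
#[diesel(table_name = user)]
struct InsertUser {
    email: String,
}

// Insert one row and read it back by primary key over an async Postgres connection.
async fn insert_then_read(conn: &mut AsyncPgConnection) -> Result<User, CrudError> {
    let inserted = User::insert(
        InsertUser {
            email: "user@example.com".into(),
        },
        conn,
    )
    .await?;
    User::read(inserted.email, conn).await
}
```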

View File

@ -1,7 +1,7 @@
extern crate proc_macro; extern crate proc_macro;
use { use {
proc_macro::TokenStream, proc_macro::TokenStream,
syn::{DeriveInput, parse_macro_input}, syn::{parse_macro_input, DeriveInput},
}; };
mod derive; mod derive;

View File

@ -10,4 +10,4 @@ proc-macro = true
[dependencies] [dependencies]
syn = { workspace = true } syn = { workspace = true }
quote = { workspace = true } quote = { workspace = true }
regex = { workspace = true } regex = "1.10"

View File

@ -10,8 +10,8 @@ use std::{
use quote::quote; use quote::quote;
use syn::{ use syn::{
LitStr, Token,
parse::{Parse, ParseStream}, parse::{Parse, ParseStream},
LitStr, Token,
}; };
pub fn read_files_to_string_impl(args: Args) -> TokenStream { pub fn read_files_to_string_impl(args: Args) -> TokenStream {

View File

@ -9,9 +9,10 @@ homepage.workspace = true
[dependencies] [dependencies]
diesel = { workspace = true } diesel = { workspace = true }
diesel-async = { workspace = true } diesel-async = { workspace = true }
lib = { path = "../../../rust-lib", features = ["diesel", "derive", "test"] } lib = { path = "../../../rust-lib", features = ["diesel", "derive"] }
derive_more = { workspace = true, features = ["constructor", "from"] }
[dev-dependencies] [dev-dependencies]
tokio = { workspace = true, features = ["macros"] } tokio = { workspace = true, features = ["macros"] }
testcontainers-modules = { workspace = true, features = ["postgres"] } testcontainers-modules = { version = "0.9", features = ["postgres"] }
diesel_migrations = { workspace = true } diesel_async_migrations = "0.14"

View File

@ -2,8 +2,8 @@
# see https://diesel.rs/guides/configuring-diesel-cli # see https://diesel.rs/guides/configuring-diesel-cli
[print_schema] [print_schema]
file = "tests/schema.rs" file = "src/schema.rs"
custom_type_derives = ["diesel::query_builder::QueryId", "Clone"] custom_type_derives = ["diesel::query_builder::QueryId", "Clone"]
[migrations_directory] [migrations_directory]
dir = "./migrations" dir = "/home/martin/git/rust/lib/crates/tests/migrations"

View File

@ -5,9 +5,15 @@ use lib::diesel_crud_derive::{
use lib::diesel_crud_trait::DieselCrudCreate; use lib::diesel_crud_trait::DieselCrudCreate;
use test_containers::create_test_containers_pool; use test_containers::create_test_containers_pool;
mod schema;
#[cfg(test)] #[cfg(test)]
mod test_containers; pub mod test_containers;
diesel::table! {
user (email) {
#[max_length = 255]
email -> Varchar,
}
}
#[derive( #[derive(
Debug, Debug,
@ -23,21 +29,19 @@ mod test_containers;
DieselCrudUpdate, DieselCrudUpdate,
)] )]
#[diesel_crud(insert = InsertUser)] #[diesel_crud(insert = InsertUser)]
#[diesel(table_name = schema::user)] #[diesel(table_name = user)]
struct User { struct User {
#[diesel_crud(pk)] #[diesel_crud(pk)]
email: String, email: String,
} }
#[derive(Clone, Insertable)] #[derive(Clone, Insertable)]
#[diesel(table_name = schema::user)] #[diesel(table_name = user)]
struct InsertUser { struct InsertUser {
email: String, email: String,
} }
// TODO make test work in action #[tokio::test]
#[ignore]
#[tokio::test(flavor = "multi_thread")]
async fn test_insert_user() { async fn test_insert_user() {
let container = create_test_containers_pool().await.unwrap(); let container = create_test_containers_pool().await.unwrap();
let mut conn = container.pool.get().await.unwrap(); let mut conn = container.pool.get().await.unwrap();

View File

@ -1,6 +0,0 @@
diesel::table! {
user (email) {
#[max_length = 255]
email -> Varchar,
}
}

View File

@ -1,11 +1,50 @@
use diesel_migrations::EmbeddedMigrations; use derive_more::{Constructor, From};
use lib::diesel::migration::run_migrations; use diesel_async::pooled_connection::deadpool::{BuildError, PoolError};
use lib::test::test_containers::{ContainerError, TestContainer}; use diesel_async::AsyncPgConnection;
use diesel_async_migrations::EmbeddedMigrations;
use lib::diesel::pool::{create_pool_from_url, PgPool};
use lib::diesel::DieselError;
use testcontainers_modules::postgres::Postgres;
use testcontainers_modules::testcontainers::runners::AsyncRunner;
use testcontainers_modules::testcontainers::{ContainerAsync, TestcontainersError};
pub async fn create_test_containers_pool() -> Result<TestContainer, ContainerError> { /// When the TestContainer is dropped, the container will be removed.
let test_container = lib::test::test_containers::create_test_containers_pool().await?; /// # Errors
let connection = test_container.pool.get().await?; /// If destructed and the container field is dropped, the container will be stopped
const MIGRATIONS: EmbeddedMigrations = diesel_migrations::embed_migrations!("./migrations"); /// and all connections from the pool will result in DatabaseError.
run_migrations(connection, MIGRATIONS).await?; #[derive(Constructor)]
Ok(test_container) pub struct TestContainer {
pub container: ContainerAsync<Postgres>,
pub pool: PgPool,
}
#[derive(Debug, From)]
pub enum ContainerError {
TestContainers(TestcontainersError),
BuildError(BuildError),
PoolError(PoolError),
DieselError(DieselError),
}
pub async fn create_test_containers_pool<'a>() -> Result<TestContainer, ContainerError> {
let container = create_postgres_container().await?;
let connection_string = format!(
"postgres://postgres:postgres@localhost:{}/postgres",
container.get_host_port_ipv4(5432).await?
);
let pool = create_pool_from_url(connection_string)?;
run_migrations(pool.get().await?.as_mut()).await?;
Ok(TestContainer::new(container, pool))
}
pub(crate) async fn run_migrations(
conn: &mut AsyncPgConnection,
) -> Result<(), diesel::result::Error> {
static EMBEDDED_MIGRATIONS: EmbeddedMigrations =
diesel_async_migrations::embed_migrations!("./migrations");
EMBEDDED_MIGRATIONS.run_pending_migrations(conn).await
}
pub async fn create_postgres_container() -> Result<ContainerAsync<Postgres>, TestcontainersError> {
Postgres::default().start().await
} }
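A short hedged sketch of how this helper might be exercised from a test in the same crate, mirroring `test_insert_user` shown earlier in this diff (tokio's `macros` feature is already a dev-dependency there):

```rust
// Assumes this test lives next to the module above, so the helper is in scope.
#[tokio::test]
async fn postgres_container_hands_out_connections() {
    let container = create_test_containers_pool().await.unwrap();
    // Keep `container` alive for the duration of the test; dropping it stops
    // the Postgres container and later checkouts from the pool would fail.
    let _conn = container.pool.get().await.unwrap();
}
```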

View File

@ -18,14 +18,25 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe"
[[package]] [[package]]
name = "axum" name = "async-trait"
version = "0.8.4" version = "0.1.80"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "021e862c184ae977658b36c4500f7feac3221ca5da43e3f25bd04ab6c79a29b5" checksum = "c6fa2087f2753a7da8cc1c0dbfcf89579dd57458e36769de5ac750b4671737ca"
dependencies = [ dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "axum"
version = "0.7.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3a6c9af12842a67734c9a2e355436e5d03b22383ed60cf13cd0c18fbfe3dcbcf"
dependencies = [
"async-trait",
"axum-core", "axum-core",
"bytes", "bytes",
"form_urlencoded",
"futures-util", "futures-util",
"http", "http",
"http-body", "http-body",
@ -44,9 +55,9 @@ dependencies = [
"serde_json", "serde_json",
"serde_path_to_error", "serde_path_to_error",
"serde_urlencoded", "serde_urlencoded",
"sync_wrapper", "sync_wrapper 1.0.1",
"tokio", "tokio",
"tower 0.5.2", "tower 0.4.13",
"tower-layer", "tower-layer",
"tower-service", "tower-service",
"tracing", "tracing",
@ -54,19 +65,20 @@ dependencies = [
[[package]] [[package]]
name = "axum-core" name = "axum-core"
version = "0.5.2" version = "0.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "68464cd0412f486726fb3373129ef5d2993f90c34bc2bc1c1e9943b2f4fc7ca6" checksum = "a15c63fd72d41492dc4f497196f5da1fb04fb7529e631d73630d1b491e47a2e3"
dependencies = [ dependencies = [
"async-trait",
"bytes", "bytes",
"futures-core", "futures-util",
"http", "http",
"http-body", "http-body",
"http-body-util", "http-body-util",
"mime", "mime",
"pin-project-lite", "pin-project-lite",
"rustversion", "rustversion",
"sync_wrapper", "sync_wrapper 0.1.2",
"tower-layer", "tower-layer",
"tower-service", "tower-service",
"tracing", "tracing",
@ -113,18 +125,18 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
[[package]] [[package]]
name = "derive_more" name = "derive_more"
version = "2.0.1" version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "093242cf7570c207c83073cf82f79706fe7b8317e98620a47d5be7c3d8497678" checksum = "4a9b99b9cbbe49445b21764dc0625032a89b145a2642e67603e1c936f5458d05"
dependencies = [ dependencies = [
"derive_more-impl", "derive_more-impl",
] ]
[[package]] [[package]]
name = "derive_more-impl" name = "derive_more-impl"
version = "2.0.1" version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bda628edc44c4bb645fbe0f758797143e4e07926f7ebf4e9bdfbd3d2ce621df3" checksum = "cb7330aeadfbe296029522e6c40f315320aba36fc43a5b3632f3795348f3bd22"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
@ -278,19 +290,6 @@ dependencies = [
"hyper", "hyper",
"pin-project-lite", "pin-project-lite",
"tokio", "tokio",
"tower 0.4.13",
"tower-service",
]
[[package]]
name = "io-uring"
version = "0.7.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "046fa2d4d00aea763528b4950358d0ead425372445dc8ff86312b3c69ff7727b"
dependencies = [
"bitflags",
"cfg-if",
"libc",
] ]
[[package]] [[package]]
@ -307,15 +306,14 @@ checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe"
[[package]] [[package]]
name = "lib" name = "lib"
version = "2.0.1" version = "1.4.3"
dependencies = [ dependencies = [
"axum", "axum",
"derive_more", "derive_more",
"mime", "mime",
"serde",
"thiserror", "thiserror",
"tokio", "tokio",
"tower 0.5.2", "tower 0.5.0",
"tower-http", "tower-http",
"tracing", "tracing",
"tracing-subscriber", "tracing-subscriber",
@ -323,9 +321,9 @@ dependencies = [
[[package]] [[package]]
name = "libc" name = "libc"
version = "0.2.175" version = "0.2.155"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6a82ae493e598baaea5209805c49bbf2ea7de956d50d7da0da1164f9c6d28543" checksum = "97b3888a4aecf77e811145cadf6eef5901f4782c53886191b2f693f24761847c"
[[package]] [[package]]
name = "log" name = "log"
@ -335,9 +333,9 @@ checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24"
[[package]] [[package]]
name = "matchit" name = "matchit"
version = "0.8.4" version = "0.7.3"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "47e1ffaa40ddd1f3ed91f717a33c8c0ee23fff369e3aa8772b9605cc1d22f4c3" checksum = "0e7465ac9959cc2b1404e8e2367b43684a6d13790fe23056cc8c6c5a6b7bcb94"
[[package]] [[package]]
name = "memchr" name = "memchr"
@ -369,7 +367,7 @@ dependencies = [
"hermit-abi", "hermit-abi",
"libc", "libc",
"wasi", "wasi",
"windows-sys 0.52.0", "windows-sys",
] ]
[[package]] [[package]]
@ -469,9 +467,9 @@ checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184"
[[package]] [[package]]
name = "proc-macro2" name = "proc-macro2"
version = "1.0.101" version = "1.0.86"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "89ae43fd86e4158d6db51ad8e2b80f313af9cc74f5c0e03ccb87de09998732de" checksum = "5e719e8df665df0d1c8fbfd238015744736151d4445ec0836b8e628aae103b77"
dependencies = [ dependencies = [
"unicode-ident", "unicode-ident",
] ]
@ -565,12 +563,6 @@ dependencies = [
"lazy_static", "lazy_static",
] ]
[[package]]
name = "slab"
version = "0.4.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7a2ae44ef20feb57a68b23d846850f861394c2e02dc425a50098ae8c90267589"
[[package]] [[package]]
name = "smallvec" name = "smallvec"
version = "1.13.2" version = "1.13.2"
@ -579,12 +571,12 @@ checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67"
[[package]] [[package]]
name = "socket2" name = "socket2"
version = "0.6.0" version = "0.5.7"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "233504af464074f9d066d7b5416c5f9b894a5862a6506e306f7b816cdd6f1807" checksum = "ce305eb0b4296696835b71df73eb912e0f1ffd2556a501fcede6e0c50349191c"
dependencies = [ dependencies = [
"libc", "libc",
"windows-sys 0.59.0", "windows-sys",
] ]
[[package]] [[package]]
@ -595,15 +587,21 @@ checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67"
[[package]] [[package]]
name = "syn" name = "syn"
version = "2.0.106" version = "2.0.68"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ede7c438028d4436d71104916910f5bb611972c5cfd7f89b8300a8186e6fada6" checksum = "901fa70d88b9d6c98022e23b4136f9f3e54e4662c3bc1bd1d84a42a9a0f0c1e9"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
"unicode-ident", "unicode-ident",
] ]
[[package]]
name = "sync_wrapper"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160"
[[package]] [[package]]
name = "sync_wrapper" name = "sync_wrapper"
version = "1.0.1" version = "1.0.1"
@ -612,18 +610,18 @@ checksum = "a7065abeca94b6a8a577f9bd45aa0867a2238b74e8eb67cf10d492bc39351394"
[[package]] [[package]]
name = "thiserror" name = "thiserror"
version = "2.0.16" version = "1.0.61"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3467d614147380f2e4e374161426ff399c91084acd2363eaf549172b3d5e60c0" checksum = "c546c80d6be4bc6a00c0f01730c08df82eaa7a7a61f11d656526506112cc1709"
dependencies = [ dependencies = [
"thiserror-impl", "thiserror-impl",
] ]
[[package]] [[package]]
name = "thiserror-impl" name = "thiserror-impl"
version = "2.0.16" version = "1.0.61"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6c5e1be1c48b9172ee610da68fd9cd2770e7a4056cb3fc98710ee6906f0c7960" checksum = "46c3384250002a6d5af4d114f2845d37b57521033f30d5c3f46c4d70e1197533"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
@ -642,26 +640,24 @@ dependencies = [
[[package]] [[package]]
name = "tokio" name = "tokio"
version = "1.47.1" version = "1.40.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "89e49afdadebb872d3145a5638b59eb0691ea23e46ca484037cfab3b76b95038" checksum = "e2b070231665d27ad9ec9b8df639893f46727666c6767db40317fbe920a5d998"
dependencies = [ dependencies = [
"backtrace", "backtrace",
"io-uring",
"libc", "libc",
"mio", "mio",
"pin-project-lite", "pin-project-lite",
"slab",
"socket2", "socket2",
"tokio-macros", "tokio-macros",
"windows-sys 0.59.0", "windows-sys",
] ]
[[package]] [[package]]
name = "tokio-macros" name = "tokio-macros"
version = "2.5.0" version = "2.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6e06d43f1345a3bcd39f6a56dbb7dcab2ba47e68e8ac134855e7e2bdbaf8cab8" checksum = "693d596312e88961bc67d7f1f97af8a70227d9f90c31bba5806eec004978d752"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
@ -681,34 +677,30 @@ dependencies = [
"tokio", "tokio",
"tower-layer", "tower-layer",
"tower-service", "tower-service",
]
[[package]]
name = "tower"
version = "0.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d039ad9159c98b70ecfd540b2573b97f7f52c3e8d9f8ad57a24b916a536975f9"
dependencies = [
"futures-core",
"futures-util",
"pin-project-lite",
"sync_wrapper",
"tokio",
"tower-layer",
"tower-service",
"tracing", "tracing",
] ]
[[package]] [[package]]
name = "tower-http" name = "tower"
version = "0.6.6" version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "adc82fd73de2a9722ac5da747f12383d2bfdb93591ee6c58486e0097890f05f2" checksum = "36b837f86b25d7c0d7988f00a54e74739be6477f2aac6201b8f429a7569991b7"
dependencies = [
"tower-layer",
"tower-service",
]
[[package]]
name = "tower-http"
version = "0.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1e9cd434a998747dd2c4276bc96ee2e0c7a2eadf3cae88e52be55a05fa9053f5"
dependencies = [ dependencies = [
"bitflags", "bitflags",
"bytes", "bytes",
"http", "http",
"http-body", "http-body",
"http-body-util",
"pin-project-lite", "pin-project-lite",
"tower-layer", "tower-layer",
"tower-service", "tower-service",
@ -717,15 +709,15 @@ dependencies = [
[[package]] [[package]]
name = "tower-layer" name = "tower-layer"
version = "0.3.3" version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e" checksum = "c20c8dbed6283a09604c3e69b4b7eeb54e298b8a600d4d5ecb5ad39de609f1d0"
[[package]] [[package]]
name = "tower-service" name = "tower-service"
version = "0.3.3" version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" checksum = "b6bc1c9ce2b5135ac7f93c72918fc37feb872bdc6a5533a8b85eb4b86bfdae52"
[[package]] [[package]]
name = "tracing" name = "tracing"
@ -840,20 +832,11 @@ dependencies = [
"windows-targets", "windows-targets",
] ]
[[package]]
name = "windows-sys"
version = "0.59.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b"
dependencies = [
"windows-targets",
]
[[package]] [[package]]
name = "windows-targets" name = "windows-targets"
version = "0.52.6" version = "0.52.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" checksum = "6f0713a46559409d202e70e28227288446bf7841d3211583a4b53e3f6d96e7eb"
dependencies = [ dependencies = [
"windows_aarch64_gnullvm", "windows_aarch64_gnullvm",
"windows_aarch64_msvc", "windows_aarch64_msvc",
@ -867,48 +850,48 @@ dependencies = [
[[package]] [[package]]
name = "windows_aarch64_gnullvm" name = "windows_aarch64_gnullvm"
version = "0.52.6" version = "0.52.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" checksum = "7088eed71e8b8dda258ecc8bac5fb1153c5cffaf2578fc8ff5d61e23578d3263"
[[package]] [[package]]
name = "windows_aarch64_msvc" name = "windows_aarch64_msvc"
version = "0.52.6" version = "0.52.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" checksum = "9985fd1504e250c615ca5f281c3f7a6da76213ebd5ccc9561496568a2752afb6"
[[package]] [[package]]
name = "windows_i686_gnu" name = "windows_i686_gnu"
version = "0.52.6" version = "0.52.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" checksum = "88ba073cf16d5372720ec942a8ccbf61626074c6d4dd2e745299726ce8b89670"
[[package]] [[package]]
name = "windows_i686_gnullvm" name = "windows_i686_gnullvm"
version = "0.52.6" version = "0.52.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" checksum = "87f4261229030a858f36b459e748ae97545d6f1ec60e5e0d6a3d32e0dc232ee9"
[[package]] [[package]]
name = "windows_i686_msvc" name = "windows_i686_msvc"
version = "0.52.6" version = "0.52.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" checksum = "db3c2bf3d13d5b658be73463284eaf12830ac9a26a90c717b7f771dfe97487bf"
[[package]] [[package]]
name = "windows_x86_64_gnu" name = "windows_x86_64_gnu"
version = "0.52.6" version = "0.52.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" checksum = "4e4246f76bdeff09eb48875a0fd3e2af6aada79d409d33011886d3e1581517d9"
[[package]] [[package]]
name = "windows_x86_64_gnullvm" name = "windows_x86_64_gnullvm"
version = "0.52.6" version = "0.52.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" checksum = "852298e482cd67c356ddd9570386e2862b5673c85bd5f88df9ab6802b334c596"
[[package]] [[package]]
name = "windows_x86_64_msvc" name = "windows_x86_64_msvc"
version = "0.52.6" version = "0.52.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" checksum = "bec47e5bfd1bff0eeaf6d8b485cc1074891a197ab4225d504cb7a1ab88b02bf0"

View File

@ -1,9 +1,9 @@
[package] [package]
name = "multipart_file" name = "multipart_file"
version = "0.1.0" version = "0.1.0"
edition = "2024" edition = "2021"
[dependencies] [dependencies]
lib = { path = "../..", features = ["axum"] } lib = { path = "../..", features = ["axum"] }
axum = "0.8" axum = "0.7.5"
tokio = { version = "1.47", features = ["rt-multi-thread", "macros"] } tokio = { version = "1.40", features = ["rt-multi-thread", "macros"] }

View File

@ -1,10 +1,22 @@
use axum::extract::DefaultBodyLimit; use axum::extract::DefaultBodyLimit;
use lib::axum::app::AppBuilder; use lib::axum::app::AppBuilder;
use lib::axum::extractor::{MultipartFile, MultipartFiles}; use lib::axum::extractor::MultipartFiles;
use lib::routes; use lib::routes;
// 0 or more files // 0 or more
async fn several_files(MultipartFiles(files): MultipartFiles) -> String { async fn with_optional_file(files: Option<MultipartFiles>) -> String {
format!(
"{:?}",
files.map(|files| files
.0
.into_iter()
.map(|file| file.filename)
.collect::<Vec<_>>())
)
}
// 1 or more files
async fn handler(MultipartFiles(files): MultipartFiles) -> String {
format!( format!(
"{:?} uploaded", "{:?} uploaded",
files files
@ -14,26 +26,11 @@ async fn several_files(MultipartFiles(files): MultipartFiles) -> String {
) )
} }
// 1 file exactly
async fn single_file(MultipartFile(file): MultipartFile) -> String {
format!("{:?} uploaded", file.filename)
}
// 0 or 1 file
async fn optional_single_file(file: Option<MultipartFile>) -> String {
format!(
"{:?} uploaded",
file.map(|file| file.0.filename)
.unwrap_or(String::from("No file found"))
)
}
#[tokio::main] #[tokio::main]
async fn main() { async fn main() {
let route = routes!( let route = routes!(
get "/" => several_files, get "/" => handler,
get "/file" => single_file, get "/opt" => with_optional_file
get "/opt/file" => optional_single_file
) )
.layer(DefaultBodyLimit::disable()); .layer(DefaultBodyLimit::disable());
AppBuilder::new().route(route).serve().await.unwrap(); AppBuilder::new().route(route).serve().await.unwrap();

flake.lock (generated, 44 lines)
View File

@ -1,44 +0,0 @@
{
"nodes": {
"nixpkgs": {
"locked": {
"lastModified": 1757020766,
"narHash": "sha256-PLoSjHRa2bUbi1x9HoXgTx2AiuzNXs54c8omhadyvp0=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "fe83bbdde2ccdc2cb9573aa846abe8363f79a97a",
"type": "github"
},
"original": {
"owner": "NixOS",
"ref": "nixos-25.05",
"repo": "nixpkgs",
"type": "github"
}
},
"nixpkgs-unstable": {
"locked": {
"lastModified": 1756787288,
"narHash": "sha256-rw/PHa1cqiePdBxhF66V7R+WAP8WekQ0mCDG4CFqT8Y=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "d0fc30899600b9b3466ddb260fd83deb486c32f1",
"type": "github"
},
"original": {
"owner": "NixOS",
"ref": "nixos-unstable",
"repo": "nixpkgs",
"type": "github"
}
},
"root": {
"inputs": {
"nixpkgs": "nixpkgs",
"nixpkgs-unstable": "nixpkgs-unstable"
}
}
},
"root": "root",
"version": 7
}

View File

@ -1,38 +0,0 @@
{
inputs = {
nixpkgs.url = "github:NixOS/nixpkgs/nixos-25.05";
nixpkgs-unstable.url = "github:NixOS/nixpkgs/nixos-unstable";
};
outputs = { nixpkgs, nixpkgs-unstable, ... }:
let
system = "x86_64-linux";
in
{
devShells.${system}.default =
let
pkgs = import nixpkgs {
inherit system;
};
unstable = import nixpkgs-unstable {
inherit system;
};
in
pkgs.mkShell {
packages = with pkgs; [
git
just
] ++ [
# Rust
gcc
# cargo
] ++ [
# Diesel
diesel-cli
libpq
];
shellHook = "fish";
};
};
}

View File

@ -1,29 +0,0 @@
#!/usr/bin/env just --justfile
# List all recipes
default:
@just --list
# Open a nix shell with all dependencies in path
develop:
nix develop
# Format all rust files
fmt:
cargo fmt --all
# Lint all files with clippy
lint:
cargo clippy --all-targets --all-features -- -D warnings
# Build a release version
release:
cargo build --release
# Run all tests
test:
cargo test --all-features --workspace
# Run coverage
coverage:
cargo llvm-cov

View File

@ -1,22 +1,23 @@
use axum::Router; use {
use axum::ServiceExt; axum::{
use axum::extract::Request; extract::Request, handler::Handler, response::IntoResponse, routing::Route, Router,
use axum::handler::Handler; ServiceExt,
use axum::response::IntoResponse; },
use axum::routing::Route; std::{
use std::convert::Infallible; convert::Infallible,
use std::io; io,
use std::net::IpAddr; net::{IpAddr, Ipv4Addr, SocketAddr},
use std::net::Ipv4Addr; },
use std::net::SocketAddr; tokio::net::TcpListener,
use tokio::net::TcpListener; tower::{layer::Layer, Service},
use tower::{Service, layer::Layer}; tower_http::{
use tower_http::cors::CorsLayer; cors::CorsLayer,
use tower_http::normalize_path::NormalizePathLayer; normalize_path::NormalizePathLayer,
use tower_http::trace; trace,
use tower_http::trace::HttpMakeClassifier; trace::{HttpMakeClassifier, TraceLayer},
use tower_http::trace::TraceLayer; },
use tracing::{Level, info}; tracing::{info, Level},
};
// TODO trim trailing slash into macro > let _app = NormalizePathLayer::trim_trailing_slash().layer(create_app!(routes)); // TODO trim trailing slash into macro > let _app = NormalizePathLayer::trim_trailing_slash().layer(create_app!(routes));
#[macro_export] #[macro_export]
@ -68,8 +69,8 @@ impl AppBuilder {
/// Adds a layer to the previously added routes /// Adds a layer to the previously added routes
pub fn layer<L>(mut self, layer: L) -> Self pub fn layer<L>(mut self, layer: L) -> Self
where where
L: Layer<Route> + Clone + Send + Sync + 'static, L: Layer<Route> + Clone + Send + 'static,
L::Service: Service<Request> + Clone + Send + Sync + 'static, L::Service: Service<Request> + Clone + Send + 'static,
<L::Service as Service<Request>>::Response: IntoResponse + 'static, <L::Service as Service<Request>>::Response: IntoResponse + 'static,
<L::Service as Service<Request>>::Error: Into<Infallible> + 'static, <L::Service as Service<Request>>::Error: Into<Infallible> + 'static,
<L::Service as Service<Request>>::Future: Send + 'static, <L::Service as Service<Request>>::Future: Send + 'static,
@ -151,13 +152,11 @@ impl AppBuilder {
let _ = fmt_trace(); // Allowed to fail let _ = fmt_trace(); // Allowed to fail
let listener = self.listener().await?; let listener = self.listener().await?;
let should_normalize = self.normalize_path.unwrap_or(true); if self.normalize_path.unwrap_or(true) {
let app = self.build(); let app = NormalizePathLayer::trim_trailing_slash().layer(self.build());
if should_normalize {
let app = NormalizePathLayer::trim_trailing_slash().layer(app);
axum::serve(listener, ServiceExt::<Request>::into_make_service(app)).await?; axum::serve(listener, ServiceExt::<Request>::into_make_service(app)).await?;
} else { } else {
let app = self.build();
axum::serve(listener, app.into_make_service()).await?; axum::serve(listener, app.into_make_service()).await?;
}; };
Ok(()) Ok(())
@ -200,7 +199,7 @@ mod tests {
AppBuilder::new() AppBuilder::new()
.socket((Ipv4Addr::LOCALHOST, 8080)) .socket((Ipv4Addr::LOCALHOST, 8080))
.routes([Router::new()]) .routes([Router::new()])
.fallback(async || "Fallback") .fallback(|| async { "Fallback" })
.cors(CorsLayer::new()) .cors(CorsLayer::new())
.normalize_path(true) .normalize_path(true)
.tracing(TraceLayer::new_for_http()) .tracing(TraceLayer::new_for_http())

View File

@ -1,14 +0,0 @@
use crate::axum::traits::BuildJson;
use axum::body::Body;
use axum::http::Request;
use axum::http::header::CONTENT_TYPE;
use mime::APPLICATION_JSON;
use serde::Serialize;
use serde_json::json;
impl BuildJson for axum::http::request::Builder {
fn json<T: Serialize>(self, body: T) -> Result<Request<Body>, axum::http::Error> {
self.header(CONTENT_TYPE, APPLICATION_JSON.as_ref())
.body(Body::new(json!(body).to_string()))
}
}

View File

@ -1,11 +1,11 @@
use axum::extract::FromRequest; use axum::{
use axum::extract::Multipart; async_trait,
use axum::extract::OptionalFromRequest; extract::{
use axum::extract::Request; multipart::{Field, MultipartError, MultipartRejection},
use axum::extract::multipart::Field; FromRequest, Multipart, Request,
use axum::extract::multipart::MultipartError; },
use axum::extract::multipart::MultipartRejection; response::IntoResponse,
use axum::response::IntoResponse; };
use mime::Mime; use mime::Mime;
use std::str::FromStr; use std::str::FromStr;
use thiserror::Error; use thiserror::Error;
@ -110,6 +110,7 @@ impl IntoResponse for MultipartFileRejection {
} }
} }
#[async_trait]
impl<S> FromRequest<S> for MultipartFile impl<S> FromRequest<S> for MultipartFile
where where
S: Send + Sync, S: Send + Sync,
@ -141,40 +142,7 @@ where
} }
} }
impl<S> OptionalFromRequest<S> for MultipartFile #[async_trait]
where
S: Send + Sync,
{
type Rejection = MultipartFileRejection;
/// Extracts a single file from a multipart request.
/// Expects exactly one file. A file must have a name, bytes and optionally a content type.
/// This extractor consumes the request and must ble placed last in the handler.
/// # Example
/// ```
/// use std::io::Read;
/// use axum::response::Html;
/// use lib::axum::extractor::{MultipartFile, MultipartFiles};
///
/// async fn upload_file(opt_file: Option<MultipartFile>) -> Html<String> {
/// Html(opt_file
/// .map(|MultipartFile(file)| String::from_utf8(file.bytes).unwrap())
/// .unwrap_or_else(|| String::from("<p>Not Found</p>"))
/// )
/// }
/// ```
async fn from_request(req: Request, state: &S) -> Result<Option<Self>, Self::Rejection> {
let multipart = Multipart::from_request(req, state).await?;
let files = get_files(multipart).await?;
if files.len() > 1 {
Err(MultipartFileRejection::SeveralFiles)
} else {
let file = files.first().ok_or(MultipartFileRejection::NoFiles)?;
Ok(Some(MultipartFile(file.clone())))
}
}
}
impl<S> FromRequest<S> for MultipartFiles impl<S> FromRequest<S> for MultipartFiles
where where
S: Send + Sync, S: Send + Sync,
@ -182,7 +150,7 @@ where
type Rejection = MultipartFileRejection; type Rejection = MultipartFileRejection;
/// Extracts multiple files from a multipart request. /// Extracts multiple files from a multipart request.
/// Can contain 0 files. A file must have a name, bytes and optionally a content type. /// Expects at least one file. A file must have a name, bytes and optionally a content type.
/// This extractor consumes the request and must ble placed last in the handler. /// This extractor consumes the request and must ble placed last in the handler.
/// # Example /// # Example
/// ``` /// ```
@ -202,11 +170,15 @@ where
async fn from_request(req: Request, state: &S) -> Result<Self, Self::Rejection> { async fn from_request(req: Request, state: &S) -> Result<Self, Self::Rejection> {
let multipart = Multipart::from_request(req, state).await?; let multipart = Multipart::from_request(req, state).await?;
let files = get_files(multipart).await?; let files = get_files(multipart).await?;
if files.is_empty() {
Err(MultipartFileRejection::NoFiles)
} else {
Ok(MultipartFiles(files)) Ok(MultipartFiles(files))
} }
}
} }
async fn get_files(mut multipart: Multipart) -> Result<Vec<File>, MultipartFileRejection> { async fn get_files<'a>(mut multipart: Multipart) -> Result<Vec<File>, MultipartFileRejection> {
let mut files = vec![]; let mut files = vec![];
while let Some(field) = multipart.next_field().await? { while let Some(field) = multipart.next_field().await? {
files.push(File::from_field(field).await?); files.push(File::from_field(field).await?);

View File

@ -1,11 +1,8 @@
pub mod app; pub mod app;
#[cfg(feature = "serde")]
pub mod builder;
pub mod extractor; pub mod extractor;
pub mod load; pub mod load;
#[cfg(feature = "serde")] #[cfg(feature = "serde")]
pub mod response; pub mod response;
pub mod router; pub mod router;
pub mod traits;
#[cfg(feature = "serde")] #[cfg(feature = "serde")]
pub mod wrappers; pub mod wrappers;

View File

@ -1,14 +1,10 @@
use { use {
crate::{serde::response::BaseResponse, serde::traits::DeserializeInto}, crate::serde::response::BaseResponse,
axum::{ axum::{
Json,
body::to_bytes,
response::{IntoResponse, Response}, response::{IntoResponse, Response},
Json,
}, },
serde::{ serde::Serialize,
Serialize,
de::{DeserializeOwned, Error},
},
}; };
impl<T: Serialize> IntoResponse for BaseResponse<T> { impl<T: Serialize> IntoResponse for BaseResponse<T> {
@ -17,15 +13,6 @@ impl<T: Serialize> IntoResponse for BaseResponse<T> {
} }
} }
impl DeserializeInto for Response {
async fn deserialize_into<T: DeserializeOwned>(self) -> Result<T, serde_json::Error> {
let body = to_bytes(self.into_body(), usize::MAX)
.await
.map_err(|e| serde_json::Error::custom(format!("Failed to read response body: {e}")))?;
serde_json::from_slice(&body)
}
}
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use axum::http::header::CONTENT_TYPE; use axum::http::header::CONTENT_TYPE;

View File

@ -13,8 +13,8 @@
/// use lib::router; /// use lib::router;
/// async fn simplify(path: axum::extract::path::Path<String>) {} /// async fn simplify(path: axum::extract::path::Path<String>) {}
/// router!("/simplify", lib::routes!( /// router!("/simplify", lib::routes!(
/// get "/{exp}" => simplify, /// get "/:exp" => simplify,
/// get "/table/{exp}" => async || {} /// get "/table/:exp" => || async {}
/// )); /// ));
/// ``` /// ```
#[macro_export] #[macro_export]
@ -92,8 +92,8 @@ macro_rules! join_routes {
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use axum::Router;
use axum::extract::State; use axum::extract::State;
use axum::Router;
async fn index() {} async fn index() {}
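For completeness, a minimal end-to-end sketch of the `routes!` macro documented above together with `AppBuilder`, adapted from the multipart example earlier in this diff; it assumes the `axum` feature of `lib` and a tokio runtime with the `macros` and `rt-multi-thread` features, and avoids path parameters so it is valid under both the `/:param` and `/{param}` syntaxes.

```rust
use lib::axum::app::AppBuilder;
use lib::routes;

async fn hello() -> &'static str {
    "hello"
}

#[tokio::main]
async fn main() {
    // Two GET routes built with the `routes!` macro, served on AppBuilder's
    // default socket (see the multipart example earlier in this diff).
    let route = routes!(
        get "/" => hello,
        get "/hello" => hello
    );
    AppBuilder::new().route(route).serve().await.unwrap();
}
```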

View File

@ -1,7 +0,0 @@
use axum::body::Body;
use axum::http::Request;
use serde::Serialize;
pub trait BuildJson {
fn json<T: Serialize>(self, body: T) -> Result<Request<Body>, axum::http::Error>;
}

View File

@ -1,28 +0,0 @@
use deadpool_diesel::Status;
use derive_more::From;
use diesel_async::AsyncPgConnection;
use diesel_async::pooled_connection::deadpool::{Object, PoolError};
use lib::diesel::pool::PgPool;
pub trait GetConnection: Clone + Send + Sync {
fn get(
&self,
) -> impl Future<Output = Result<Object<AsyncPgConnection>, GetConnectionError>> + Send;
fn status(&self) -> Status;
}
impl GetConnection for PgPool {
async fn get(&self) -> Result<Object<AsyncPgConnection>, GetConnectionError> {
self.get().await.map_err(Into::into)
}
#[inline]
fn status(&self) -> Status {
self.status()
}
}
#[derive(Debug, From)]
pub enum GetConnectionError {
PoolError(PoolError),
DieselError(diesel::result::Error),
}

View File

@@ -1,22 +0,0 @@
-use diesel::pg::Pg;
-use diesel_async::AsyncConnection;
-use diesel_async::async_connection_wrapper::AsyncConnectionWrapper;
-use diesel_migrations::{EmbeddedMigrations, MigrationHarness};
-use tokio::task::JoinError;
-
-/// Run Diesel migrations using an async connection.
-/// Only works with Postgres.
-pub async fn run_migrations<A>(
-    async_connection: A,
-    migrations: EmbeddedMigrations,
-) -> Result<(), JoinError>
-where
-    A: AsyncConnection<Backend = Pg> + 'static,
-{
-    let mut async_wrapper = AsyncConnectionWrapper::<A>::from(async_connection);
-
-    tokio::task::spawn_blocking(move || {
-        async_wrapper.run_pending_migrations(migrations).unwrap();
-    })
-    .await
-}

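A possible call site for the removed `run_migrations` helper, sketched under the assumption that the embedded migrations live in the default `migrations/` directory and that `database_url` points at a reachable Postgres instance:

```rust
use diesel_async::{AsyncConnection, AsyncPgConnection};
use diesel_migrations::{embed_migrations, EmbeddedMigrations};

// Embeds the SQL files from the crate's ./migrations directory at compile time.
pub const MIGRATIONS: EmbeddedMigrations = embed_migrations!();

pub async fn migrate(database_url: &str) -> Result<(), Box<dyn std::error::Error>> {
    let conn = AsyncPgConnection::establish(database_url).await?;
    run_migrations(conn, MIGRATIONS).await?;
    Ok(())
}
```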
View File

@@ -1,5 +1,3 @@
-pub mod get_connection;
-pub mod migration;
 pub mod pool;

 /// Re-export diesel::result::Error as DieselError

View File

@@ -1,7 +1,7 @@
 use deadpool_diesel::postgres::BuildError;
-use diesel_async::AsyncPgConnection;
-use diesel_async::pooled_connection::AsyncDieselConnectionManager;
 use diesel_async::pooled_connection::deadpool::Pool;
+use diesel_async::pooled_connection::AsyncDieselConnectionManager;
+use diesel_async::AsyncPgConnection;

 /// A type alias for the asynchronous PostgreSQL connection pool.
 pub type PgPool = Pool<AsyncPgConnection>;

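Building such a pool by hand with diesel-async's deadpool integration might look roughly like this; the URL and pool size are placeholders, and the crate's own `create_pool_from_url*` helpers presumably wrap something similar:

```rust
use diesel_async::pooled_connection::deadpool::Pool;
use diesel_async::pooled_connection::AsyncDieselConnectionManager;
use diesel_async::AsyncPgConnection;

pub type PgPool = Pool<AsyncPgConnection>;

fn build_pool(database_url: &str, max_size: usize) -> PgPool {
    // Manager that opens new AsyncPgConnection instances on demand.
    let manager = AsyncDieselConnectionManager::<AsyncPgConnection>::new(database_url);
    Pool::builder(manager)
        .max_size(max_size)
        .build()
        .expect("failed to build connection pool")
}
```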
View File

@@ -19,8 +19,6 @@ pub mod io;
 pub mod nom;
 #[cfg(feature = "serde")]
 pub mod serde;
-#[cfg(feature = "test")]
-pub mod test;
 #[cfg(feature = "time")]
 pub mod time;
 pub mod traits;

View File

@@ -1,22 +1,23 @@
-use nom::IResult;
-use nom::bytes::complete::take_while_m_n;
-use nom::character::complete::char;
-use nom::character::complete::multispace0;
-use nom::combinator::eof;
-use nom::error::ParseError;
-use nom::sequence::delimited;
-use nom::sequence::terminated;
-use nom::{Input, Parser};
+use {
+    nom::{
+        bytes::complete::take_while_m_n,
+        character::complete::{char, multispace0},
+        combinator::eof,
+        sequence::{delimited, terminated},
+        IResult, InputIter, InputLength, InputTake, Slice,
+    },
+    std::ops::RangeFrom,
+};
+
+// TODO generic input

 /// Trim leading and trailing whitespace from the input Parser
 /// - Parameters
 /// - `inner`: The parser to trim
 /// - Returns: A parser that trims leading and trailing whitespace from the input and then runs the value from the inner parser
-pub fn trim<I, O, F, E: ParseError<I>>(inner: F) -> impl Parser<I, Output = O, Error = E>
+pub fn trim<'a, Parser, R>(inner: Parser) -> impl FnMut(&'a str) -> IResult<&'a str, R>
 where
-    I: Input,
-    F: Parser<I, Output = O, Error = E>,
-    <I as Input>::Item: nom::AsChar,
+    Parser: FnMut(&'a str) -> IResult<&'a str, R>,
 {
     delimited(multispace0, inner, multispace0)
 }
@@ -26,11 +27,9 @@ where
 /// - Parameters
 /// - `inner`: The parser to run inside the parentheses
 /// - Returns: A parser that parses a parenthesized expression
-pub fn parenthesized<I, O, F, E: ParseError<I>>(inner: F) -> impl Parser<I, Output = O, Error = E>
+pub fn parenthesized<'a, Parser, R>(inner: Parser) -> impl FnMut(&'a str) -> IResult<&'a str, R>
 where
-    I: Input,
-    F: Parser<I, Output = O, Error = E>,
-    <I as Input>::Item: nom::AsChar,
+    Parser: FnMut(&'a str) -> IResult<&'a str, R>,
 {
     delimited(char('('), inner, char(')'))
 }
@@ -40,10 +39,10 @@ where
 /// - `n`: The length of the string to take
 /// - `predicate`: The predicate to call to validate the input
 /// - Returns: A parser that takes `n` characters from the input
-pub fn take_where<F, I>(n: usize, predicate: F) -> impl FnMut(I) -> IResult<I, I>
+pub fn take_where<F, Input>(n: usize, predicate: F) -> impl Fn(Input) -> IResult<Input, Input>
 where
-    I: Input,
-    F: Fn(<I as Input>::Item) -> bool,
+    Input: InputTake + InputIter + InputLength + Slice<RangeFrom<usize>>,
+    F: Fn(<Input as InputIter>::Item) -> bool + Copy,
 {
     take_while_m_n(n, n, predicate)
 }
@@ -55,43 +54,40 @@ where
 /// - Returns: A parser that runs the inner parser and then the end of the input
 /// # Example
 /// ```
-/// use lib::nom::combinators::exhausted;
 /// use nom::bytes::complete::{tag};
-/// use nom::Parser;
+/// use lib::nom::combinators::exhausted;
 ///
 /// let input = "test";
-/// let (remaining, result) = exhausted(tag::<&str, &str, nom::error::Error<&str>>("test")).parse(input).unwrap();
+/// let (remaining, result) = exhausted(tag("test"))(input).unwrap();
 /// assert_eq!(remaining, "");
 /// assert_eq!(result, "test");
 /// ```
 /// - Fails if the input is not exhausted
 /// ```
-/// use lib::nom::combinators::exhausted;
 /// use nom::bytes::complete::{tag};
-/// use nom::Parser;
+/// use lib::nom::combinators::exhausted;
 ///
 /// let input = "test";
-/// assert!(exhausted(tag::<&str, &str, nom::error::Error<&str>>("tes")).parse(input).is_err());
+/// assert!(exhausted(tag("tes"))(input).is_err());
 /// ```
-pub fn exhausted<F, I, O, E: ParseError<I>>(inner: F) -> impl Parser<I, Output = O, Error = E>
+pub fn exhausted<'a, Parser, R>(inner: Parser) -> impl FnMut(&'a str) -> IResult<&'a str, R>
 where
-    I: Input,
-    F: Parser<I, Output = O, Error = E>,
+    Parser: FnMut(&'a str) -> IResult<&'a str, R>,
 {
     terminated(inner, eof)
 }

 #[cfg(test)]
 mod tests {
+    use nom::{bytes::complete::take_while, sequence::tuple};
+
     use super::*;
-    use nom::bytes::complete::take_while;

     #[test]
     fn test_trim_both_sides() {
         let input = " test ";
-        let (remaining, result) = trim(take_where(4, |c: char| c.is_ascii_alphabetic()))
-            .parse(input)
-            .unwrap();
+        let (remaining, result) =
+            trim(take_where(4, |c: char| c.is_ascii_alphabetic()))(input).unwrap();
         assert_eq!(remaining, "");
         assert_eq!(result, "test");
     }
@@ -99,9 +95,8 @@ mod tests {
     #[test]
     fn test_trim_leading() {
         let input = " test";
-        let (remaining, result) = trim(take_where(4, |c: char| c.is_ascii_alphabetic()))
-            .parse(input)
-            .unwrap();
+        let (remaining, result) =
+            trim(take_where(4, |c: char| c.is_ascii_alphabetic()))(input).unwrap();
         assert_eq!(remaining, "");
         assert_eq!(result, "test");
     }
@@ -109,9 +104,8 @@ mod tests {
     #[test]
     fn test_trim_trailing() {
         let input = "test ";
-        let (remaining, result) = trim(take_where(4, |c: char| c.is_ascii_alphabetic()))
-            .parse(input)
-            .unwrap();
+        let (remaining, result) =
+            trim(take_where(4, |c: char| c.is_ascii_alphabetic()))(input).unwrap();
         assert_eq!(remaining, "");
         assert_eq!(result, "test");
     }
@@ -119,9 +113,8 @@ mod tests {
     #[test]
     fn test_trim_no_trim() {
         let input = "test";
-        let (remaining, result) = trim(take_where(4, |c: char| c.is_ascii_alphabetic()))
-            .parse(input)
-            .unwrap();
+        let (remaining, result) =
+            trim(take_where(4, |c: char| c.is_ascii_alphabetic()))(input).unwrap();
         assert_eq!(remaining, "");
         assert_eq!(result, "test");
     }
@@ -129,9 +122,8 @@ mod tests {
     #[test]
     fn test_parenthesized() {
         let input = "(test)";
-        let (remaining, result) = parenthesized(take_where(4, |c: char| c.is_ascii_alphabetic()))
-            .parse(input)
-            .unwrap();
+        let (remaining, result) =
+            parenthesized(take_where(4, |c: char| c.is_ascii_alphabetic()))(input).unwrap();
         assert_eq!(remaining, "");
         assert_eq!(result, "test");
     }
@@ -139,11 +131,7 @@ mod tests {
     #[test]
     fn test_parenthesized_parse_until_end() {
         let input = "(test)";
-        assert!(
-            parenthesized::<&str, &str, _, nom::error::Error<&str>>(take_while(|_| true))
-                .parse(input)
-                .is_err()
-        );
+        assert!(parenthesized(take_while(|_| true))(input).is_err());
     }

     #[test]
@@ -164,7 +152,7 @@
     fn test_take_where_too_much() {
         let input = "testing";
         assert_eq!(
-            take_where(4, |c: char| c.is_ascii_alphabetic()).parse(input),
+            take_where(4, |c: char| c.is_ascii_alphabetic())(input),
             Ok(("ing", "test"))
         );
     }
@@ -172,19 +160,14 @@
     #[test]
     fn test_take_where_predicate_false() {
         let input = "test";
-        assert!(
-            take_where(4, |c: char| c.is_ascii_digit())
-                .parse(input)
-                .is_err()
-        );
+        assert!(take_where(4, |c: char| c.is_ascii_digit())(input).is_err());
     }

     #[test]
     fn test_exhausted() {
         let input = "test";
-        let (remaining, result) = exhausted(take_where(4, |c: char| c.is_ascii_alphabetic()))
-            .parse(input)
-            .unwrap();
+        let (remaining, result) =
+            exhausted(take_where(4, |c: char| c.is_ascii_alphabetic()))(input).unwrap();
         assert_eq!(remaining, "");
         assert_eq!(result, "test");
     }
@@ -192,21 +175,16 @@
     #[test]
     fn test_exhausted_not_exhausted() {
         let input = "test ";
-        assert!(
-            exhausted(take_where(4, |c: char| c.is_ascii_alphabetic()))
-                .parse(input)
-                .is_err()
-        );
+        assert!(exhausted(take_where(4, |c: char| c.is_ascii_alphabetic()))(input).is_err());
     }

     #[test]
     fn test_exhausted_tuple() {
         let input = "test";
-        let (remaining, result) = exhausted((
+        let (remaining, result) = exhausted(tuple((
             take_where(3, |c: char| c.is_ascii_alphabetic()),
             take_while(|c: char| c.is_ascii_alphabetic()),
-        ))
-        .parse(input)
+        )))(input)
         .unwrap();
         assert_eq!(remaining, "");
         assert_eq!(result, ("tes", "t"));

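A usage sketch against the `FnMut`-style signatures on the added (nom 7) side of this diff, composing the three combinators; the input strings are arbitrary examples:

```rust
use lib::nom::combinators::{exhausted, parenthesized, trim};
use nom::bytes::complete::tag;

fn main() {
    // Allows surrounding whitespace, requires parentheses, and must consume all input.
    let mut parser = exhausted(trim(parenthesized(tag("test"))));

    assert_eq!(parser("  (test)  "), Ok(("", "test")));
    assert!(parser(" (test) trailing").is_err());
}
```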
View File

@@ -1,6 +1,6 @@
 use {
     crate::traits::IntoResult,
-    nom::{IResult, error::Error},
+    nom::{error::Error, IResult},
 };

 impl<T, R> IntoResult<T> for IResult<R, T> {

View File

@@ -1,2 +1 @@
 pub mod response;
-pub mod traits;

View File

@@ -1,7 +0,0 @@
-use serde::de::DeserializeOwned;
-
-pub trait DeserializeInto {
-    fn deserialize_into<T: DeserializeOwned>(
-        self,
-    ) -> impl Future<Output = Result<T, serde_json::Error>>;
-}

View File

@@ -1,43 +0,0 @@
-use crate::diesel::DieselError;
-use crate::diesel::get_connection::{GetConnection, GetConnectionError};
-use crate::diesel::pool::PgPool;
-use deadpool_diesel::Status;
-use deadpool_diesel::postgres::BuildError;
-use derive_more::From;
-use diesel_async::pooled_connection::deadpool::Object;
-use diesel_async::{AsyncConnection, AsyncPgConnection};
-use lib::diesel::pool::create_pool_from_url_with_size;
-
-#[derive(Clone)]
-pub struct PoolStub(PgPool);
-
-#[derive(Debug, PartialEq, From)]
-pub enum Error {
-    Connection(diesel::ConnectionError),
-    Database(DieselError),
-}
-
-pub async fn setup_test_transaction(url: impl AsRef<str>) -> Result<AsyncPgConnection, Error> {
-    let mut conn = AsyncPgConnection::establish(url.as_ref()).await?;
-    conn.begin_test_transaction().await?;
-    Ok(conn)
-}
-
-pub async fn create_test_pool_url_with_size(
-    url: impl Into<String>,
-    size: usize,
-) -> Result<PoolStub, BuildError> {
-    let pool = create_pool_from_url_with_size(url, size)?;
-    Ok(PoolStub(pool))
-}
-
-impl GetConnection for PoolStub {
-    async fn get(&self) -> Result<Object<AsyncPgConnection>, GetConnectionError> {
-        let mut conn = self.0.get().await?;
-        conn.begin_test_transaction().await?;
-        Ok(conn)
-    }
-    fn status(&self) -> Status {
-        unimplemented!("PoolStub does not support status")
-    }
-}

View File

@@ -1,3 +0,0 @@
-#[cfg(feature = "diesel")]
-pub mod diesel_pool;
-pub mod test_containers;

View File

@@ -1,45 +0,0 @@
-use crate::diesel::pool::{PgPool, create_pool_from_url};
-use deadpool_diesel::postgres::BuildError;
-use derive_more::{Constructor, From};
-use diesel_async::pooled_connection::deadpool::PoolError;
-use lib::diesel::DieselError;
-use testcontainers_modules::postgres::Postgres;
-use testcontainers_modules::testcontainers::runners::AsyncRunner;
-use testcontainers_modules::testcontainers::{ContainerAsync, TestcontainersError};
-use tokio::task::JoinError;
-
-/// When the TestContainer is dropped, the container will be removed.
-/// # Errors
-/// If destructed and the container field is dropped, the container will be dropped, and using the pool will cause an error.
-#[derive(Constructor)]
-pub struct TestContainer {
-    pub container: ContainerAsync<Postgres>,
-    pub pool: PgPool,
-}
-
-const TEST_CONTAINERS_INTERNAL_PORT: u16 = 5432;
-
-pub async fn create_test_containers_pool() -> Result<TestContainer, ContainerError> {
-    let container = create_postgres_container().await?;
-    let connection_string = format!(
-        "postgres://postgres:postgres@127.0.0.1:{}/postgres",
-        container
-            .get_host_port_ipv4(TEST_CONTAINERS_INTERNAL_PORT)
-            .await?
-    );
-    let pool = create_pool_from_url(connection_string)?;
-    Ok(TestContainer::new(container, pool))
-}
-
-pub async fn create_postgres_container() -> Result<ContainerAsync<Postgres>, TestcontainersError> {
-    Postgres::default().start().await
-}
-
-#[derive(Debug, From)]
-pub enum ContainerError {
-    TestContainers(TestcontainersError),
-    BuildError(BuildError),
-    PoolError(PoolError),
-    DieselError(DieselError),
-    JoinError(JoinError),
-}

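A sketch of how the removed testcontainers helper was intended to be used from an integration test; it assumes Docker is available and `tokio` (with the `macros` feature) as a dev-dependency:

```rust
#[tokio::test]
async fn uses_a_throwaway_postgres() -> Result<(), ContainerError> {
    // Start a disposable Postgres container and a pool pointing at it.
    let TestContainer { container: _container, pool } = create_test_containers_pool().await?;

    // `_container` must stay alive for the whole test; dropping it removes the container.
    let _conn = pool.get().await.expect("connection from pool");
    Ok(())
}
```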
View File

@@ -23,8 +23,9 @@
 #[macro_export]
 macro_rules! map {
     () => { std::collections::HashMap::new() };
-    ($default:ty; $($key:expr),+ $(,)?) => {
+    ($default:ty; $($key:expr),* $(,)?) => {
         {
+            #[allow(unused_mut)]
             let mut temp_map = std::collections::HashMap::new();
             $(
                 temp_map.insert($key, <$default>::default());
@@ -75,8 +76,8 @@ mod tests {
     }

     #[test]
-    fn test_map_only_keys_1_key() {
-        let map: HashMap<usize, usize> = map!(usize; 1);
-        assert_eq!(map.len(), 1);
+    fn test_map_only_keys_0_keys() {
+        let map: HashMap<usize, usize> = map!(usize;);
+        assert_eq!(map.len(), 0);
     }
 }
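Usage sketch for `map!` after the repetition is relaxed from `+` to `*`: each listed key is inserted with the value type's `Default`, and the key list may now be empty. The import assumes the exported macro is brought in with `use lib::map;`:

```rust
use lib::map;
use std::collections::HashMap;

fn main() {
    // Every key gets `Vec::<i32>::default()` (an empty Vec) as its value.
    let scores: HashMap<&str, Vec<i32>> = map!(Vec<i32>; "alice", "bob");
    assert_eq!(scores.len(), 2);
    assert_eq!(scores["alice"], Vec::<i32>::new());

    // With `*` the key list may be empty, yielding an empty map.
    let empty: HashMap<&str, Vec<i32>> = map!(Vec<i32>;);
    assert!(empty.is_empty());
}
```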