Compare commits: 80f4af9087...master
15 Commits

SHA1:
9f85b7ae97
972178932c
2f00a27ce1
894d5159e5
5dbcc93d4e
a2075892f5
f8c629d123
05afb64b07
d84078d6c3
f14543af6b
d260b2d676
35ccc235c8
7a46101b42
5a77407297
2f1eb4df3a
@@ -12,7 +12,10 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v4
+      - uses: actions-rust-lang/setup-rust-toolchain@v1
+
       - name: Build
         run: cargo build --verbose
+
       - name: Run tests
         run: cargo test --verbose --all-features --workspace
.idea/inspectionProfiles/Project_Default.xml (generated, new file, 12 lines)
@@ -0,0 +1,12 @@
+<component name="InspectionProjectProfileManager">
+  <profile version="1.0">
+    <option name="myName" value="Project Default" />
+    <inspection_tool class="GrazieInspection" enabled="false" level="GRAMMAR_ERROR" enabled_by_default="false" />
+    <inspection_tool class="LanguageDetectionInspection" enabled="false" level="WARNING" enabled_by_default="false" />
+    <inspection_tool class="SpellCheckingInspection" enabled="false" level="TYPO" enabled_by_default="false">
+      <option name="processCode" value="true" />
+      <option name="processLiterals" value="true" />
+      <option name="processComments" value="true" />
+    </inspection_tool>
+  </profile>
+</component>
Cargo.lock (generated, 537 lines changed)
File diff suppressed because it is too large
Cargo.toml (32 lines changed)
@@ -3,14 +3,14 @@ members = ["crates/*"]
 exclude = ["examples"]

 [workspace.package]
-edition = "2021"
-rust-version = "1.80.1"
+edition = "2024"
+rust-version = "1.88"
 authors = ["Martin Berg Alstad"]
-homepage = "emberal.github.io"
+homepage = "martials.no"

 [package]
 name = "lib"
-version = "1.4.3"
+version = "2.0.0"
 description = "A library with utilities and helper fuctions."
 edition = { workspace = true }
 rust-version = { workspace = true }
@@ -21,20 +21,19 @@ homepage = { workspace = true }

 [dependencies]
 # Api
-axum = { version = "0.7", optional = true, features = ["multipart"] }
+axum = { version = "0.8", optional = true, features = ["multipart"] }
 tower = { version = "0.5", optional = true }
-tower-http = { version = "0.5", optional = true, features = ["trace", "cors", "normalize-path"] }
+tower-http = { version = "0.6", optional = true, features = ["trace", "cors", "normalize-path"] }
 mime = { version = "0.3", optional = true }
 # Async
-async-trait = { workspace = true }
 tokio = { workspace = true, optional = true, features = ["fs", "rt-multi-thread"] }
 tokio-util = { version = "0.7", optional = true, features = ["io"] }
 # Database
 diesel = { workspace = true, optional = true, features = ["postgres"] }
-diesel-async = { workspace = true, optional = true, features = ["postgres", "deadpool"] }
+diesel-async = { workspace = true, optional = true, features = ["postgres", "deadpool", "async-connection-wrapper"] }
 diesel-crud-derive = { path = "crates/diesel_crud_derive", optional = true }
 diesel-crud-trait = { path = "crates/diesel_crud_trait", optional = true }
-diesel_async_migrations = { version = "0.15", optional = true }
+diesel_migrations = { workspace = true, optional = true }
 deadpool-diesel = { workspace = true, optional = true, features = ["postgres"] }
 # Error handling
 thiserror = { workspace = true, optional = true }
@@ -42,7 +41,7 @@ thiserror = { workspace = true, optional = true }
 tracing = { version = "0.1", optional = true }
 tracing-subscriber = { version = "0.3", optional = true }
 # Parsing
-nom = { version = "7.1", optional = true }
+nom = { version = "8.0", optional = true }
 # Procedural macros
 into-response-derive = { path = "crates/into_response_derive", optional = true }
 read-files = { path = "crates/read_files", optional = true }
@@ -50,7 +49,7 @@ read-files = { path = "crates/read_files", optional = true }
 serde = { version = "1.0", optional = true, features = ["derive"] }
 serde_json = { version = "1.0", optional = true }
 # Test
-testcontainers-modules = { version = "0.10", features = ["postgres"], optional = true }
+testcontainers-modules = { workspace = true, features = ["postgres"], optional = true }
 # Time
 chrono = { version = "0.4", optional = true, features = ["serde"] }
 # Utils
@@ -59,24 +58,27 @@ derive_more = { workspace = true, features = ["from", "constructor"] }
 [workspace.dependencies]
 # Async
 tokio = "1.40"
-async-trait = "0.1"
 # Database
 diesel = "2.2"
 diesel-async = "0.5"
+diesel_migrations = "2.2"
 deadpool-diesel = "0.6"
 # Error handling
-thiserror = "1.0"
+thiserror = "2.0"
 # Procedural macros
 syn = "2.0"
 quote = "1.0"
 deluxe = "0.5"
 proc-macro2 = "1.0"
+# Test
+testcontainers-modules = "0.11"
 # Utils
-derive_more = "1.0"
+derive_more = "2.0"
+regex = "1.11"

 [features]
 axum = ["dep:axum", "dep:tower", "dep:tower-http", "dep:thiserror", "dep:tracing", "dep:tracing-subscriber", "dep:tokio", "dep:mime"]
-diesel = ["dep:diesel-crud-trait", "dep:diesel", "dep:diesel-async", "dep:deadpool-diesel", "dep:diesel_async_migrations"]
+diesel = ["dep:diesel-crud-trait", "dep:diesel", "dep:diesel-async", "dep:deadpool-diesel", "dep:diesel_migrations"]
 io = ["dep:tokio", "dep:tokio-util"]
 iter = []
 nom = ["dep:nom"]
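The edition and rust-version bumps above are load-bearing for the rest of this compare: the Rust 2024 prelude re-exports std::future::Future and IntoFuture, which is why later hunks can write `impl Future<...>` in trait signatures without an explicit import. A minimal sketch of that pattern on its own terms; the trait and types below are illustrative, not part of the crate:

// Compiles on edition 2024 without `use std::future::Future;`,
// because `Future` (and `IntoFuture`) are in the 2024 prelude.
trait Fetch {
    fn fetch(&self) -> impl Future<Output = String>;
}

struct Stub;

impl Fetch for Stub {
    // An ordinary `async fn` satisfies the `impl Future` return type in the trait.
    async fn fetch(&self) -> String {
        "hello".to_string()
    }
}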
@@ -1,15 +0,0 @@
-[tasks.clippy]
-command = "cargo"
-args = ["clippy", "--all-targets", "--all-features", "--", "-D", "warnings"]
-
-[tasks.fmt]
-command = "cargo"
-args = ["fmt", "--all"]
-
-[tasks.test]
-command = "cargo"
-args = ["test", "--all-features"]
-
-[tasks.coverage]
-command = "cargo"
-args = ["llvm-cov", "--all-features"]
@@ -1,3 +1,3 @@
-# Lib
+# Rust Lib 🦀

--_-
+Rust library for common code, traits and utility functions
@@ -1,5 +1,5 @@
 use crate::common::PrimaryKey;
-use deluxe::{extract_attributes, ExtractAttributes};
+use deluxe::{ExtractAttributes, extract_attributes};
 use proc_macro2::Ident;
 use quote::quote;
 use std::collections::HashMap;
|
@ -9,6 +9,6 @@ pub(crate) struct PrimaryKey {
|
|||||||
|
|
||||||
pub(crate) fn return_type(output: proc_macro2::TokenStream) -> proc_macro2::TokenStream {
|
pub(crate) fn return_type(output: proc_macro2::TokenStream) -> proc_macro2::TokenStream {
|
||||||
quote! {
|
quote! {
|
||||||
std::pin::Pin<Box<dyn core::future::Future<Output = Result<#output, lib::diesel_crud_trait::CrudError>> + Send + 'async_trait>>
|
Result<#output, lib::diesel_crud_trait::CrudError>
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
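The helper above only emits the return type, but the switch from a pinned, boxed future to the bare `Result` is what turns the generated methods below into plain `async fn`s. A hand-written sketch of the two shapes, with placeholder payload and error types:

use std::future::Future;
use std::pin::Pin;

// Old shape: the macro spelled out a boxed future and drove it with Box::pin.
fn insert_old(value: i32) -> Pin<Box<dyn Future<Output = Result<i32, String>> + Send + 'static>> {
    Box::pin(async move { Ok::<i32, String>(value) })
}

// New shape: the return type is just the Result; `async fn` builds the future.
async fn insert_new(value: i32) -> Result<i32, String> {
    Ok(value)
}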
@@ -1,4 +1,4 @@
-use crate::{common, Attributes};
+use crate::{Attributes, common};
 use quote::quote;

 pub(crate) fn derive_diesel_crud_create_impl(
@@ -16,37 +16,24 @@ pub(crate) fn derive_diesel_crud_create_impl(
         #[automatically_derived]
         impl lib::diesel_crud_trait::DieselCrudCreate<#table::table> for #struct_ident {
             type Insert = #insert;
-            fn insert<'a, 'async_trait>(insert: Self::Insert, conn: &'a mut diesel_async::AsyncPgConnection) -> #return_type
-            where
-                Self: Sized + Sync + 'a,
-                'a: 'async_trait,
-            {
-                Box::pin(async move {
-                    use diesel::associations::HasTable;
-                    diesel_async::RunQueryDsl::get_result(
-                        diesel::dsl::insert_into(#table::table::table()).values(insert),
-                        conn
-                    )
-                    .await
-                    .map_err(Into::into)
-                })
+            async fn insert(insert: Self::Insert, conn: &mut diesel_async::AsyncPgConnection) -> #return_type {
+                use diesel::associations::HasTable;
+                diesel_async::RunQueryDsl::get_result(
+                    diesel::dsl::insert_into(#table::table::table()).values(insert),
+                    conn
+                )
+                .await
+                .map_err(Into::into)
             }

-            fn insert_many<'a, 'b, 'async_trait>(insert: &'a [Self::Insert], conn: &'b mut diesel_async::AsyncPgConnection) -> #many_return_type
-            where
-                Self: Sized + Sync + 'async_trait,
-                'a: 'async_trait,
-                'b: 'async_trait,
-            {
-                Box::pin(async move {
-                    use diesel::associations::HasTable;
-                    diesel_async::RunQueryDsl::get_results(
-                        diesel::dsl::insert_into(#table::table::table()).values(insert),
-                        conn
-                    )
-                    .await
-                    .map_err(Into::into)
-                })
+            async fn insert_many(insert: &[Self::Insert], conn: &mut diesel_async::AsyncPgConnection) -> #many_return_type {
+                use diesel::associations::HasTable;
+                diesel_async::RunQueryDsl::get_results(
+                    diesel::dsl::insert_into(#table::table::table()).values(insert),
+                    conn
+                )
+                .await
+                .map_err(Into::into)
             }
         }
     }
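Call sites for the derived methods are unchanged by this rewrite, they just stop going through a boxed future. A hedged sketch of such a call, assuming the `User` and `InsertUser` types from the tests crate later in this compare are in scope:

async fn create_user(
    conn: &mut diesel_async::AsyncPgConnection,
    new_user: InsertUser,
) -> Result<User, lib::diesel_crud_trait::CrudError> {
    // Bring the derived trait's method into scope and await it like any async fn.
    use lib::diesel_crud_trait::DieselCrudCreate;
    User::insert(new_user, conn).await
}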
@@ -1,4 +1,4 @@
-use crate::{common, Attributes, PrimaryKey};
+use crate::{Attributes, PrimaryKey, common};
 use quote::quote;

 pub(crate) fn derive_diesel_crud_delete_impl(
@@ -22,24 +22,18 @@ pub(crate) fn derive_diesel_crud_delete_impl(
         #[automatically_derived]
         impl lib::diesel_crud_trait::DieselCrudDelete for #struct_ident {
             type PK = #pk_type;
-            fn delete<'a, 'async_trait>(pk: Self::PK, conn: &'a mut diesel_async::AsyncPgConnection) -> #return_type
-            where
-                Self: Sized + Sync + 'a,
-                'a: 'async_trait,
-            {
-                Box::pin(async move {
-                    use diesel::QueryDsl;
-                    use diesel::associations::HasTable;
-                    diesel_async::RunQueryDsl::get_result(
-                        diesel::delete(
-                            #table::table
-                                .filter(diesel::expression_methods::ExpressionMethods::eq(#table::#pk_ident, pk))
-                        ),
-                        conn,
-                    )
-                    .await
-                    .map_err(Into::into)
-                })
+            async fn delete(pk: Self::PK, conn: &mut diesel_async::AsyncPgConnection) -> #return_type {
+                use diesel::QueryDsl;
+                use diesel::associations::HasTable;
+                diesel_async::RunQueryDsl::get_result(
+                    diesel::delete(
+                        #table::table
+                            .filter(diesel::expression_methods::ExpressionMethods::eq(#table::#pk_ident, pk))
+                    ),
+                    conn,
+                )
+                .await
+                .map_err(Into::into)
             }
         }
     }
@@ -1,6 +1,6 @@
 extern crate proc_macro;

-use crate::attributes::{extract_attrs, Attributes};
+use crate::attributes::{Attributes, extract_attrs};
 use crate::common::PrimaryKey;
 use crate::create::derive_diesel_crud_create_impl;
 use crate::delete::derive_diesel_crud_delete_impl;
@@ -8,7 +8,7 @@ use crate::list::derive_diesel_crud_list_impl;
 use crate::read::derive_diesel_crud_read_impl;
 use crate::update::derive_diesel_crud_update_impl;
 use quote::quote;
-use syn::{parse_macro_input, DeriveInput};
+use syn::{DeriveInput, parse_macro_input};

 mod attributes;
 mod common;
@@ -1,4 +1,4 @@
-use crate::{common, Attributes};
+use crate::{Attributes, common};
 use quote::quote;

 pub(crate) fn derive_diesel_crud_list_impl(
@@ -13,15 +13,9 @@ pub(crate) fn derive_diesel_crud_list_impl(
     quote! {
         #[automatically_derived]
         impl lib::diesel_crud_trait::DieselCrudList for #struct_ident {
-            fn list<'a, 'async_trait>(conn: &'a mut diesel_async::AsyncPgConnection) -> #return_type
-            where
-                Self: Sized + Sync + 'a,
-                'a: 'async_trait
-            {
-                Box::pin(async move {
-                    use diesel::associations::HasTable;
-                    diesel_async::RunQueryDsl::get_results(#table::table::table(), conn).await.map_err(Into::into)
-                })
+            async fn list(conn: &mut diesel_async::AsyncPgConnection) -> #return_type {
+                use diesel::associations::HasTable;
+                diesel_async::RunQueryDsl::get_results(#table::table::table(), conn).await.map_err(Into::into)
             }
         }
     }
@@ -1,5 +1,5 @@
 use crate::common::PrimaryKey;
-use crate::{common, Attributes};
+use crate::{Attributes, common};
 use quote::quote;

 pub(crate) fn derive_diesel_crud_read_impl(
@@ -20,20 +20,14 @@ derive_diesel_crud_read_impl(
         #[automatically_derived]
         impl lib::diesel_crud_trait::DieselCrudRead for #struct_ident {
             type PK = #pk_type;
-            fn read<'a, 'async_trait>(pk: Self::PK, conn: &'a mut diesel_async::AsyncPgConnection) -> #return_type
-            where
-                Self: Sized + Sync + 'a,
-                'a: 'async_trait
-            {
-                Box::pin(async move {
-                    use diesel::associations::HasTable;
-                    diesel_async::RunQueryDsl::get_result(
-                        diesel::QueryDsl::find(#table::table::table(), pk),
-                        conn
-                    )
-                    .await
-                    .map_err(Into::into)
-                })
+            async fn read(pk: Self::PK, conn: &mut diesel_async::AsyncPgConnection) -> #return_type {
+                use diesel::associations::HasTable;
+                diesel_async::RunQueryDsl::get_result(
+                    diesel::QueryDsl::find(#table::table::table(), pk),
+                    conn
+                )
+                .await
+                .map_err(Into::into)
             }
         }
     }
@@ -1,4 +1,4 @@
-use crate::{common, Attributes};
+use crate::{Attributes, common};
 use quote::quote;

 pub(crate) fn derive_diesel_crud_update_impl(
@@ -15,20 +15,14 @@ derive_diesel_crud_update_impl(
         #[automatically_derived]
         impl lib::diesel_crud_trait::DieselCrudUpdate for #struct_ident {
             type Update = #update;
-            fn update<'a, 'async_trait>(update: Self::Update, conn: &'a mut diesel_async::AsyncPgConnection) -> #return_type
-            where
-                Self: Sized + Sync + 'a,
-                'a: 'async_trait,
-            {
-                Box::pin(async move {
-                    use diesel::associations::HasTable;
-                    diesel_async::RunQueryDsl::get_result(
-                        diesel::dsl::update(#table::table::table()).set(update),
-                        conn,
-                    )
-                    .await
-                    .map_err(Into::into)
-                })
+            async fn update(update: Self::Update, conn: &mut diesel_async::AsyncPgConnection) -> #return_type {
+                use diesel::associations::HasTable;
+                diesel_async::RunQueryDsl::get_result(
+                    diesel::dsl::update(#table::table::table()).set(update),
+                    conn,
+                )
+                .await
+                .map_err(Into::into)
             }
         }
     }
@@ -7,6 +7,4 @@ rust-version = { workspace = true }
 [dependencies]
 diesel = { workspace = true, features = ["postgres"] }
 diesel-async = { workspace = true, features = ["postgres", "deadpool"] }
-async-trait = "0.1"
-deadpool-diesel = { version = "0.6", features = ["postgres"] }
-thiserror = "1.0"
+thiserror = { workspace = true }
@@ -1,6 +1,5 @@
 mod error;

-use async_trait::async_trait;
 use diesel::{AsChangeset, Insertable};
 use diesel_async::AsyncPgConnection;
 pub use error::CrudError;
@@ -28,17 +27,19 @@ pub trait DieselCrud<Table>:
 /// - `conn` - The database connection
 /// # Returns
 /// A result containing the inserted entity or a `CrudError`
-#[async_trait]
 pub trait DieselCrudCreate<Table>
 where
     Self: Sized,
 {
     type Insert: Insertable<Table>;
-    async fn insert(insert: Self::Insert, conn: &mut AsyncPgConnection) -> Result<Self, CrudError>;
-    async fn insert_many(
+    fn insert(
+        insert: Self::Insert,
+        conn: &mut AsyncPgConnection,
+    ) -> impl Future<Output = Result<Self, CrudError>>;
+    fn insert_many(
         insert: &[Self::Insert],
         conn: &mut AsyncPgConnection,
-    ) -> Result<Vec<Self>, CrudError>;
+    ) -> impl Future<Output = Result<Vec<Self>, CrudError>>;
 }

 /// Gets an entity from the database
@@ -52,13 +53,15 @@ where
 /// # Returns
 /// A result containing the entity or a `CrudError`.
 /// If the entity is not found, the error should be `CrudError::NotFound`.
-#[async_trait]
 pub trait DieselCrudRead
 where
     Self: Sized,
 {
     type PK;
-    async fn read(pk: Self::PK, conn: &mut AsyncPgConnection) -> Result<Self, CrudError>;
+    fn read(
+        pk: Self::PK,
+        conn: &mut AsyncPgConnection,
+    ) -> impl Future<Output = Result<Self, CrudError>>;
 }

 /// Updates an entity in the database
@@ -73,13 +76,15 @@ where
 /// # Returns
 /// A result containing the old entry of the entity if successful or a `CrudError`.
 /// If the entity is not found, the error should be `CrudError::NotFound`.
-#[async_trait]
 pub trait DieselCrudUpdate
 where
     Self: Sized,
 {
     type Update: AsChangeset;
-    async fn update(update: Self::Update, conn: &mut AsyncPgConnection) -> Result<Self, CrudError>;
+    fn update(
+        update: Self::Update,
+        conn: &mut AsyncPgConnection,
+    ) -> impl Future<Output = Result<Self, CrudError>>;
 }

 /// Deletes an entity from the database
@@ -93,13 +98,15 @@ where
 /// # Returns
 /// A result containing the deleted entity or a `CrudError`.
 /// If the entity is not found, the error should be `CrudError::NotFound`.
-#[async_trait]
 pub trait DieselCrudDelete
 where
     Self: Sized,
 {
     type PK;
-    async fn delete(pk: Self::PK, conn: &mut AsyncPgConnection) -> Result<Self, CrudError>;
+    fn delete(
+        pk: Self::PK,
+        conn: &mut AsyncPgConnection,
+    ) -> impl Future<Output = Result<Self, CrudError>>;
 }

 /// Lists all entities in the table
@@ -109,10 +116,9 @@ where
 /// - `conn` - The database connection
 /// # Returns
 /// A result containing a Vec of entities or a `CrudError`.
-#[async_trait]
 pub trait DieselCrudList
 where
     Self: Sized,
 {
-    async fn list(conn: &mut AsyncPgConnection) -> Result<Vec<Self>, CrudError>;
+    fn list(conn: &mut AsyncPgConnection) -> impl Future<Output = Result<Vec<Self>, CrudError>>;
 }
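Dropping `#[async_trait]` here relies on return-position `impl Trait` in traits, stable since Rust 1.75: the trait names the future in the signature, and implementors keep writing ordinary `async fn`s. A self-contained sketch of the same shape, using stand-in connection and error types rather than the real diesel ones:

use std::future::Future;

// Stand-ins for AsyncPgConnection and the crate's CrudError, for illustration only.
struct Conn;
#[derive(Debug)]
enum CrudError {
    NotFound,
}

trait CrudRead
where
    Self: Sized,
{
    type Pk;
    // No boxing and no #[async_trait]: the trait spells out the future directly.
    fn read(pk: Self::Pk, conn: &mut Conn) -> impl Future<Output = Result<Self, CrudError>>;
}

struct User {
    id: i32,
}

impl CrudRead for User {
    type Pk = i32;
    // An ordinary `async fn` satisfies the `impl Future` signature above.
    async fn read(pk: Self::Pk, _conn: &mut Conn) -> Result<Self, CrudError> {
        if pk == 0 {
            return Err(CrudError::NotFound);
        }
        Ok(User { id: pk })
    }
}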
@@ -1,7 +1,7 @@
 extern crate proc_macro;
 use {
     proc_macro::TokenStream,
-    syn::{parse_macro_input, DeriveInput},
+    syn::{DeriveInput, parse_macro_input},
 };

 mod derive;
@@ -10,4 +10,4 @@ proc-macro = true
 [dependencies]
 syn = { workspace = true }
 quote = { workspace = true }
-regex = "1.10"
+regex = { workspace = true }
@@ -10,8 +10,8 @@ use std::{

 use quote::quote;
 use syn::{
-    parse::{Parse, ParseStream},
     LitStr, Token,
+    parse::{Parse, ParseStream},
 };

 pub fn read_files_to_string_impl(args: Args) -> TokenStream {
@@ -9,12 +9,9 @@ homepage.workspace = true
 [dependencies]
 diesel = { workspace = true }
 diesel-async = { workspace = true }
-lib = { path = "../../../lib", features = ["diesel", "derive"] }
-derive_more = { workspace = true, features = ["constructor", "from"] }
-thiserror = { workspace = true }
+lib = { path = "../../../rust-lib", features = ["diesel", "derive", "test"] }

 [dev-dependencies]
 tokio = { workspace = true, features = ["macros"] }
-dotenvy_macro = "0.15"
-testcontainers-modules = { version = "0.9", features = ["postgres"] }
-diesel_async_migrations = "0.14"
+testcontainers-modules = { workspace = true, features = ["postgres"] }
+diesel_migrations = { workspace = true }
@@ -2,8 +2,8 @@
 # see https://diesel.rs/guides/configuring-diesel-cli

 [print_schema]
-file = "src/schema.rs"
+file = "tests/schema.rs"
 custom_type_derives = ["diesel::query_builder::QueryId", "Clone"]

 [migrations_directory]
-dir = "/home/martin/git/rust/lib/crates/tests/migrations"
+dir = "./migrations"
@@ -5,15 +5,9 @@ use lib::diesel_crud_derive::{
 use lib::diesel_crud_trait::DieselCrudCreate;
 use test_containers::create_test_containers_pool;

+mod schema;
 #[cfg(test)]
-pub mod test_containers;
-
-diesel::table! {
-    user (email) {
-        #[max_length = 255]
-        email -> Varchar,
-    }
-}
+mod test_containers;

 #[derive(
     Debug,
@@ -29,19 +23,21 @@ diesel::table! {
     DieselCrudUpdate,
 )]
 #[diesel_crud(insert = InsertUser)]
-#[diesel(table_name = user)]
+#[diesel(table_name = schema::user)]
 struct User {
     #[diesel_crud(pk)]
     email: String,
 }

 #[derive(Clone, Insertable)]
-#[diesel(table_name = user)]
+#[diesel(table_name = schema::user)]
 struct InsertUser {
     email: String,
 }

-#[tokio::test]
+// TODO make test work in action
+#[ignore]
+#[tokio::test(flavor = "multi_thread")]
 async fn test_insert_user() {
     let container = create_test_containers_pool().await.unwrap();
     let mut conn = container.pool.get().await.unwrap();
crates/tests/tests/schema.rs (new file, 6 lines)
@@ -0,0 +1,6 @@
+diesel::table! {
+    user (email) {
+        #[max_length = 255]
+        email -> Varchar,
+    }
+}
@@ -1,50 +1,11 @@
-use derive_more::{Constructor, From};
-use diesel_async::pooled_connection::deadpool::{BuildError, PoolError};
-use diesel_async::AsyncPgConnection;
-use diesel_async_migrations::EmbeddedMigrations;
-use lib::diesel::pool::{create_pool_from_url, PgPool};
-use lib::diesel::DieselError;
-use testcontainers_modules::postgres::Postgres;
-use testcontainers_modules::testcontainers::runners::AsyncRunner;
-use testcontainers_modules::testcontainers::{ContainerAsync, TestcontainersError};
+use diesel_migrations::EmbeddedMigrations;
+use lib::diesel::migration::run_migrations;
+use lib::test::test_containers::{ContainerError, TestContainer};

-/// When the TestContainer is dropped, the container will be removed.
-/// # Errors
-/// If destructed and the container field is dropped, the container will be stopped
-/// and all connections from the pool will result in DatabaseError.
-#[derive(Constructor)]
-pub struct TestContainer {
-    pub container: ContainerAsync<Postgres>,
-    pub pool: PgPool,
-}
-
-#[derive(Debug, From)]
-pub enum ContainerError {
-    TestContainers(TestcontainersError),
-    BuildError(BuildError),
-    PoolError(PoolError),
-    DieselError(DieselError),
-}
-
-pub async fn create_test_containers_pool<'a>() -> Result<TestContainer, ContainerError> {
-    let container = create_postgres_container().await?;
-    let connection_string = format!(
-        "postgres://postgres:postgres@localhost:{}/postgres",
-        container.get_host_port_ipv4(5432).await?
-    );
-    let pool = create_pool_from_url(connection_string)?;
-    run_migrations(pool.get().await?.as_mut()).await?;
-    Ok(TestContainer::new(container, pool))
-}
-
-pub(crate) async fn run_migrations(
-    conn: &mut AsyncPgConnection,
-) -> Result<(), diesel::result::Error> {
-    static EMBEDDED_MIGRATIONS: EmbeddedMigrations =
-        diesel_async_migrations::embed_migrations!("./migrations");
-    EMBEDDED_MIGRATIONS.run_pending_migrations(conn).await
-}
-
-pub async fn create_postgres_container() -> Result<ContainerAsync<Postgres>, TestcontainersError> {
-    Postgres::default().start().await
+pub async fn create_test_containers_pool() -> Result<TestContainer, ContainerError> {
+    let test_container = lib::test::test_containers::create_test_containers_pool().await?;
+    let connection = test_container.pool.get().await?;
+    const MIGRATIONS: EmbeddedMigrations = diesel_migrations::embed_migrations!("./migrations");
+    run_migrations(connection, MIGRATIONS).await?;
+    Ok(test_container)
 }
examples/multipart_file/Cargo.lock (generated, 2 lines changed)
@@ -1,6 +1,6 @@
 # This file is automatically @generated by Cargo.
 # It is not intended for manual editing.
-version = 3
+version = 4

 [[package]]
 name = "addr2line"
flake.lock (generated, new file, 44 lines)
@@ -0,0 +1,44 @@
+{
+  "nodes": {
+    "nixpkgs": {
+      "locked": {
+        "lastModified": 1751211869,
+        "narHash": "sha256-1Cu92i1KSPbhPCKxoiVG5qnoRiKTgR5CcGSRyLpOd7Y=",
+        "owner": "NixOS",
+        "repo": "nixpkgs",
+        "rev": "b43c397f6c213918d6cfe6e3550abfe79b5d1c51",
+        "type": "github"
+      },
+      "original": {
+        "owner": "NixOS",
+        "ref": "nixos-25.05",
+        "repo": "nixpkgs",
+        "type": "github"
+      }
+    },
+    "nixpkgs-unstable": {
+      "locked": {
+        "lastModified": 1741173522,
+        "narHash": "sha256-k7VSqvv0r1r53nUI/IfPHCppkUAddeXn843YlAC5DR0=",
+        "owner": "NixOS",
+        "repo": "nixpkgs",
+        "rev": "d69ab0d71b22fa1ce3dbeff666e6deb4917db049",
+        "type": "github"
+      },
+      "original": {
+        "owner": "NixOS",
+        "ref": "nixos-unstable",
+        "repo": "nixpkgs",
+        "type": "github"
+      }
+    },
+    "root": {
+      "inputs": {
+        "nixpkgs": "nixpkgs",
+        "nixpkgs-unstable": "nixpkgs-unstable"
+      }
+    }
+  },
+  "root": "root",
+  "version": 7
+}
flake.nix (new file, 38 lines)
@@ -0,0 +1,38 @@
+{
+  inputs = {
+    nixpkgs.url = "github:NixOS/nixpkgs/nixos-25.05";
+    nixpkgs-unstable.url = "github:NixOS/nixpkgs/nixos-unstable";
+  };
+
+  outputs = { nixpkgs, nixpkgs-unstable, ... }:
+    let
+      system = "x86_64-linux";
+    in
+    {
+      devShells.${system}.default =
+        let
+          pkgs = import nixpkgs {
+            inherit system;
+          };
+          unstable = import nixpkgs-unstable {
+            inherit system;
+          };
+        in
+        pkgs.mkShell {
+          packages = with pkgs; [
+            git
+            just
+          ] ++ [
+            # Rust
+            gcc
+            # cargo
+          ] ++ [
+            # Diesel
+            diesel-cli
+            unstable.libpq
+          ];
+
+          shellHook = "fish";
+        };
+    };
+}
justfile (new executable file, 29 lines)
@@ -0,0 +1,29 @@
+#!/usr/bin/env just --justfile
+
+# List all recipes
+default:
+    @just --list
+
+# Open a nix shell with all dependencies in path
+develop:
+    nix develop
+
+# Format all rust files
+fmt:
+    cargo fmt --all
+
+# Lint all files with clippy
+lint:
+    cargo clippy --all-targets --all-features -- -D warnings
+
+# Build a release version
+release:
+    cargo build --release
+
+# Run all tests
+test:
+    cargo test --all-features --workspace
+
+# Run coverage
+coverage:
+    cargo llvm-cov
@@ -1,23 +1,22 @@
-use {
-    axum::{
-        extract::Request, handler::Handler, response::IntoResponse, routing::Route, Router,
-        ServiceExt,
-    },
-    std::{
-        convert::Infallible,
-        io,
-        net::{IpAddr, Ipv4Addr, SocketAddr},
-    },
-    tokio::net::TcpListener,
-    tower::{layer::Layer, Service},
-    tower_http::{
-        cors::CorsLayer,
-        normalize_path::NormalizePathLayer,
-        trace,
-        trace::{HttpMakeClassifier, TraceLayer},
-    },
-    tracing::{info, Level},
-};
+use axum::Router;
+use axum::ServiceExt;
+use axum::extract::Request;
+use axum::handler::Handler;
+use axum::response::IntoResponse;
+use axum::routing::Route;
+use std::convert::Infallible;
+use std::io;
+use std::net::IpAddr;
+use std::net::Ipv4Addr;
+use std::net::SocketAddr;
+use tokio::net::TcpListener;
+use tower::{Service, layer::Layer};
+use tower_http::cors::CorsLayer;
+use tower_http::normalize_path::NormalizePathLayer;
+use tower_http::trace;
+use tower_http::trace::HttpMakeClassifier;
+use tower_http::trace::TraceLayer;
+use tracing::{Level, info};

 // TODO trim trailing slash into macro > let _app = NormalizePathLayer::trim_trailing_slash().layer(create_app!(routes));
 #[macro_export]
@@ -69,8 +68,8 @@ impl AppBuilder {
     /// Adds a layer to the previously added routes
     pub fn layer<L>(mut self, layer: L) -> Self
     where
-        L: Layer<Route> + Clone + Send + 'static,
-        L::Service: Service<Request> + Clone + Send + 'static,
+        L: Layer<Route> + Clone + Send + Sync + 'static,
+        L::Service: Service<Request> + Clone + Send + Sync + 'static,
         <L::Service as Service<Request>>::Response: IntoResponse + 'static,
         <L::Service as Service<Request>>::Error: Into<Infallible> + 'static,
         <L::Service as Service<Request>>::Future: Send + 'static,
@@ -201,7 +200,7 @@ mod tests {
         AppBuilder::new()
             .socket((Ipv4Addr::LOCALHOST, 8080))
             .routes([Router::new()])
-            .fallback(|| async { "Fallback" })
+            .fallback(async || "Fallback")
            .cors(CorsLayer::new())
             .normalize_path(true)
             .tracing(TraceLayer::new_for_http())
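The new fallback spelling depends on async closures, stable since Rust 1.85 and therefore covered by the rust-version = "1.88" bump earlier in this compare. A minimal sketch of the two spellings outside axum:

async fn fallback_demo() {
    // Old spelling: a closure that returns an async block.
    let old_style = || async { "Fallback" };
    // New spelling: an async closure builds its future directly.
    let new_style = async || "Fallback";
    assert_eq!(old_style().await, new_style().await);
}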
@@ -1,7 +1,7 @@
 use crate::axum::traits::BuildJson;
 use axum::body::Body;
-use axum::http::header::CONTENT_TYPE;
 use axum::http::Request;
+use axum::http::header::CONTENT_TYPE;
 use mime::APPLICATION_JSON;
 use serde::Serialize;
 use serde_json::json;
@@ -1,8 +1,7 @@
 use axum::{
-    async_trait,
     extract::{
-        multipart::{Field, MultipartError, MultipartRejection},
         FromRequest, Multipart, Request,
+        multipart::{Field, MultipartError, MultipartRejection},
     },
     response::IntoResponse,
 };
@@ -110,7 +109,6 @@ impl IntoResponse for MultipartFileRejection {
     }
 }

-#[async_trait]
 impl<S> FromRequest<S> for MultipartFile
 where
     S: Send + Sync,
@@ -142,7 +140,6 @@ where
     }
 }

-#[async_trait]
 impl<S> FromRequest<S> for MultipartFiles
 where
     S: Send + Sync,
@@ -178,7 +175,7 @@ where
     }
 }

-async fn get_files<'a>(mut multipart: Multipart) -> Result<Vec<File>, MultipartFileRejection> {
+async fn get_files(mut multipart: Multipart) -> Result<Vec<File>, MultipartFileRejection> {
     let mut files = vec![];
     while let Some(field) = multipart.next_field().await? {
         files.push(File::from_field(field).await?);
@@ -1,14 +1,13 @@
 use {
     crate::{serde::response::BaseResponse, serde::traits::DeserializeInto},
-    async_trait::async_trait,
     axum::{
+        Json,
         body::to_bytes,
         response::{IntoResponse, Response},
-        Json,
     },
     serde::{
-        de::{DeserializeOwned, Error},
         Serialize,
+        de::{DeserializeOwned, Error},
     },
 };

@@ -18,12 +17,11 @@ impl<T: Serialize> IntoResponse for BaseResponse<T> {
     }
 }

-#[async_trait]
 impl DeserializeInto for Response {
     async fn deserialize_into<T: DeserializeOwned>(self) -> Result<T, serde_json::Error> {
-        let body = to_bytes(self.into_body(), usize::MAX).await.map_err(|e| {
-            serde_json::Error::custom(format!("Failed to read response body: {}", e))
-        })?;
+        let body = to_bytes(self.into_body(), usize::MAX)
+            .await
+            .map_err(|e| serde_json::Error::custom(format!("Failed to read response body: {e}")))?;
         serde_json::from_slice(&body)
     }
 }
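The reshaped error mapping also moves to inline format arguments (`{e}` instead of a positional `{}`), available since Rust 1.58. A tiny sketch showing the two forms are equivalent:

fn describe(e: &std::io::Error) -> String {
    // Both lines render the same text; the captured-identifier form keeps the
    // placeholder next to the value it prints.
    let positional = format!("Failed to read response body: {}", e);
    let captured = format!("Failed to read response body: {e}");
    assert_eq!(positional, captured);
    captured
}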
@@ -13,8 +13,8 @@
 /// use lib::router;
 /// async fn simplify(path: axum::extract::path::Path<String>) {}
 /// router!("/simplify", lib::routes!(
-///     get "/:exp" => simplify,
-///     get "/table/:exp" => || async {}
+///     get "/{exp}" => simplify,
+///     get "/table/{exp}" => async || {}
 /// ));
 /// ```
 #[macro_export]
@@ -92,8 +92,8 @@ macro_rules! join_routes {

 #[cfg(test)]
 mod tests {
-    use axum::extract::State;
     use axum::Router;
+    use axum::extract::State;

     async fn index() {}

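The doc example tracks axum 0.8's new path-capture syntax: `/{exp}` replaces the 0.7-era `/:exp`. A hedged sketch of the same idea with plain axum, outside the crate's router! macro; the route and handler names are placeholders:

use axum::Router;
use axum::extract::Path;
use axum::routing::get;

// axum 0.8 rejects `/:exp`; captures are written with braces instead.
async fn simplify(Path(exp): Path<String>) -> String {
    format!("simplified: {exp}")
}

fn app() -> Router {
    Router::new().route("/simplify/{exp}", get(simplify))
}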
@@ -1,17 +1,16 @@
-use axum::async_trait;
 use deadpool_diesel::Status;
 use derive_more::From;
-use diesel_async::pooled_connection::deadpool::{Object, PoolError};
 use diesel_async::AsyncPgConnection;
+use diesel_async::pooled_connection::deadpool::{Object, PoolError};
 use lib::diesel::pool::PgPool;

-#[async_trait]
 pub trait GetConnection: Clone + Send + Sync {
-    async fn get(&self) -> Result<Object<AsyncPgConnection>, GetConnectionError>;
+    fn get(
+        &self,
+    ) -> impl Future<Output = Result<Object<AsyncPgConnection>, GetConnectionError>> + Send;
     fn status(&self) -> Status;
 }

-#[async_trait]
 impl GetConnection for PgPool {
     async fn get(&self) -> Result<Object<AsyncPgConnection>, GetConnectionError> {
         self.get().await.map_err(Into::into)
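The explicit `+ Send` on the desugared `get` future is what keeps the trait usable from spawned tasks, which `#[async_trait]` used to guarantee by boxing. A self-contained sketch with stand-in types (not the real pool or error):

use std::future::Future;

// Stand-ins for the pooled connection object and error, for illustration only.
struct Object;
struct GetConnectionError;

trait GetConnection: Clone + Send + Sync {
    fn get(&self) -> impl Future<Output = Result<Object, GetConnectionError>> + Send;
}

// Without `+ Send` on the trait method, the spawned future below could not be
// proven Send for an arbitrary implementor. Must be called inside a Tokio runtime.
fn spawn_query<P: GetConnection + 'static>(pool: P) {
    let _handle = tokio::spawn(async move {
        let _conn = pool.get().await;
        // ... run queries with the connection here
    });
}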
@@ -1,9 +1,22 @@
-use diesel_async::AsyncPgConnection;
-use diesel_async_migrations::EmbeddedMigrations;
+use diesel::pg::Pg;
+use diesel_async::AsyncConnection;
+use diesel_async::async_connection_wrapper::AsyncConnectionWrapper;
+use diesel_migrations::{EmbeddedMigrations, MigrationHarness};
+use tokio::task::JoinError;

-pub async fn run_migrations(
-    migrations: &EmbeddedMigrations,
-    conn: &mut AsyncPgConnection,
-) -> Result<(), diesel::result::Error> {
-    migrations.run_pending_migrations(conn).await
+/// Run Diesel migrations using an async connection.
+/// Only works with Postgres.
+pub async fn run_migrations<A>(
+    async_connection: A,
+    migrations: EmbeddedMigrations,
+) -> Result<(), JoinError>
+where
+    A: AsyncConnection<Backend = Pg> + 'static,
+{
+    let mut async_wrapper = AsyncConnectionWrapper::<A>::from(async_connection);
+
+    tokio::task::spawn_blocking(move || {
+        async_wrapper.run_pending_migrations(migrations).unwrap();
+    })
+    .await
 }
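Callers now hand the function an owned async connection plus compile-time embedded migrations; the wrapper drives them on a blocking thread. A hedged usage sketch, mirroring the import path used by the tests crate earlier in this compare; the database URL and migrations path are placeholders:

use diesel_async::{AsyncConnection, AsyncPgConnection};
use diesel_migrations::{EmbeddedMigrations, embed_migrations};
use lib::diesel::migration::run_migrations;

// Path is resolved relative to the embedding crate's Cargo.toml.
const MIGRATIONS: EmbeddedMigrations = embed_migrations!("./migrations");

async fn migrate(database_url: &str) -> Result<(), Box<dyn std::error::Error>> {
    // The connection is consumed: run_migrations wraps it in
    // AsyncConnectionWrapper and runs the migrations via spawn_blocking.
    let conn = AsyncPgConnection::establish(database_url).await?;
    run_migrations(conn, MIGRATIONS).await?;
    Ok(())
}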
@@ -1,7 +1,7 @@
 use deadpool_diesel::postgres::BuildError;
-use diesel_async::pooled_connection::deadpool::Pool;
-use diesel_async::pooled_connection::AsyncDieselConnectionManager;
 use diesel_async::AsyncPgConnection;
+use diesel_async::pooled_connection::AsyncDieselConnectionManager;
+use diesel_async::pooled_connection::deadpool::Pool;

 /// A type alias for the asynchronous PostgreSQL connection pool.
 pub type PgPool = Pool<AsyncPgConnection>;
@@ -1,23 +1,22 @@
-use {
-    nom::{
-        bytes::complete::take_while_m_n,
-        character::complete::{char, multispace0},
-        combinator::eof,
-        sequence::{delimited, terminated},
-        IResult, InputIter, InputLength, InputTake, Slice,
-    },
-    std::ops::RangeFrom,
-};
+use nom::IResult;
+use nom::bytes::complete::take_while_m_n;
+use nom::character::complete::char;
+use nom::character::complete::multispace0;
+use nom::combinator::eof;
+use nom::error::ParseError;
+use nom::sequence::delimited;
+use nom::sequence::terminated;
+use nom::{Input, Parser};

-// TODO generic input
-
 /// Trim leading and trailing whitespace from the input Parser
 /// - Parameters
 /// - `inner`: The parser to trim
 /// - Returns: A parser that trims leading and trailing whitespace from the input and then runs the value from the inner parser
-pub fn trim<'a, Parser, R>(inner: Parser) -> impl FnMut(&'a str) -> IResult<&'a str, R>
+pub fn trim<I, O, F, E: ParseError<I>>(inner: F) -> impl Parser<I, Output = O, Error = E>
 where
-    Parser: FnMut(&'a str) -> IResult<&'a str, R>,
+    I: Input,
+    F: Parser<I, Output = O, Error = E>,
+    <I as Input>::Item: nom::AsChar,
 {
     delimited(multispace0, inner, multispace0)
 }
@@ -27,9 +26,11 @@ where
 /// - Parameters
 /// - `inner`: The parser to run inside the parentheses
 /// - Returns: A parser that parses a parenthesized expression
-pub fn parenthesized<'a, Parser, R>(inner: Parser) -> impl FnMut(&'a str) -> IResult<&'a str, R>
+pub fn parenthesized<I, O, F, E: ParseError<I>>(inner: F) -> impl Parser<I, Output = O, Error = E>
 where
-    Parser: FnMut(&'a str) -> IResult<&'a str, R>,
+    I: Input,
+    F: Parser<I, Output = O, Error = E>,
+    <I as Input>::Item: nom::AsChar,
 {
     delimited(char('('), inner, char(')'))
 }
@@ -39,10 +40,10 @@ where
 /// - `n`: The length of the string to take
 /// - `predicate`: The predicate to call to validate the input
 /// - Returns: A parser that takes `n` characters from the input
-pub fn take_where<F, Input>(n: usize, predicate: F) -> impl Fn(Input) -> IResult<Input, Input>
+pub fn take_where<F, I>(n: usize, predicate: F) -> impl FnMut(I) -> IResult<I, I>
 where
-    Input: InputTake + InputIter + InputLength + Slice<RangeFrom<usize>>,
-    F: Fn(<Input as InputIter>::Item) -> bool + Copy,
+    I: Input,
+    F: Fn(<I as Input>::Item) -> bool,
 {
     take_while_m_n(n, n, predicate)
 }
@@ -54,40 +55,43 @@ where
 /// - Returns: A parser that runs the inner parser and then the end of the input
 /// # Example
 /// ```
-/// use nom::bytes::complete::{tag};
 /// use lib::nom::combinators::exhausted;
+/// use nom::bytes::complete::{tag};
+/// use nom::Parser;
 ///
 /// let input = "test";
-/// let (remaining, result) = exhausted(tag("test"))(input).unwrap();
+/// let (remaining, result) = exhausted(tag::<&str, &str, nom::error::Error<&str>>("test")).parse(input).unwrap();
 /// assert_eq!(remaining, "");
 /// assert_eq!(result, "test");
 /// ```
 /// - Fails if the input is not exhausted
 /// ```
-/// use nom::bytes::complete::{tag};
 /// use lib::nom::combinators::exhausted;
+/// use nom::bytes::complete::{tag};
+/// use nom::Parser;
 ///
 /// let input = "test";
-/// assert!(exhausted(tag("tes"))(input).is_err());
+/// assert!(exhausted(tag::<&str, &str, nom::error::Error<&str>>("tes")).parse(input).is_err());
 /// ```
-pub fn exhausted<'a, Parser, R>(inner: Parser) -> impl FnMut(&'a str) -> IResult<&'a str, R>
+pub fn exhausted<F, I, O, E: ParseError<I>>(inner: F) -> impl Parser<I, Output = O, Error = E>
 where
-    Parser: FnMut(&'a str) -> IResult<&'a str, R>,
+    I: Input,
+    F: Parser<I, Output = O, Error = E>,
 {
     terminated(inner, eof)
 }

 #[cfg(test)]
 mod tests {
-    use nom::{bytes::complete::take_while, sequence::tuple};
-
     use super::*;
+    use nom::bytes::complete::take_while;

     #[test]
     fn test_trim_both_sides() {
         let input = " test ";
-        let (remaining, result) =
-            trim(take_where(4, |c: char| c.is_ascii_alphabetic()))(input).unwrap();
+        let (remaining, result) = trim(take_where(4, |c: char| c.is_ascii_alphabetic()))
+            .parse(input)
+            .unwrap();
         assert_eq!(remaining, "");
         assert_eq!(result, "test");
     }
@@ -95,8 +99,9 @@ mod tests {
     #[test]
     fn test_trim_leading() {
         let input = " test";
-        let (remaining, result) =
-            trim(take_where(4, |c: char| c.is_ascii_alphabetic()))(input).unwrap();
+        let (remaining, result) = trim(take_where(4, |c: char| c.is_ascii_alphabetic()))
+            .parse(input)
+            .unwrap();
         assert_eq!(remaining, "");
         assert_eq!(result, "test");
     }
@@ -104,8 +109,9 @@ mod tests {
     #[test]
     fn test_trim_trailing() {
         let input = "test ";
-        let (remaining, result) =
-            trim(take_where(4, |c: char| c.is_ascii_alphabetic()))(input).unwrap();
+        let (remaining, result) = trim(take_where(4, |c: char| c.is_ascii_alphabetic()))
+            .parse(input)
+            .unwrap();
         assert_eq!(remaining, "");
         assert_eq!(result, "test");
     }
@@ -113,8 +119,9 @@ mod tests {
     #[test]
     fn test_trim_no_trim() {
         let input = "test";
-        let (remaining, result) =
-            trim(take_where(4, |c: char| c.is_ascii_alphabetic()))(input).unwrap();
+        let (remaining, result) = trim(take_where(4, |c: char| c.is_ascii_alphabetic()))
+            .parse(input)
+            .unwrap();
         assert_eq!(remaining, "");
         assert_eq!(result, "test");
     }
@@ -122,8 +129,9 @@ mod tests {
     #[test]
     fn test_parenthesized() {
         let input = "(test)";
-        let (remaining, result) =
-            parenthesized(take_where(4, |c: char| c.is_ascii_alphabetic()))(input).unwrap();
+        let (remaining, result) = parenthesized(take_where(4, |c: char| c.is_ascii_alphabetic()))
+            .parse(input)
+            .unwrap();
         assert_eq!(remaining, "");
         assert_eq!(result, "test");
     }
@@ -131,7 +139,11 @@
     #[test]
     fn test_parenthesized_parse_until_end() {
         let input = "(test)";
-        assert!(parenthesized(take_while(|_| true))(input).is_err());
+        assert!(
+            parenthesized::<&str, &str, _, nom::error::Error<&str>>(take_while(|_| true))
+                .parse(input)
+                .is_err()
+        );
     }

     #[test]
@@ -152,7 +164,7 @@
     fn test_take_where_too_much() {
         let input = "testing";
         assert_eq!(
-            take_where(4, |c: char| c.is_ascii_alphabetic())(input),
+            take_where(4, |c: char| c.is_ascii_alphabetic()).parse(input),
             Ok(("ing", "test"))
         );
     }
@@ -160,14 +172,19 @@
     #[test]
     fn test_take_where_predicate_false() {
         let input = "test";
-        assert!(take_where(4, |c: char| c.is_ascii_digit())(input).is_err());
+        assert!(
+            take_where(4, |c: char| c.is_ascii_digit())
+                .parse(input)
+                .is_err()
+        );
     }

     #[test]
     fn test_exhausted() {
         let input = "test";
-        let (remaining, result) =
-            exhausted(take_where(4, |c: char| c.is_ascii_alphabetic()))(input).unwrap();
+        let (remaining, result) = exhausted(take_where(4, |c: char| c.is_ascii_alphabetic()))
+            .parse(input)
+            .unwrap();
         assert_eq!(remaining, "");
         assert_eq!(result, "test");
     }
@@ -175,16 +192,21 @@
     #[test]
     fn test_exhausted_not_exhausted() {
         let input = "test ";
-        assert!(exhausted(take_where(4, |c: char| c.is_ascii_alphabetic()))(input).is_err());
+        assert!(
+            exhausted(take_where(4, |c: char| c.is_ascii_alphabetic()))
+                .parse(input)
+                .is_err()
+        );
     }

     #[test]
     fn test_exhausted_tuple() {
         let input = "test";
-        let (remaining, result) = exhausted(tuple((
+        let (remaining, result) = exhausted((
             take_where(3, |c: char| c.is_ascii_alphabetic()),
             take_while(|c: char| c.is_ascii_alphabetic()),
-        )))(input)
+        ))
+        .parse(input)
         .unwrap();
         assert_eq!(remaining, "");
         assert_eq!(result, ("tes", "t"));
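nom 8 folds parsing into the `Parser` trait, with `Output` and `Error` as associated types and a single `Input` trait over the input, so combinators are driven with `.parse(...)` instead of being called like closures. A hedged sketch against plain nom 8, mirroring the shape of `trim` above without depending on it:

use nom::IResult;
use nom::Parser;
use nom::bytes::complete::tag;
use nom::character::complete::multispace0;
use nom::sequence::delimited;

// Strip surrounding whitespace, then run the inner parser, as `trim` does.
fn trimmed_keyword(input: &str) -> IResult<&str, &str> {
    delimited(multispace0, tag("let"), multispace0).parse(input)
}

fn main() {
    assert_eq!(trimmed_keyword("  let  x"), Ok(("x", "let")));
}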
@@ -1,6 +1,6 @@
 use {
     crate::traits::IntoResult,
-    nom::{error::Error, IResult},
+    nom::{IResult, error::Error},
 };

 impl<T, R> IntoResult<T> for IResult<R, T> {
@@ -1,7 +1,7 @@
-use async_trait::async_trait;
 use serde::de::DeserializeOwned;

-#[async_trait]
 pub trait DeserializeInto {
-    async fn deserialize_into<T: DeserializeOwned>(self) -> Result<T, serde_json::Error>;
+    fn deserialize_into<T: DeserializeOwned>(
+        self,
+    ) -> impl Future<Output = Result<T, serde_json::Error>>;
 }
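As with the database traits, implementors of `DeserializeInto` keep writing `async fn`. A self-contained sketch with a plain `String` standing in for a response body; the impl target is illustrative, not the crate's:

use serde::de::DeserializeOwned;
use std::future::Future;

trait DeserializeInto {
    fn deserialize_into<T: DeserializeOwned>(
        self,
    ) -> impl Future<Output = Result<T, serde_json::Error>>;
}

impl DeserializeInto for String {
    // The generic `async fn` satisfies the `impl Future` signature above.
    async fn deserialize_into<T: DeserializeOwned>(self) -> Result<T, serde_json::Error> {
        serde_json::from_str(&self)
    }
}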
@@ -1,9 +1,8 @@
+use crate::diesel::DieselError;
 use crate::diesel::get_connection::{GetConnection, GetConnectionError};
 use crate::diesel::pool::PgPool;
-use crate::diesel::DieselError;
-use axum::async_trait;
-use deadpool_diesel::postgres::BuildError;
 use deadpool_diesel::Status;
+use deadpool_diesel::postgres::BuildError;
 use derive_more::From;
 use diesel_async::pooled_connection::deadpool::Object;
 use diesel_async::{AsyncConnection, AsyncPgConnection};
@@ -32,7 +31,6 @@ pub async fn create_test_pool_url_with_size(
     Ok(PoolStub(pool))
 }

-#[async_trait]
 impl GetConnection for PoolStub {
     async fn get(&self) -> Result<Object<AsyncPgConnection>, GetConnectionError> {
         let mut conn = self.0.get().await?;
@@ -1,4 +1,4 @@
-use crate::diesel::pool::{create_pool_from_url, PgPool};
+use crate::diesel::pool::{PgPool, create_pool_from_url};
 use deadpool_diesel::postgres::BuildError;
 use derive_more::{Constructor, From};
 use diesel_async::pooled_connection::deadpool::PoolError;
@@ -6,6 +6,7 @@ use lib::diesel::DieselError;
 use testcontainers_modules::postgres::Postgres;
 use testcontainers_modules::testcontainers::runners::AsyncRunner;
 use testcontainers_modules::testcontainers::{ContainerAsync, TestcontainersError};
+use tokio::task::JoinError;

 /// When the TestContainer is dropped, the container will be removed.
 /// # Errors
@@ -16,11 +17,15 @@ pub struct TestContainer {
     pub pool: PgPool,
 }

-pub async fn create_test_containers_pool<'a>() -> Result<TestContainer, ContainerError> {
+const TEST_CONTAINERS_INTERNAL_PORT: u16 = 5432;
+
+pub async fn create_test_containers_pool() -> Result<TestContainer, ContainerError> {
     let container = create_postgres_container().await?;
     let connection_string = format!(
         "postgres://postgres:postgres@127.0.0.1:{}/postgres",
-        container.get_host_port_ipv4(5432).await?
+        container
+            .get_host_port_ipv4(TEST_CONTAINERS_INTERNAL_PORT)
+            .await?
     );
     let pool = create_pool_from_url(connection_string)?;
     Ok(TestContainer::new(container, pool))
@@ -36,4 +41,5 @@ pub enum ContainerError {
     BuildError(BuildError),
     PoolError(PoolError),
     DieselError(DieselError),
+    JoinError(JoinError),
 }