Compare commits

25 Commits

Commits (SHA1):
d84078d6c3
f14543af6b
d260b2d676
35ccc235c8
7a46101b42
5a77407297
2f1eb4df3a
80f4af9087
7e2df67fee
8fb89e0459
ce770e9c6f
cd99466266
93fe1e0cda
eeab1257e3
762330189d
17c81f4da1
3318aacf7c
00e894140f
ae775f4e9e
2c8577a11d
7eb675c210
10d8535b27
cd558eebfa
347314460a
ee487540ac
@@ -12,7 +12,10 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: actions-rust-lang/setup-rust-toolchain@v1

      - name: Build
        run: cargo build --verbose

      - name: Run tests
        run: cargo test --verbose --all-features --workspace
.idea/lib.iml (generated, 4 lines changed)
@@ -9,6 +9,10 @@
<sourceFolder url="file://$MODULE_DIR$/crates/read_files/src" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/crates/read_files/tests" isTestSource="true" />
<sourceFolder url="file://$MODULE_DIR$/crates/diesel_crud_derive/src" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/crates/diesel_crud_trait/src" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/crates/tests/src" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/crates/tests/tests" isTestSource="true" />
<sourceFolder url="file://$MODULE_DIR$/tests" isTestSource="true" />
<excludeFolder url="file://$MODULE_DIR$/target" />
<excludeFolder url="file://$MODULE_DIR$/examples/multipart_file/target" />
.idea/runConfigurations/All_Tests.xml (generated, 1 line changed)
@@ -1,5 +1,6 @@
<component name="ProjectRunConfigurationManager">
  <configuration default="false" name="All Tests" type="CargoCommandRunConfiguration" factoryName="Cargo Command">
    <option name="buildProfile" value="Test" />
    <option name="command" value="test --workspace" />
    <option name="workingDirectory" value="file://$PROJECT_DIR$" />
    <envs />

Cargo.lock (generated, 1911 lines changed)
File diff suppressed because it is too large.
Cargo.toml (62 lines changed)
@@ -1,15 +1,16 @@
[workspace]
members = ["crates/*"]
exclude = ["examples"]

[workspace.package]
edition = "2021"
rust-version = "1.79.0"
edition = "2024"
rust-version = "1.85"
authors = ["Martin Berg Alstad"]
homepage = "emberal.github.io"
homepage = "martials.no"

[package]
name = "lib"
version = "1.4.1-hotfix-hotfix-2"
version = "2.0.0"
description = "A library with utilities and helper fuctions."
edition = { workspace = true }
rust-version = { workspace = true }
@@ -20,34 +21,69 @@ homepage = { workspace = true }

[dependencies]
# Api
axum = { version = "0.7", optional = true, features = ["multipart"] }
tower = { version = "0.4", optional = true }
tower-http = { version = "0.5", optional = true, features = ["trace", "cors", "normalize-path"] }
axum = { version = "0.8", optional = true, features = ["multipart"] }
tower = { version = "0.5", optional = true }
tower-http = { version = "0.6", optional = true, features = ["trace", "cors", "normalize-path"] }
mime = { version = "0.3", optional = true }
# Async
tokio = { version = "1.38", optional = true, features = ["fs"] }
tokio = { workspace = true, optional = true, features = ["fs", "rt-multi-thread"] }
tokio-util = { version = "0.7", optional = true, features = ["io"] }
# Database
diesel = { workspace = true, optional = true, features = ["postgres"] }
diesel-async = { workspace = true, optional = true, features = ["postgres", "deadpool", "async-connection-wrapper"] }
diesel-crud-derive = { path = "crates/diesel_crud_derive", optional = true }
diesel-crud-trait = { path = "crates/diesel_crud_trait", optional = true }
diesel_migrations = { workspace = true, optional = true }
deadpool-diesel = { workspace = true, optional = true, features = ["postgres"] }
# Error handling
thiserror = { version = "1.0", optional = true }
thiserror = { workspace = true, optional = true }
# Logging
tracing = { version = "0.1", optional = true }
tracing-subscriber = { version = "0.3", optional = true }
# Parsing
nom = { version = "7.1", optional = true }
nom = { version = "8.0", optional = true }
# Procedural macros
into-response-derive = { path = "crates/into_response_derive", optional = true }
read-files = { path = "crates/read_files", optional = true }
# Serialization / Deserialization
serde = { version = "1.0", optional = true, features = ["derive"] }
serde_json = { version = "1.0", optional = true }
# Test
testcontainers-modules = { workspace = true, features = ["postgres"], optional = true }
# Time
chrono = { version = "0.4", optional = true, features = ["serde"] }
# Utils
derive_more = { workspace = true, features = ["from", "constructor"] }

[workspace.dependencies]
# Async
tokio = "1.40"
# Database
diesel = "2.2"
diesel-async = "0.5"
diesel_migrations = "2.2"
deadpool-diesel = "0.6"
# Error handling
thiserror = "2.0"
# Procedural macros
syn = "2.0"
quote = "1.0"
deluxe = "0.5"
proc-macro2 = "1.0"
# Test
testcontainers-modules = "0.11"
# Utils
derive_more = "2.0"
regex = "1.11"

[features]
axum = ["dep:axum", "dep:tower", "dep:tower-http", "dep:thiserror", "dep:tracing", "dep:tracing-subscriber", "dep:tokio"]
axum = ["dep:axum", "dep:tower", "dep:tower-http", "dep:thiserror", "dep:tracing", "dep:tracing-subscriber", "dep:tokio", "dep:mime"]
diesel = ["dep:diesel-crud-trait", "dep:diesel", "dep:diesel-async", "dep:deadpool-diesel", "dep:diesel_migrations"]
io = ["dep:tokio", "dep:tokio-util"]
iter = []
nom = ["dep:nom"]
serde = ["dep:serde"]
derive = ["dep:into-response-derive", "axum", "serde"]
serde = ["dep:serde", "dep:serde_json"]
derive = ["dep:into-response-derive", "dep:diesel-crud-derive"]
read-files = ["dep:read-files"]
time = ["dep:chrono"]
test = ["dep:testcontainers-modules"]
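Every optional dependency above is tied to a cargo feature from the [features] table, so the library can compile whole modules conditionally. As a minimal sketch of how that gating typically looks on the library side (the feature names come from the table above; the module layout is illustrative, apart from the axum module which does appear later in this diff):

```rust
// lib.rs sketch: feature names are from the [features] table above,
// module names other than `axum` are invented for illustration.
#[cfg(feature = "axum")]
pub mod axum;

#[cfg(feature = "diesel")]
pub mod diesel;

#[cfg(feature = "nom")]
pub mod nom;

// Only built when both the io and axum features are enabled.
#[cfg(all(feature = "io", feature = "axum"))]
pub mod multipart;
```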
@@ -1,3 +1,3 @@
# Lib
# Rust Lib 🦀

-_-
Rust library for common code, traits and utility functions
crates/diesel_crud_derive/Cargo.toml (new file, 14 lines)
@@ -0,0 +1,14 @@
[package]
name = "diesel-crud-derive"
version = "0.1.0"
edition = { workspace = true }
rust-version = { workspace = true }

[lib]
proc-macro = true

[dependencies]
syn = { workspace = true }
quote = { workspace = true }
deluxe = { workspace = true }
proc-macro2 = { workspace = true }
crates/diesel_crud_derive/src/attributes.rs (new file, 73 lines)
@@ -0,0 +1,73 @@
use crate::common::PrimaryKey;
use deluxe::{extract_attributes, ExtractAttributes};
use proc_macro2::Ident;
use quote::quote;
use std::collections::HashMap;
use syn::spanned::Spanned;
use syn::{Data, DeriveInput, Expr, Path, Type};

#[derive(ExtractAttributes)]
#[deluxe(attributes(diesel))]
pub(crate) struct DieselStructAttributes {
    table_name: Option<Expr>,
    #[deluxe(rest)]
    _rest: HashMap<Path, Expr>,
}

#[derive(ExtractAttributes)]
#[deluxe(attributes(diesel_crud))]
pub(crate) struct StructAttributes {
    table: Option<Expr>,
    #[deluxe(default)]
    insert: Option<Type>,
    #[deluxe(default)]
    update: Option<Type>,
}

#[derive(ExtractAttributes)]
#[deluxe(attributes(diesel_crud))]
pub(crate) struct FieldAttributes(#[allow(unused)] Expr);

pub(crate) struct Attributes {
    pub struct_ident: Ident,
    pub table: Expr,
    pub insert: Type,
    pub update: Type,
    pub pk: Option<PrimaryKey>,
}

pub(crate) fn extract_attrs(ast: &mut DeriveInput) -> deluxe::Result<Attributes> {
    let struct_attributes: StructAttributes = extract_attributes(ast)?;
    let diesel_attributes: DieselStructAttributes = extract_attributes(ast)?;
    Ok(Attributes {
        struct_ident: ast.ident.clone(),
        table: diesel_attributes.table_name.unwrap_or_else(|| {
            struct_attributes
                .table
                .expect("Table name should be provided on either diesel or diesel_crud attribute")
        }),
        insert: struct_attributes
            .insert
            .unwrap_or_else(|| Type::Verbatim(quote! { Self })),
        update: struct_attributes
            .update
            .unwrap_or_else(|| Type::Verbatim(quote! { Self })),
        pk: extract_field_attrs(ast).ok(),
    })
}

fn extract_field_attrs(ast: &mut DeriveInput) -> deluxe::Result<PrimaryKey> {
    if let Data::Struct(data_struct) = &mut ast.data {
        for field in data_struct.fields.iter_mut() {
            if let Ok(FieldAttributes(_)) = extract_attributes(field) {
                return Ok(PrimaryKey {
                    ident: field.ident.clone().unwrap(),
                    ty: field.ty.clone(),
                });
            }
        }
    } else {
        return Err(deluxe::Error::new(ast.span(), "Expected a struct"));
    };
    Err(deluxe::Error::new(ast.span(), "Primary key not found"))
}
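For reference, a struct that extract_attrs would accept might look like the following sketch. The attribute names map onto the deluxe structs above (diesel table_name, diesel_crud table/insert/update, and the field-level pk marker); the schema module and the Insert/Update models are invented for illustration.

```rust
// Illustrative only: `schema::user`, `InsertUser` and `UpdateUser` are
// hypothetical names, not part of this repository.
#[derive(diesel::prelude::Queryable, lib::diesel_crud_derive::DieselCrud)]
#[diesel(table_name = schema::user)]                      // read via DieselStructAttributes::table_name
#[diesel_crud(insert = InsertUser, update = UpdateUser)]  // read via StructAttributes::insert / ::update
struct User {
    #[diesel_crud(pk)] // recorded by extract_field_attrs() as the PrimaryKey
    email: String,
}
```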
crates/diesel_crud_derive/src/common.rs (new file, 14 lines)
@@ -0,0 +1,14 @@
use proc_macro2::Ident;
use quote::quote;
use syn::Type;

pub(crate) struct PrimaryKey {
    pub ident: Ident,
    pub ty: Type,
}

pub(crate) fn return_type(output: proc_macro2::TokenStream) -> proc_macro2::TokenStream {
    quote! {
        Result<#output, lib::diesel_crud_trait::CrudError>
    }
}
crates/diesel_crud_derive/src/create.rs (new file, 40 lines)
@@ -0,0 +1,40 @@
use crate::{Attributes, common};
use quote::quote;

pub(crate) fn derive_diesel_crud_create_impl(
    Attributes {
        struct_ident,
        table,
        insert,
        ..
    }: &Attributes,
) -> proc_macro2::TokenStream {
    let return_type = common::return_type(quote! { Self });
    let many_return_type = common::return_type(quote! { Vec<Self> });

    quote! {
        #[automatically_derived]
        impl lib::diesel_crud_trait::DieselCrudCreate<#table::table> for #struct_ident {
            type Insert = #insert;
            async fn insert(insert: Self::Insert, conn: &mut diesel_async::AsyncPgConnection) -> #return_type {
                use diesel::associations::HasTable;
                diesel_async::RunQueryDsl::get_result(
                    diesel::dsl::insert_into(#table::table::table()).values(insert),
                    conn
                )
                .await
                .map_err(Into::into)
            }

            async fn insert_many(insert: &[Self::Insert], conn: &mut diesel_async::AsyncPgConnection) -> #many_return_type {
                use diesel::associations::HasTable;
                diesel_async::RunQueryDsl::get_results(
                    diesel::dsl::insert_into(#table::table::table()).values(insert),
                    conn
                )
                .await
                .map_err(Into::into)
            }
        }
    }
}
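As a usage sketch, the methods generated above would be called roughly like this; User and InsertUser mirror the test crate further down in this diff and are not defined here, and connection setup is elided.

```rust
// Hypothetical call site for the generated DieselCrudCreate impl.
use lib::diesel_crud_trait::{CrudError, DieselCrudCreate};

async fn seed_users(conn: &mut diesel_async::AsyncPgConnection) -> Result<(), CrudError> {
    // insert() returns the inserted row; insert_many() returns all inserted rows.
    let _one: User = User::insert(InsertUser { email: "a@example.com".into() }, conn).await?;
    let _many: Vec<User> = User::insert_many(
        &[InsertUser { email: "b@example.com".into() }],
        conn,
    )
    .await?;
    Ok(())
}
```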
crates/diesel_crud_derive/src/delete.rs (new file, 40 lines)
@@ -0,0 +1,40 @@
use crate::{Attributes, PrimaryKey, common};
use quote::quote;

pub(crate) fn derive_diesel_crud_delete_impl(
    Attributes {
        struct_ident,
        table,
        pk,
        ..
    }: &Attributes,
) -> proc_macro2::TokenStream {
    if pk.is_none() {
        panic!("Please specify a primary key using #[diesel_crud(pk)]");
    }
    let PrimaryKey {
        ident: pk_ident,
        ty: pk_type,
    } = pk.as_ref().unwrap();
    let return_type = common::return_type(quote! { Self });

    quote! {
        #[automatically_derived]
        impl lib::diesel_crud_trait::DieselCrudDelete for #struct_ident {
            type PK = #pk_type;
            async fn delete(pk: Self::PK, conn: &mut diesel_async::AsyncPgConnection) -> #return_type {
                use diesel::QueryDsl;
                use diesel::associations::HasTable;
                diesel_async::RunQueryDsl::get_result(
                    diesel::delete(
                        #table::table
                            .filter(diesel::expression_methods::ExpressionMethods::eq(#table::#pk_ident, pk))
                    ),
                    conn,
                )
                .await
                .map_err(Into::into)
            }
        }
    }
}
crates/diesel_crud_derive/src/lib.rs (new file, 165 lines)
@@ -0,0 +1,165 @@
extern crate proc_macro;

use crate::attributes::{extract_attrs, Attributes};
use crate::common::PrimaryKey;
use crate::create::derive_diesel_crud_create_impl;
use crate::delete::derive_diesel_crud_delete_impl;
use crate::list::derive_diesel_crud_list_impl;
use crate::read::derive_diesel_crud_read_impl;
use crate::update::derive_diesel_crud_update_impl;
use quote::quote;
use syn::{parse_macro_input, DeriveInput};

mod attributes;
mod common;
mod create;
mod delete;
mod list;
mod read;
mod update;

/// Derives 5 functions for CRUD operations
/// 1. create
/// 2. read
/// 3. update
/// 4. delete
/// 5. list
#[proc_macro_derive(DieselCrud, attributes(diesel_crud))]
pub fn derive_diesel_crud(item: proc_macro::TokenStream) -> proc_macro::TokenStream {
    let mut item = parse_macro_input!(item as DeriveInput);
    let attrs = extract_attrs(&mut item).unwrap();

    let create = derive_diesel_crud_create_impl(&attrs);
    let read = derive_diesel_crud_read_impl(&attrs);
    let update = derive_diesel_crud_update_impl(&attrs);
    let delete = derive_diesel_crud_delete_impl(&attrs);
    let list = derive_diesel_crud_list_impl(&attrs);

    let Attributes {
        table,
        struct_ident,
        ..
    } = attrs;
    let expanded = quote! {
        #create
        #read
        #update
        #delete
        #list

        impl lib::diesel_crud_trait::DieselCrud<#table::table> for #struct_ident {}
    };
    expanded.into()
}

/// Derives the create function for CRUD operations.
/// Must be used on a struct.
/// # Struct Attributes
/// - table: Expr - The schema struct for the table (can be provided on either diesel or diesel_crud attribute)
/// - insert: Type - The insertable model (Optional, defaults to `Self`)
/// # Example
/// ```ignore
/// #[derive(Queryable, diesel_crud_derive::DieselCrudCreate)]
/// #[diesel_crud(create = crate::models::InsertUser)]
/// #[diesel(table_name = crate::schema::user)]
/// struct User {
///     #[diesel_crud(pk)]
///     email: String,
///     password: String,
/// }
/// ```
#[proc_macro_derive(DieselCrudCreate, attributes(diesel_crud))]
pub fn derive_diesel_crud_create(item: proc_macro::TokenStream) -> proc_macro::TokenStream {
    let mut item = syn::parse_macro_input!(item as DeriveInput);
    let attrs = extract_attrs(&mut item).unwrap();
    derive_diesel_crud_create_impl(&attrs).into()
}

/// Derives the read function for CRUD operations.
/// Must be used on a struct with one field marked as the primary key.
/// # Struct Attributes
/// - table: Expr - The schema struct for the table (can be provided on either diesel or diesel_crud attribute)
/// # Field Attributes
/// - pk: Ident - The primary key field (Only one field should be marked as the primary key)
/// # Example
/// ```ignore
/// #[derive(Queryable, diesel_crud_derive::DieselCrudRead)]
/// #[diesel(table_name = crate::schema::user)]
/// struct User {
///     #[diesel_crud(pk)]
///     email: String,
///     password: String,
/// }
/// ```
#[proc_macro_derive(DieselCrudRead, attributes(diesel_crud))]
pub fn derive_diesel_crud_read(item: proc_macro::TokenStream) -> proc_macro::TokenStream {
    let mut item = syn::parse_macro_input!(item as DeriveInput);
    let attrs = extract_attrs(&mut item).unwrap();
    derive_diesel_crud_read_impl(&attrs).into()
}

/// Derives the update function for CRUD operations.
/// Must be used on a struct.
/// # Struct Attributes
/// - table: Expr - The schema struct for the table (can be provided on either diesel or diesel_crud attribute)
/// - update: Type - The update model (Optional, defaults to `Self`)
/// # Example
/// ```ignore
/// #[derive(Queryable, diesel_crud_derive::DieselCrudUpdate)]
/// #[diesel(table_name = crate::schema::user)]
/// struct User {
///     #[diesel_crud(pk)]
///     email: String,
///     password: String,
/// }
/// ```
#[proc_macro_derive(DieselCrudUpdate, attributes(diesel_crud))]
pub fn derive_diesel_crud_update(item: proc_macro::TokenStream) -> proc_macro::TokenStream {
    let mut item = syn::parse_macro_input!(item as DeriveInput);
    let attrs = extract_attrs(&mut item).unwrap();
    derive_diesel_crud_update_impl(&attrs).into()
}

/// Derives the delete function for CRUD operations.
/// Must be used on a struct with a field marked as primary key.
/// # Struct Attributes
/// - table: Expr - The schema struct for the table (can be provided on either diesel or diesel_crud attribute)
/// # Field Attributes
/// - pk: Ident - The primary key field (Only one field should be marked as the primary key)
/// # Example
/// ```ignore
/// #[derive(Queryable, diesel_crud_derive::DieselCrudDelete)]
/// #[diesel(table_name = crate::schema::user)]
/// struct User {
///     #[diesel_crud(pk)]
///     email: String,
///     password: String,
/// }
/// ```
#[proc_macro_derive(DieselCrudDelete, attributes(diesel_crud))]
pub fn derive_diesel_crud_delete(item: proc_macro::TokenStream) -> proc_macro::TokenStream {
    let mut item = syn::parse_macro_input!(item as DeriveInput);
    let attrs = extract_attrs(&mut item).unwrap();
    derive_diesel_crud_delete_impl(&attrs).into()
}

/// Derives the list function for CRUD operations.
/// Must be used on a struct.
/// # Struct Attributes
/// - table: Expr - The schema struct for the table (can be provided on either diesel or diesel_crud attribute)
/// # Example
/// ```ignore
/// #[derive(Queryable, diesel_crud_derive::DieselCrudList)]
/// #[diesel(table_name = crate::schema::user)]
/// struct User {
///     #[diesel_crud(pk)]
///     email: String,
///     password: String,
/// }
/// ```
#[proc_macro_derive(DieselCrudList, attributes(diesel_crud))]
pub fn derive_diesel_crud_list(item: proc_macro::TokenStream) -> proc_macro::TokenStream {
    let mut item = syn::parse_macro_input!(item as DeriveInput);
    let attrs = extract_attrs(&mut item).unwrap();
    derive_diesel_crud_list_impl(&attrs).into()
}
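Because the DieselCrud derive emits all five impls plus the blanket DieselCrud impl, a caller can write code that is generic over any derived type. This is a sketch only; the bound mirrors the trait definitions in crates/diesel_crud_trait later in this diff, and the helper name is invented.

```rust
// Sketch: a generic helper constrained by the blanket DieselCrud trait,
// showing what deriving all five impls buys a caller.
use lib::diesel_crud_trait::{CrudError, DieselCrud, DieselCrudRead};

async fn fetch_by_pk<T, Table>(
    pk: <T as DieselCrudRead>::PK,
    conn: &mut diesel_async::AsyncPgConnection,
) -> Result<T, CrudError>
where
    T: DieselCrud<Table>,
{
    // read() comes from the DieselCrudRead supertrait.
    T::read(pk, conn).await
}
```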
crates/diesel_crud_derive/src/list.rs (new file, 22 lines)
@@ -0,0 +1,22 @@
use crate::{Attributes, common};
use quote::quote;

pub(crate) fn derive_diesel_crud_list_impl(
    Attributes {
        struct_ident,
        table,
        ..
    }: &Attributes,
) -> proc_macro2::TokenStream {
    let return_type = common::return_type(quote! { Vec<Self> });

    quote! {
        #[automatically_derived]
        impl lib::diesel_crud_trait::DieselCrudList for #struct_ident {
            async fn list(conn: &mut diesel_async::AsyncPgConnection) -> #return_type {
                use diesel::associations::HasTable;
                diesel_async::RunQueryDsl::get_results(#table::table::table(), conn).await.map_err(Into::into)
            }
        }
    }
}
crates/diesel_crud_derive/src/read.rs (new file, 34 lines)
@@ -0,0 +1,34 @@
use crate::common::PrimaryKey;
use crate::{Attributes, common};
use quote::quote;

pub(crate) fn derive_diesel_crud_read_impl(
    Attributes {
        struct_ident,
        table,
        pk,
        ..
    }: &Attributes,
) -> proc_macro2::TokenStream {
    if pk.is_none() {
        panic!("Please specify a primary key using #[diesel_crud(pk)]");
    }
    let PrimaryKey { ty: pk_type, .. } = pk.as_ref().unwrap();
    let return_type = common::return_type(quote! { Self });

    quote! {
        #[automatically_derived]
        impl lib::diesel_crud_trait::DieselCrudRead for #struct_ident {
            type PK = #pk_type;
            async fn read(pk: Self::PK, conn: &mut diesel_async::AsyncPgConnection) -> #return_type {
                use diesel::associations::HasTable;
                diesel_async::RunQueryDsl::get_result(
                    diesel::QueryDsl::find(#table::table::table(), pk),
                    conn
                )
                .await
                .map_err(Into::into)
            }
        }
    }
}
crates/diesel_crud_derive/src/update.rs (new file, 29 lines)
@@ -0,0 +1,29 @@
use crate::{Attributes, common};
use quote::quote;

pub(crate) fn derive_diesel_crud_update_impl(
    Attributes {
        struct_ident,
        table,
        update,
        ..
    }: &Attributes,
) -> proc_macro2::TokenStream {
    let return_type = common::return_type(quote! { Self });

    quote! {
        #[automatically_derived]
        impl lib::diesel_crud_trait::DieselCrudUpdate for #struct_ident {
            type Update = #update;
            async fn update(update: Self::Update, conn: &mut diesel_async::AsyncPgConnection) -> #return_type {
                use diesel::associations::HasTable;
                diesel_async::RunQueryDsl::get_result(
                    diesel::dsl::update(#table::table::table()).set(update),
                    conn,
                )
                .await
                .map_err(Into::into)
            }
        }
    }
}
crates/diesel_crud_trait/Cargo.toml (new file, 10 lines)
@@ -0,0 +1,10 @@
[package]
name = "diesel-crud-trait"
version = "0.1.0"
edition = { workspace = true }
rust-version = { workspace = true }

[dependencies]
diesel = { workspace = true, features = ["postgres"] }
diesel-async = { workspace = true, features = ["postgres", "deadpool"] }
thiserror = { workspace = true }
crates/diesel_crud_trait/src/error.rs (new file, 22 lines)
@@ -0,0 +1,22 @@
use diesel::result::Error;
use thiserror::Error;

/// Error type for CRUD operations
#[derive(Debug, PartialEq, Error)]
pub enum CrudError {
    #[error("Resource not found")]
    NotFound,
    #[error("Database pool error: {0}")]
    PoolError(String),
    #[error(transparent)]
    Other(Error),
}

impl From<Error> for CrudError {
    fn from(error: Error) -> Self {
        match error {
            Error::NotFound => CrudError::NotFound,
            _ => CrudError::Other(error),
        }
    }
}
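In an HTTP layer, these variants naturally map to status codes. The following is only a sketch of such a mapping; the helper function and the choice of codes are not part of the diff, only the CrudError enum is.

```rust
// Hypothetical mapping of CrudError variants to HTTP status codes.
use axum::http::StatusCode;
use lib::diesel_crud_trait::CrudError;

fn status_for(error: &CrudError) -> StatusCode {
    match error {
        CrudError::NotFound => StatusCode::NOT_FOUND,
        CrudError::PoolError(_) => StatusCode::SERVICE_UNAVAILABLE,
        CrudError::Other(_) => StatusCode::INTERNAL_SERVER_ERROR,
    }
}
```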
crates/diesel_crud_trait/src/lib.rs (new file, 124 lines)
@@ -0,0 +1,124 @@
mod error;

use diesel::{AsChangeset, Insertable};
use diesel_async::AsyncPgConnection;
pub use error::CrudError;

/// Combines all CRUD operations into a single trait
/// Includes:
/// - Create
/// - Read
/// - Update
/// - Delete
/// - List
pub trait DieselCrud<Table>:
    DieselCrudCreate<Table> + DieselCrudRead + DieselCrudUpdate + DieselCrudDelete + DieselCrudList
{
}

/// Insert an entity into the database
/// The entity must implement `Insertable<Table>` for the given table.
///
/// Implementing the trait requires the `async_trait` macro.
/// # Associations
/// - `Insert` - The type to insert, must implement `Insertable<Table>`
/// # Parameters
/// - `insert` - The entity to insert
/// - `conn` - The database connection
/// # Returns
/// A result containing the inserted entity or a `CrudError`
pub trait DieselCrudCreate<Table>
where
    Self: Sized,
{
    type Insert: Insertable<Table>;
    fn insert(
        insert: Self::Insert,
        conn: &mut AsyncPgConnection,
    ) -> impl Future<Output = Result<Self, CrudError>>;
    fn insert_many(
        insert: &[Self::Insert],
        conn: &mut AsyncPgConnection,
    ) -> impl Future<Output = Result<Vec<Self>, CrudError>>;
}

/// Gets an entity from the database
///
/// Implementing the trait requires the `async_trait` macro.
/// # Associations
/// - `PK` - The primary key of the entity
/// # Parameters
/// - `pk` - The primary key of the entity
/// - `conn` - The database connection
/// # Returns
/// A result containing the entity or a `CrudError`.
/// If the entity is not found, the error should be `CrudError::NotFound`.
pub trait DieselCrudRead
where
    Self: Sized,
{
    type PK;
    fn read(
        pk: Self::PK,
        conn: &mut AsyncPgConnection,
    ) -> impl Future<Output = Result<Self, CrudError>>;
}

/// Updates an entity in the database
/// The entity must implement `AsChangeset` for the given table.
///
/// Implementing the trait requires the `async_trait` macro.
/// # Associations
/// - `Update` - The type to update
/// # Parameters
/// - `update` - The update to apply
/// - `conn` - The database connection
/// # Returns
/// A result containing the old entry of the entity if successful or a `CrudError`.
/// If the entity is not found, the error should be `CrudError::NotFound`.
pub trait DieselCrudUpdate
where
    Self: Sized,
{
    type Update: AsChangeset;
    fn update(
        update: Self::Update,
        conn: &mut AsyncPgConnection,
    ) -> impl Future<Output = Result<Self, CrudError>>;
}

/// Deletes an entity from the database
///
/// Implementing the trait requires the `async_trait` macro.
/// # Associations
/// - `PK` - The primary key of the entity
/// # Parameters
/// - `pk` - The primary key of the entity
/// - `conn` - The database connection
/// # Returns
/// A result containing the deleted entity or a `CrudError`.
/// If the entity is not found, the error should be `CrudError::NotFound`.
pub trait DieselCrudDelete
where
    Self: Sized,
{
    type PK;
    fn delete(
        pk: Self::PK,
        conn: &mut AsyncPgConnection,
    ) -> impl Future<Output = Result<Self, CrudError>>;
}

/// Lists all entities in the table
///
/// Implementing the trait requires the `async_trait` macro.
/// # Parameters
/// - `conn` - The database connection
/// # Returns
/// A result containing a Vec of entities or a `CrudError`.
pub trait DieselCrudList
where
    Self: Sized,
{
    fn list(conn: &mut AsyncPgConnection) -> impl Future<Output = Result<Vec<Self>, CrudError>>;
}
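These traits can also be implemented by hand for types that do not use the derive macros. The sketch below uses an invented Item type and schema module; its body mirrors what derive_diesel_crud_list_impl generates above.

```rust
// Hand-written impl sketch (no derive); `Item` and `schema::item` are invented.
use diesel_async::AsyncPgConnection;
use lib::diesel_crud_trait::{CrudError, DieselCrudList};

impl DieselCrudList for Item {
    async fn list(conn: &mut AsyncPgConnection) -> Result<Vec<Self>, CrudError> {
        // Same shape as the generated code: run the whole-table query and
        // convert the diesel error into CrudError.
        diesel_async::RunQueryDsl::get_results(schema::item::table, conn)
            .await
            .map_err(Into::into)
    }
}
```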
@@ -3,12 +3,13 @@ use proc_macro::TokenStream;
use quote::quote;
use syn::DeriveInput;

// TODO derive generic types
pub fn into_response_derive_impl(input: DeriveInput) -> TokenStream {
    let name = &input.ident;

    let expanded = quote! {
        impl IntoResponse for #name {
            fn into_response(self) -> Response {
        impl axum::response::IntoResponse for #name {
            fn into_response(self) -> axum::response::Response {
                let version = env!("CARGO_PKG_VERSION");
                lib::serde::response::BaseResponse::new(version, self)
                    .into_response()
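The expansion above wraps the annotated value in lib::serde::response::BaseResponse tagged with the crate version. A usage sketch might look like the following; the DTO, the handler, and especially the re-export path and derive name (the diff does not show how the macro is exported) are assumptions.

```rust
// Invented DTO and handler; assumes the derive is exposed as
// lib::into_response_derive::IntoResponse, analogous to the
// lib::diesel_crud_derive re-exports used in the test crate below.
#[derive(serde::Serialize, lib::into_response_derive::IntoResponse)]
struct Pong {
    message: String,
}

// Returning the DTO from an axum handler responds with
// BaseResponse::new(CARGO_PKG_VERSION, Pong { .. }).
async fn ping() -> Pong {
    Pong { message: "pong".to_string() }
}
```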
@@ -10,4 +10,4 @@ proc-macro = true
[dependencies]
syn = { workspace = true }
quote = { workspace = true }
regex = "1.10"
regex = { workspace = true }
crates/tests/Cargo.toml (new file, 17 lines)
@@ -0,0 +1,17 @@
[package]
name = "tests"
version = "0.1.0"
edition.workspace = true
rust-version.workspace = true
authors.workspace = true
homepage.workspace = true

[dependencies]
diesel = { workspace = true }
diesel-async = { workspace = true }
lib = { path = "../../../rust-lib", features = ["diesel", "derive", "test"] }

[dev-dependencies]
tokio = { workspace = true, features = ["macros"] }
testcontainers-modules = { workspace = true, features = ["postgres"] }
diesel_migrations = { workspace = true }
crates/tests/diesel.toml (new file, 9 lines)
@@ -0,0 +1,9 @@
# For documentation on how to configure this file,
# see https://diesel.rs/guides/configuring-diesel-cli

[print_schema]
file = "tests/schema.rs"
custom_type_derives = ["diesel::query_builder::QueryId", "Clone"]

[migrations_directory]
dir = "./migrations"
crates/tests/migrations/.keep (new file, 0 lines)

@@ -0,0 +1,6 @@
-- This file was automatically created by Diesel to setup helper functions
-- and other internal bookkeeping. This file is safe to edit, any future
-- changes will be added to existing projects as new migrations.

DROP FUNCTION IF EXISTS diesel_manage_updated_at(_tbl regclass);
DROP FUNCTION IF EXISTS diesel_set_updated_at();
@@ -0,0 +1,36 @@
-- This file was automatically created by Diesel to setup helper functions
-- and other internal bookkeeping. This file is safe to edit, any future
-- changes will be added to existing projects as new migrations.

-- Sets up a trigger for the given table to automatically set a column called
-- `updated_at` whenever the row is modified (unless `updated_at` was included
-- in the modified columns)
--
-- # Example
--
-- ```sql
-- CREATE TABLE users (id SERIAL PRIMARY KEY, updated_at TIMESTAMP NOT NULL DEFAULT NOW());
--
-- SELECT diesel_manage_updated_at('users');
-- ```
CREATE OR REPLACE FUNCTION diesel_manage_updated_at(_tbl regclass) RETURNS VOID AS $$
BEGIN
    EXECUTE format('CREATE TRIGGER set_updated_at BEFORE UPDATE ON %s
                    FOR EACH ROW EXECUTE PROCEDURE diesel_set_updated_at()', _tbl);
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE FUNCTION diesel_set_updated_at() RETURNS trigger AS $$
BEGIN
    IF (
        NEW IS DISTINCT FROM OLD AND
        NEW.updated_at IS NOT DISTINCT FROM OLD.updated_at
    ) THEN
        NEW.updated_at := current_timestamp;
    END IF;
    RETURN NEW;
END;
$$ LANGUAGE plpgsql;
@@ -0,0 +1 @@
DROP TABLE IF EXISTS "user" CASCADE;

@@ -0,0 +1,4 @@
CREATE TABLE "user"
(
    email VARCHAR(255) PRIMARY KEY
);
crates/tests/src/lib.rs (new file, 1 line)
@@ -0,0 +1 @@
// This file is needed to make the tests directory a crate
crates/tests/tests/diesel_crud_derive.rs (new file, 55 lines)
@@ -0,0 +1,55 @@
use diesel::{AsChangeset, Insertable, Queryable, Selectable};
use lib::diesel_crud_derive::{
    DieselCrudCreate, DieselCrudDelete, DieselCrudList, DieselCrudRead, DieselCrudUpdate,
};
use lib::diesel_crud_trait::DieselCrudCreate;
use test_containers::create_test_containers_pool;

mod schema;
#[cfg(test)]
mod test_containers;

#[derive(
    Debug,
    PartialEq,
    Queryable,
    Selectable,
    Insertable,
    AsChangeset,
    DieselCrudCreate,
    DieselCrudDelete,
    DieselCrudList,
    DieselCrudRead,
    DieselCrudUpdate,
)]
#[diesel_crud(insert = InsertUser)]
#[diesel(table_name = schema::user)]
struct User {
    #[diesel_crud(pk)]
    email: String,
}

#[derive(Clone, Insertable)]
#[diesel(table_name = schema::user)]
struct InsertUser {
    email: String,
}

#[tokio::test(flavor = "multi_thread")]
async fn test_insert_user() {
    let container = create_test_containers_pool().await.unwrap();
    let mut conn = container.pool.get().await.unwrap();
    let user = User::insert(
        InsertUser {
            email: "test".to_string(),
        },
        &mut conn,
    )
    .await;
    assert_eq!(
        user,
        Ok(User {
            email: "test".to_string()
        })
    );
}
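A follow-up test for the derived read and delete impls could follow the same shape; the sketch below is not part of the diff, it simply reuses User, InsertUser and create_test_containers_pool() exactly as the insert test does.

```rust
// Sketch of an additional test in the same file, exercising DieselCrudRead
// and DieselCrudDelete on the same `user` table.
#[tokio::test(flavor = "multi_thread")]
async fn test_read_then_delete_user() {
    use lib::diesel_crud_trait::{DieselCrudDelete, DieselCrudRead};

    let container = create_test_containers_pool().await.unwrap();
    let mut conn = container.pool.get().await.unwrap();
    User::insert(InsertUser { email: "read".to_string() }, &mut conn)
        .await
        .unwrap();

    let read = User::read("read".to_string(), &mut conn).await.unwrap();
    assert_eq!(read.email, "read");

    let deleted = User::delete("read".to_string(), &mut conn).await.unwrap();
    assert_eq!(deleted.email, "read");
}
```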
crates/tests/tests/schema.rs (new file, 6 lines)
@@ -0,0 +1,6 @@
diesel::table! {
    user (email) {
        #[max_length = 255]
        email -> Varchar,
    }
}
crates/tests/tests/test_containers.rs (new file, 11 lines)
@@ -0,0 +1,11 @@
use diesel_migrations::EmbeddedMigrations;
use lib::diesel::migration::run_migrations;
use lib::test::test_containers::{ContainerError, TestContainer};

pub async fn create_test_containers_pool() -> Result<TestContainer, ContainerError> {
    let test_container = lib::test::test_containers::create_test_containers_pool().await?;
    let connection = test_container.pool.get().await?;
    const MIGRATIONS: EmbeddedMigrations = diesel_migrations::embed_migrations!("./migrations");
    run_migrations(connection, MIGRATIONS).await?;
    Ok(test_container)
}
examples/multipart_file/Cargo.lock (generated, 151 lines changed)
@@ -57,7 +57,7 @@ dependencies = [
 "serde_urlencoded",
 "sync_wrapper 1.0.1",
 "tokio",
 "tower",
 "tower 0.4.13",
 "tower-layer",
 "tower-service",
 "tracing",
@@ -123,6 +123,26 @@ version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"

[[package]]
name = "derive_more"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4a9b99b9cbbe49445b21764dc0625032a89b145a2642e67603e1c936f5458d05"
dependencies = [
 "derive_more-impl",
]

[[package]]
name = "derive_more-impl"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cb7330aeadfbe296029522e6c40f315320aba36fc43a5b3632f3795348f3bd22"
dependencies = [
 "proc-macro2",
 "quote",
 "syn",
]

[[package]]
name = "encoding_rs"
version = "0.8.34"
@@ -286,12 +306,15 @@ checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe"

[[package]]
name = "lib"
version = "1.3.5"
version = "1.4.3"
dependencies = [
 "async-trait",
 "axum",
 "derive_more",
 "mime",
 "thiserror",
 "tokio",
 "tower",
 "tower 0.5.0",
 "tower-http",
 "tracing",
 "tracing-subscriber",
@@ -338,13 +361,14 @@ dependencies = [

[[package]]
name = "mio"
version = "0.8.11"
version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a4a650543ca06a924e8b371db273b2756685faae30f8487da1b56505a8f78b0c"
checksum = "80e04d1dcff3aae0704555fe5fee3bcfaf3d1fdf8a7e521d5b9d2b42acb52cec"
dependencies = [
 "hermit-abi",
 "libc",
 "wasi",
 "windows-sys 0.48.0",
 "windows-sys",
]

[[package]]
@@ -383,16 +407,6 @@ dependencies = [
 "winapi",
]

[[package]]
name = "num_cpus"
version = "1.16.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43"
dependencies = [
 "hermit-abi",
 "libc",
]

[[package]]
name = "object"
version = "0.36.1"
@@ -563,7 +577,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ce305eb0b4296696835b71df73eb912e0f1ffd2556a501fcede6e0c50349191c"
dependencies = [
 "libc",
 "windows-sys 0.52.0",
 "windows-sys",
]

[[package]]
@@ -627,25 +641,24 @@ dependencies = [

[[package]]
name = "tokio"
version = "1.38.0"
version = "1.40.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ba4f4a02a7a80d6f274636f0aa95c7e383b912d41fe721a31f29e29698585a4a"
checksum = "e2b070231665d27ad9ec9b8df639893f46727666c6767db40317fbe920a5d998"
dependencies = [
 "backtrace",
 "libc",
 "mio",
 "num_cpus",
 "pin-project-lite",
 "socket2",
 "tokio-macros",
 "windows-sys 0.48.0",
 "windows-sys",
]

[[package]]
name = "tokio-macros"
version = "2.3.0"
version = "2.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5f5ae998a069d4b5aba8ee9dad856af7d520c3699e6159b185c2acd48155d39a"
checksum = "693d596312e88961bc67d7f1f97af8a70227d9f90c31bba5806eec004978d752"
dependencies = [
 "proc-macro2",
 "quote",
@@ -668,6 +681,16 @@ dependencies = [
 "tracing",
]

[[package]]
name = "tower"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "36b837f86b25d7c0d7988f00a54e74739be6477f2aac6201b8f429a7569991b7"
dependencies = [
 "tower-layer",
 "tower-service",
]

[[package]]
name = "tower-http"
version = "0.5.2"
@@ -801,37 +824,13 @@ version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"

[[package]]
name = "windows-sys"
version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9"
dependencies = [
 "windows-targets 0.48.5",
]

[[package]]
name = "windows-sys"
version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d"
dependencies = [
 "windows-targets 0.52.5",
]

[[package]]
name = "windows-targets"
version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c"
dependencies = [
 "windows_aarch64_gnullvm 0.48.5",
 "windows_aarch64_msvc 0.48.5",
 "windows_i686_gnu 0.48.5",
 "windows_i686_msvc 0.48.5",
 "windows_x86_64_gnu 0.48.5",
 "windows_x86_64_gnullvm 0.48.5",
 "windows_x86_64_msvc 0.48.5",
 "windows-targets",
]

[[package]]
@@ -840,46 +839,28 @@ version = "0.52.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6f0713a46559409d202e70e28227288446bf7841d3211583a4b53e3f6d96e7eb"
dependencies = [
 "windows_aarch64_gnullvm 0.52.5",
 "windows_aarch64_msvc 0.52.5",
 "windows_i686_gnu 0.52.5",
 "windows_aarch64_gnullvm",
 "windows_aarch64_msvc",
 "windows_i686_gnu",
 "windows_i686_gnullvm",
 "windows_i686_msvc 0.52.5",
 "windows_x86_64_gnu 0.52.5",
 "windows_x86_64_gnullvm 0.52.5",
 "windows_x86_64_msvc 0.52.5",
 "windows_i686_msvc",
 "windows_x86_64_gnu",
 "windows_x86_64_gnullvm",
 "windows_x86_64_msvc",
]

[[package]]
name = "windows_aarch64_gnullvm"
version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8"

[[package]]
name = "windows_aarch64_gnullvm"
version = "0.52.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7088eed71e8b8dda258ecc8bac5fb1153c5cffaf2578fc8ff5d61e23578d3263"

[[package]]
name = "windows_aarch64_msvc"
version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc"

[[package]]
name = "windows_aarch64_msvc"
version = "0.52.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9985fd1504e250c615ca5f281c3f7a6da76213ebd5ccc9561496568a2752afb6"

[[package]]
name = "windows_i686_gnu"
version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e"

[[package]]
name = "windows_i686_gnu"
version = "0.52.5"
@@ -892,48 +873,24 @@ version = "0.52.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "87f4261229030a858f36b459e748ae97545d6f1ec60e5e0d6a3d32e0dc232ee9"

[[package]]
name = "windows_i686_msvc"
version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406"

[[package]]
name = "windows_i686_msvc"
version = "0.52.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "db3c2bf3d13d5b658be73463284eaf12830ac9a26a90c717b7f771dfe97487bf"

[[package]]
name = "windows_x86_64_gnu"
version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e"

[[package]]
name = "windows_x86_64_gnu"
version = "0.52.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4e4246f76bdeff09eb48875a0fd3e2af6aada79d409d33011886d3e1581517d9"

[[package]]
name = "windows_x86_64_gnullvm"
version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc"

[[package]]
name = "windows_x86_64_gnullvm"
version = "0.52.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "852298e482cd67c356ddd9570386e2862b5673c85bd5f88df9ab6802b334c596"

[[package]]
name = "windows_x86_64_msvc"
version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538"

[[package]]
name = "windows_x86_64_msvc"
version = "0.52.5"
@@ -6,4 +6,4 @@ edition = "2021"
[dependencies]
lib = { path = "../..", features = ["axum"] }
axum = "0.7.5"
tokio = { version = "1.38.0", features = ["rt-multi-thread", "macros"] }
tokio = { version = "1.40", features = ["rt-multi-thread", "macros"] }
flake.lock
generated
Normal file
44
flake.lock
generated
Normal file
@ -0,0 +1,44 @@
|
||||
{
|
||||
"nodes": {
|
||||
"nixpkgs": {
|
||||
"locked": {
|
||||
"lastModified": 1741196730,
|
||||
"narHash": "sha256-0Sj6ZKjCpQMfWnN0NURqRCQn2ob7YtXTAOTwCuz7fkA=",
|
||||
"owner": "NixOS",
|
||||
"repo": "nixpkgs",
|
||||
"rev": "48913d8f9127ea6530a2a2f1bd4daa1b8685d8a3",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "NixOS",
|
||||
"ref": "nixos-24.11",
|
||||
"repo": "nixpkgs",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"nixpkgs-unstable": {
|
||||
"locked": {
|
||||
"lastModified": 1741173522,
|
||||
"narHash": "sha256-k7VSqvv0r1r53nUI/IfPHCppkUAddeXn843YlAC5DR0=",
|
||||
"owner": "NixOS",
|
||||
"repo": "nixpkgs",
|
||||
"rev": "d69ab0d71b22fa1ce3dbeff666e6deb4917db049",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "NixOS",
|
||||
"ref": "nixos-unstable",
|
||||
"repo": "nixpkgs",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"root": {
|
||||
"inputs": {
|
||||
"nixpkgs": "nixpkgs",
|
||||
"nixpkgs-unstable": "nixpkgs-unstable"
|
||||
}
|
||||
}
|
||||
},
|
||||
"root": "root",
|
||||
"version": 7
|
||||
}
|
flake.nix (new file, 38 lines)
@@ -0,0 +1,38 @@
{
  inputs = {
    nixpkgs.url = "github:NixOS/nixpkgs/nixos-24.11";
    nixpkgs-unstable.url = "github:NixOS/nixpkgs/nixos-unstable";
  };

  outputs = { nixpkgs, nixpkgs-unstable, ... }:
    let
      system = "x86_64-linux";
    in
    {
      devShells.${system}.default =
        let
          pkgs = import nixpkgs {
            inherit system;
          };
          unstable = import nixpkgs-unstable {
            inherit system;
          };
        in
        pkgs.mkShell {
          packages = with pkgs; [
            git
            just
          ] ++ [
            # Rust
            gcc
            # cargo
          ] ++ [
            # Diesel
            diesel-cli
            unstable.libpq
          ];

          shellHook = "fish";
        };
    };
}
justfile (new executable file, 19 lines)
@@ -0,0 +1,19 @@
#!/usr/bin/env just --justfile

run:
    cargo run

fmt:
    cargo fmt --all

lint:
    cargo clippy --all-targets --all-features -- -D warnings

release:
    cargo build --release

test:
    cargo test --all-features --workspace

coverage:
    cargo llvm-cov
src/axum/app.rs (178 lines changed)
@@ -1,17 +1,22 @@
use axum::Router;
use axum::ServiceExt;
use axum::extract::Request;
use axum::handler::Handler;
use axum::response::IntoResponse;
use axum::routing::Route;
use std::convert::Infallible;
use std::io;
use std::net::IpAddr;
use {
    axum::{extract::Request, handler::Handler, Router, ServiceExt},
    std::{io, net::Ipv4Addr, net::SocketAddr},
    tokio::net::TcpListener,
    tower::layer::Layer,
    tower_http::{
        cors::CorsLayer,
        normalize_path::NormalizePathLayer,
        trace,
        trace::{HttpMakeClassifier, TraceLayer},
    },
    tracing::{info, Level},
};
use std::net::Ipv4Addr;
use std::net::SocketAddr;
use tokio::net::TcpListener;
use tower::{Service, layer::Layer};
use tower_http::cors::CorsLayer;
use tower_http::normalize_path::NormalizePathLayer;
use tower_http::trace;
use tower_http::trace::HttpMakeClassifier;
use tower_http::trace::TraceLayer;
use tracing::{Level, info};

// TODO trim trailing slash into macro > let _app = NormalizePathLayer::trim_trailing_slash().layer(create_app!(routes));
#[macro_export]
@@ -34,26 +39,53 @@ pub struct AppBuilder {
}

impl AppBuilder {
    /// Creates a new app builder with default options.
    pub fn new() -> Self {
        Self::default()
    }

    /// Creates the builder from the given router.
    /// Only the routes and layers will be used.
    pub fn from_router(router: Router) -> Self {
        Self {
            router,
            ..Self::default()
        }
    }

    /// Adds a route to the previously added routes
    pub fn route(mut self, route: Router) -> Self {
        self.router = self.router.merge(route);
        self
    }

    /// Adds multiple routes to the previously added routes
    pub fn routes(mut self, routes: impl IntoIterator<Item = Router>) -> Self {
        self.router = routes.into_iter().fold(self.router, Router::merge);
        self
    }

    /// Adds a layer to the previously added routes
    pub fn layer<L>(mut self, layer: L) -> Self
    where
        L: Layer<Route> + Clone + Send + Sync + 'static,
        L::Service: Service<Request> + Clone + Send + Sync + 'static,
        <L::Service as Service<Request>>::Response: IntoResponse + 'static,
        <L::Service as Service<Request>>::Error: Into<Infallible> + 'static,
        <L::Service as Service<Request>>::Future: Send + 'static,
    {
        self.router = self.router.layer(layer);
        self
    }

    /// Sets the socket for the server.
    pub fn socket<IP: Into<IpAddr>>(mut self, socket: impl Into<(IP, u16)>) -> Self {
        let (ip, port) = socket.into();
        self.socket = Some((ip.into(), port));
        self
    }

    /// Sets the port for the server.
    pub fn port(mut self, port: u16) -> Self {
        self.socket = if let Some((ip, _)) = self.socket {
            Some((ip, port))
@@ -63,6 +95,7 @@ impl AppBuilder {
        self
    }

    /// Sets the fallback handler.
    pub fn fallback<H, T>(mut self, fallback: H) -> Self
    where
        H: Handler<T, ()>,
@@ -72,42 +105,28 @@ impl AppBuilder {
        self
    }

    /// Sets the cors layer.
    pub fn cors(mut self, cors: CorsLayer) -> Self {
        self.cors = Some(cors);
        self
    }

    /// Sets the normalize path option. Default is true.
    pub fn normalize_path(mut self, normalize_path: bool) -> Self {
        self.normalize_path = Some(normalize_path);
        self
    }

    /// Sets the trace layer.
    pub fn tracing(mut self, tracing: TraceLayer<HttpMakeClassifier>) -> Self {
        self.tracing = Some(tracing);
        self
    }

    pub async fn serve(self) -> io::Result<()> {
        let _ = fmt_trace(); // Allowed to fail
        let listener = self.listener().await?;

        if self.normalize_path.unwrap_or(true) {
            let app = NormalizePathLayer::trim_trailing_slash().layer(self.create_app());
            axum::serve(listener, ServiceExt::<Request>::into_make_service(app)).await?;
        } else {
            let app = self.create_app();
            axum::serve(listener, app.into_make_service()).await?;
        };
        Ok(())
    }

    async fn listener(&self) -> io::Result<TcpListener> {
        let addr = SocketAddr::from(self.socket.unwrap_or((Ipv4Addr::UNSPECIFIED.into(), 8000)));
        info!("Initializing server on: {addr}");
        TcpListener::bind(&addr).await
    }

    fn create_app(self) -> Router {
    /// Creates the app with the given options.
    /// This method is useful for testing purposes.
    /// Options used for configuring the listener will be lost.
    pub fn build(self) -> Router {
        let mut app = self.router;
        if let Some(cors) = self.cors {
            app = app.layer(cors);
@@ -120,6 +139,35 @@ impl AppBuilder {
            ),
        )
    }

    /// Build the app and start the server
    /// # Default Options
    /// - IP == 0.0.0.0
    /// - Port == 8000
    /// - Cors == None
    /// - Normalize Path == true
    /// - Tracing == Default compact
    pub async fn serve(self) -> io::Result<()> {
        let _ = fmt_trace(); // Allowed to fail
        let listener = self.listener().await?;

        let should_normalize = self.normalize_path.unwrap_or(true);
        let app = self.build();

        if should_normalize {
            let app = NormalizePathLayer::trim_trailing_slash().layer(app);
            axum::serve(listener, ServiceExt::<Request>::into_make_service(app)).await?;
        } else {
            axum::serve(listener, app.into_make_service()).await?;
        };
        Ok(())
    }

    async fn listener(&self) -> io::Result<TcpListener> {
        let addr = SocketAddr::from(self.socket.unwrap_or((Ipv4Addr::UNSPECIFIED.into(), 8000)));
        info!("Initializing server on: {addr}");
        TcpListener::bind(&addr).await
    }
}

fn fmt_trace() -> Result<(), String> {
@@ -132,43 +180,37 @@ fn fmt_trace() -> Result<(), String> {

#[cfg(test)]
mod tests {
    use axum::Router;

    use super::*;
    use axum::Router;
    use std::time::Duration;
    use tokio::time::sleep;

    mod tokio_tests {
        use std::time::Duration;
        #[tokio::test]
        async fn test_app_builder_serve() {
            let handler = tokio::spawn(async {
                AppBuilder::new().serve().await.unwrap();
            });
            sleep(Duration::from_millis(250)).await;
            handler.abort();
        }

        use tokio::time::sleep;

        use super::*;

    #[tokio::test]
    async fn test_app_builder_serve() {
        let handler = tokio::spawn(async {
            AppBuilder::new().serve().await.unwrap();
        });
        sleep(Duration::from_secs(1)).await;
        handler.abort();
    }

        #[tokio::test]
        async fn test_app_builder_all() {
            let handler = tokio::spawn(async {
                AppBuilder::new()
                    .socket((Ipv4Addr::LOCALHOST, 8080))
                    .routes([Router::new()])
                    .fallback(|| async { "Fallback" })
                    .cors(CorsLayer::new())
                    .normalize_path(true)
                    .tracing(TraceLayer::new_for_http())
                    .serve()
                    .await
                    .unwrap();
            });
            sleep(Duration::from_secs(1)).await;
            handler.abort();
        }
    #[tokio::test]
    async fn test_app_builder_all() {
        let handler = tokio::spawn(async {
            AppBuilder::new()
                .socket((Ipv4Addr::LOCALHOST, 8080))
                .routes([Router::new()])
                .fallback(async || "Fallback")
                .cors(CorsLayer::new())
                .normalize_path(true)
                .tracing(TraceLayer::new_for_http())
                .layer(TraceLayer::new_for_http())
                .serve()
                .await
                .unwrap();
        });
        sleep(Duration::from_millis(250)).await;
        handler.abort();
    }

    #[test]
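Outside the test module, a consuming binary would drive the builder the same way the new test_app_builder_all test does. The sketch below assumes AppBuilder is exposed at lib::axum::app (the file lives at src/axum/app.rs) and invents the route and port.

```rust
// Minimal consumer sketch of the builder API shown above; the module path,
// route and port are assumptions for illustration.
use axum::{routing::get, Router};
use lib::axum::app::AppBuilder;

#[tokio::main]
async fn main() -> std::io::Result<()> {
    let routes = Router::new().route("/health", get(async || "ok"));

    // Defaults: 0.0.0.0, port 8000, trailing-slash normalization enabled.
    AppBuilder::from_router(routes)
        .port(3000)
        .normalize_path(true)
        .serve()
        .await
}
```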
src/axum/builder.rs (new file, 14 lines)
@@ -0,0 +1,14 @@
use crate::axum::traits::BuildJson;
use axum::body::Body;
use axum::http::header::CONTENT_TYPE;
use axum::http::Request;
use mime::APPLICATION_JSON;
use serde::Serialize;
use serde_json::json;

impl BuildJson for axum::http::request::Builder {
    fn json<T: Serialize>(self, body: T) -> Result<Request<Body>, axum::http::Error> {
        self.header(CONTENT_TYPE, APPLICATION_JSON.as_ref())
            .body(Body::new(json!(body).to_string()))
    }
}
```
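In a test, this extension lets a JSON request be built in one call. The sketch below assumes BuildJson is importable as lib::axum::traits::BuildJson (mirroring the crate-internal path above) and uses an invented payload type.

```rust
// Invented payload and assumed import path; illustrates the BuildJson
// extension implemented above.
use axum::http::Request;
use lib::axum::traits::BuildJson;

#[derive(serde::Serialize)]
struct Login {
    email: String,
}

fn login_request() -> Result<Request<axum::body::Body>, axum::http::Error> {
    // Sets Content-Type: application/json and serializes the body via serde_json.
    Request::builder()
        .uri("/login")
        .method("POST")
        .json(Login { email: "a@b.c".to_string() })
}
```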
@ -1,62 +1,28 @@
use axum::{
    async_trait,
    extract::{
        multipart::{Field, MultipartError, MultipartRejection},
        FromRequest, Multipart, Request,
        multipart::{Field, MultipartError, MultipartRejection},
    },
    response::IntoResponse,
};
use mime::Mime;
use std::str::FromStr;
use thiserror::Error;

#[derive(PartialEq, Eq, Ord, PartialOrd, Hash, Debug, Clone, Copy)]
pub enum ContentType {
    Json,
    Form,
    Multipart,
    Pdf,
    Html,
    Unknown,
}

impl From<&str> for ContentType {
    fn from(content_type: &str) -> Self {
        match content_type {
            "application/json" => ContentType::Json,
            "application/x-www-form-urlencoded" => ContentType::Form,
            "multipart/form-data" => ContentType::Multipart,
            "application/pdf" => ContentType::Pdf,
            "text/html" => ContentType::Html,
            _ => ContentType::Unknown,
        }
    }
}

impl From<String> for ContentType {
    fn from(content_type: String) -> Self {
        ContentType::from(content_type.as_str())
    }
}

impl From<Option<&str>> for ContentType {
    fn from(content_type: Option<&str>) -> Self {
        content_type
            .map(ContentType::from)
            .unwrap_or(ContentType::Unknown)
    }
}

/// A file extracted from a multipart request.
#[derive(Debug, Clone, PartialEq)]
pub struct File {
    pub filename: String,
    pub bytes: Vec<u8>,
    pub content_type: ContentType,
    pub content_type: Mime,
}

impl File {
    /// Creates a new file with the given filename, bytes and content type.
    pub fn new(
        filename: impl Into<String>,
        bytes: impl Into<Vec<u8>>,
        content_type: impl Into<ContentType>,
        content_type: impl Into<Mime>,
    ) -> Self {
        Self {
            filename: filename.into(),
@ -65,12 +31,15 @@ impl File {
        }
    }

    async fn from_field(field: Field<'_>) -> Result<Self, MultipartFileRejection> {
    /// Creates a new file from a field in a multipart request.
    pub async fn from_field(field: Field<'_>) -> Result<Self, MultipartFileRejection> {
        let filename = field
            .file_name()
            .ok_or(MultipartFileRejection::MissingFilename)?
            .to_string();
        let content_type: ContentType = field.content_type().into();
        let content_type = Mime::from_str(field.content_type().ok_or_else(|| {
            MultipartFileRejection::FieldError("Missing or illegal content type".to_string())
        })?)?;
        let bytes = field.bytes().await?;
        Ok(File::new(filename, bytes, content_type))
    }
@ -87,12 +56,15 @@ pub struct MultipartFile(pub File);
#[derive(Debug, Clone, PartialEq)]
pub struct MultipartFiles(pub Vec<File>);

/// Rejection type for multipart file extractors.
#[derive(Debug, Error)]
pub enum MultipartFileRejection {
    #[error(transparent)]
    MultipartRejection(#[from] MultipartRejection),
    #[error("Field error: {0}")]
    FieldError(String),
    #[error(transparent)]
    FromStrError(#[from] mime::FromStrError),
    #[error("No files found")]
    NoFiles,
    #[error("Expected one file, got several")]
@ -130,17 +102,32 @@ impl IntoResponse for MultipartFileRejection {
            MultipartFileRejection::BodyError(error) => {
                (axum::http::StatusCode::BAD_REQUEST, error).into_response()
            }
            MultipartFileRejection::FromStrError(error) => {
                (axum::http::StatusCode::BAD_REQUEST, error.to_string()).into_response()
            }
        }
    }
}

#[async_trait]
impl<S> FromRequest<S> for MultipartFile
where
    S: Send + Sync,
{
    type Rejection = MultipartFileRejection;

    /// Extracts a single file from a multipart request.
    /// Expects exactly one file. A file must have a name, bytes and optionally a content type.
    /// This extractor consumes the request and must be placed last in the handler.
    /// # Example
    /// ```
    /// use std::str::from_utf8;
    /// use axum::response::Html;
    /// use lib::axum::extractor::MultipartFile;
    ///
    /// async fn upload_file(MultipartFile(file): MultipartFile) -> Html<String> {
    ///     Html(String::from_utf8(file.bytes).unwrap())
    /// }
    /// ```
    async fn from_request(req: Request, state: &S) -> Result<Self, Self::Rejection> {
        let multipart = Multipart::from_request(req, state).await?;
        let files = get_files(multipart).await?;
@ -153,13 +140,30 @@ where
    }
}

#[async_trait]
impl<S> FromRequest<S> for MultipartFiles
where
    S: Send + Sync,
{
    type Rejection = MultipartFileRejection;

    /// Extracts multiple files from a multipart request.
    /// Expects at least one file. A file must have a name, bytes and optionally a content type.
    /// This extractor consumes the request and must be placed last in the handler.
    /// # Example
    /// ```
    /// use axum::response::Html;
    /// use lib::axum::extractor::MultipartFiles;
    /// use std::str::from_utf8;
    ///
    /// async fn upload_files(MultipartFiles(files): MultipartFiles) -> Html<String> {
    ///     let content = files
    ///         .iter()
    ///         .map(|file| String::from_utf8(file.bytes.clone()).unwrap())
    ///         .collect::<Vec<String>>()
    ///         .join("<br>");
    ///     Html(content)
    /// }
    /// ```
    async fn from_request(req: Request, state: &S) -> Result<Self, Self::Rejection> {
        let multipart = Multipart::from_request(req, state).await?;
        let files = get_files(multipart).await?;
@ -171,7 +175,7 @@ where
    }
}

async fn get_files<'a>(mut multipart: Multipart) -> Result<Vec<File>, MultipartFileRejection> {
async fn get_files(mut multipart: Multipart) -> Result<Vec<File>, MultipartFileRejection> {
    let mut files = vec![];
    while let Some(field) = multipart.next_field().await? {
        files.push(File::from_field(field).await?);
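A short sketch of wiring the extractor into a router, using axum's standard `routing::post` helper; the `/upload` path and handler body are illustrative only.

use axum::{response::Html, routing::post, Router};
use lib::axum::extractor::MultipartFile;

// The route path and response body are illustrative.
async fn upload_file(MultipartFile(file): MultipartFile) -> Html<String> {
    Html(format!("received {} ({} bytes)", file.filename, file.bytes.len()))
}

fn app() -> Router {
    Router::new().route("/upload", post(upload_file))
}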
@ -1,5 +1,9 @@
#[cfg(feature = "io")]
use {crate::io::file, axum::body::Body, axum::response::Html, std::io};
use {
    crate::io::file,
    axum::{body::Body, response::Html},
    std::io,
};

/// Load an HTML file from the given file path, relative to the current directory.
/// # Arguments
@ -1,6 +1,11 @@
pub mod app;
#[cfg(feature = "serde")]
pub mod builder;
pub mod extractor;
pub mod load;
#[cfg(feature = "serde")]
pub mod response;
pub mod router;
pub mod traits;
#[cfg(feature = "serde")]
pub mod wrappers;
@ -1,10 +1,14 @@
use {
    crate::serde::response::BaseResponse,
    crate::{serde::response::BaseResponse, serde::traits::DeserializeInto},
    axum::{
        response::{IntoResponse, Response},
        Json,
        body::to_bytes,
        response::{IntoResponse, Response},
    },
    serde::{
        Serialize,
        de::{DeserializeOwned, Error},
    },
    serde::Serialize,
};

impl<T: Serialize> IntoResponse for BaseResponse<T> {
@ -13,11 +17,21 @@ impl<T: Serialize> IntoResponse for BaseResponse<T> {
    }
}

impl DeserializeInto for Response {
    async fn deserialize_into<T: DeserializeOwned>(self) -> Result<T, serde_json::Error> {
        let body = to_bytes(self.into_body(), usize::MAX).await.map_err(|e| {
            serde_json::Error::custom(format!("Failed to read response body: {}", e))
        })?;
        serde_json::from_slice(&body)
    }
}

#[cfg(test)]
mod tests {
    use axum::http::header::CONTENT_TYPE;
    use axum::http::{HeaderValue, StatusCode};
    use axum::response::IntoResponse;
    use mime::APPLICATION_JSON;
    use serde::Serialize;

    use crate::serde::response::BaseResponse;
@ -39,7 +53,7 @@ mod tests {
        assert_eq!(json_response.status(), StatusCode::OK);
        assert_eq!(
            json_response.headers().get(CONTENT_TYPE),
            Some(&HeaderValue::from_static("application/json"))
            Some(&HeaderValue::from_static(APPLICATION_JSON.as_ref()))
        );
    }
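A sketch of the round trip the new `DeserializeInto` impl enables in tests, assuming a payload type that derives both `Serialize` and `Deserialize`; serde ignores the extra `version` field when deserializing.

use axum::response::IntoResponse;
use lib::serde::traits::DeserializeInto;
use serde::{Deserialize, Serialize};

// Illustrative payload type.
#[derive(Serialize, Deserialize)]
struct Message {
    message: String,
}

async fn round_trip() -> Result<Message, serde_json::Error> {
    // Wrap in a BaseResponse, render it, then read the body back.
    let response = lib::from!(Message { message: "Hi".into() }).into_response();
    response.deserialize_into::<Message>().await
}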
@ -13,8 +13,8 @@
/// use lib::router;
/// async fn simplify(path: axum::extract::path::Path<String>) {}
/// router!("/simplify", lib::routes!(
///     get "/:exp" => simplify,
///     get "/table/:exp" => || async {}
///     get "/{exp}" => simplify,
///     get "/table/{exp}" => async || {}
/// ));
/// ```
#[macro_export]
@ -29,12 +29,28 @@ macro_rules! router {
            $body
        }
    };
    ($body:expr; $state:ident: $($bound:tt),*) => {
        pub fn router<$state: $($bound+)* 'static>() -> axum::Router<$state> {
            $body
        }
    };
    ($body:expr; $generic:ident: $($bound:tt),* -> $state:ty) => {
        pub fn router<$generic: $($bound+)* 'static>() -> axum::Router<$state<$generic>> {
            $body
        }
    };
    ($route:expr, $router:expr) => {
        router!(axum::Router::new().nest($route, $router));
    };
    ($route:expr, $router:expr, $state:ty) => {
        router!(axum::Router::new().nest($route, $router); $state);
    };
    ($route:expr, $router:expr, $state:ident: $($bound:tt),*) => {
        router!(axum::Router::new().nest($route, $router); $state: $($bound),*);
    };
    ($route:expr, $router:expr, $generic:ident: $($bound:tt),* -> $state:ty) => {
        router!(axum::Router::new().nest($route, $router); $generic: $($bound),* -> $state);
    };
    ($($method:ident $route:expr => $func:expr),* $(,)?) => {
        router!($crate::routes!($($method $route => $func),*));
    };
@ -60,6 +76,11 @@ macro_rules! routes {
    };
}

/// Merges the given routers into a single router.
/// # Examples
/// ```
/// let _: axum::Router<()> = lib::join_routes![axum::Router::new(), axum::Router::new()];
/// ```
#[macro_export]
macro_rules! join_routes {
    ($($route:expr),* $(,)?) => {
@ -71,8 +92,8 @@ macro_rules! join_routes {

#[cfg(test)]
mod tests {
    use axum::extract::State;
    use axum::Router;
    use axum::extract::State;

    async fn index() {}

@ -112,6 +133,18 @@ mod tests {
        );
    }

    #[test]
    fn test_nested_router_with_generic_state() {
        router!(
            "/simplify",
            routes!(
                get "/:exp" => || async {},
                get "/table/:exp" => |_state: State<T>| async {}
            ),
            T: Clone, Send, Sync
        );
    }

    #[test]
    fn test_routes() {
        let _router: Router = routes!(
7
src/axum/traits.rs
Normal file
@ -0,0 +1,7 @@
use axum::body::Body;
use axum::http::Request;
use serde::Serialize;

pub trait BuildJson {
    fn json<T: Serialize>(self, body: T) -> Result<Request<Body>, axum::http::Error>;
}
24
src/axum/wrappers.rs
Normal file
@ -0,0 +1,24 @@
use axum::response::{IntoResponse, Response};
use derive_more::{Constructor, From};
use into_response_derive::IntoResponse;
use serde::Serialize;

/// Wrapper for a vector of items.
#[derive(Debug, Clone, PartialEq, Default, Serialize, From, Constructor)]
pub struct Array<T: Serialize> {
    pub data: Vec<T>,
}

/// Wrapper for a count.
#[derive(
    Debug, Clone, Copy, PartialEq, Eq, Default, Serialize, IntoResponse, From, Constructor,
)]
pub struct Count {
    pub count: usize,
}

impl<T: Serialize> IntoResponse for Array<T> {
    fn into_response(self) -> Response {
        crate::from!(self).into_response()
    }
}
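A sketch of the wrappers used as handler return types, relying on the `Constructor`-derived `new` and the `IntoResponse` impls above; the handler names are illustrative.

use lib::axum::wrappers::{Array, Count};

async fn list_names() -> Array<String> {
    Array::new(vec!["a".to_string(), "b".to_string()])
}

async fn count_names() -> Count {
    Count::new(2)
}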
28
src/diesel/get_connection.rs
Normal file
@ -0,0 +1,28 @@
use deadpool_diesel::Status;
use derive_more::From;
use diesel_async::AsyncPgConnection;
use diesel_async::pooled_connection::deadpool::{Object, PoolError};
use lib::diesel::pool::PgPool;

pub trait GetConnection: Clone + Send + Sync {
    fn get(
        &self,
    ) -> impl Future<Output = Result<Object<AsyncPgConnection>, GetConnectionError>> + Send;
    fn status(&self) -> Status;
}

impl GetConnection for PgPool {
    async fn get(&self) -> Result<Object<AsyncPgConnection>, GetConnectionError> {
        self.get().await.map_err(Into::into)
    }
    #[inline]
    fn status(&self) -> Status {
        self.status()
    }
}

#[derive(Debug, From)]
pub enum GetConnectionError {
    PoolError(PoolError),
    DieselError(diesel::result::Error),
}
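A sketch of code written against `GetConnection` instead of a concrete pool, which is what lets the `PoolStub` test double further down be swapped in; the function name is illustrative.

use lib::diesel::get_connection::{GetConnection, GetConnectionError};

// Generic over the pool so tests can pass a stub instead of a real PgPool.
async fn ping<P: GetConnection>(pool: &P) -> Result<(), GetConnectionError> {
    let _conn = pool.get().await?;
    // Real code would run diesel-async queries against the connection here.
    Ok(())
}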
22
src/diesel/migration.rs
Normal file
@ -0,0 +1,22 @@
use diesel::pg::Pg;
use diesel_async::AsyncConnection;
use diesel_async::async_connection_wrapper::AsyncConnectionWrapper;
use diesel_migrations::{EmbeddedMigrations, MigrationHarness};
use tokio::task::JoinError;

/// Run Diesel migrations using an async connection.
/// Only works with Postgres.
pub async fn run_migrations<A>(
    async_connection: A,
    migrations: EmbeddedMigrations,
) -> Result<(), JoinError>
where
    A: AsyncConnection<Backend = Pg> + 'static,
{
    let mut async_wrapper = AsyncConnectionWrapper::<A>::from(async_connection);

    tokio::task::spawn_blocking(move || {
        async_wrapper.run_pending_migrations(migrations).unwrap();
    })
    .await
}
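A sketch of calling `run_migrations` at startup, assuming a `migrations/` directory embedded with `diesel_migrations::embed_migrations!`.

use diesel_async::{AsyncConnection, AsyncPgConnection};
use diesel_migrations::{embed_migrations, EmbeddedMigrations};
use lib::diesel::migration::run_migrations;

// Assumes the caller's crate has a `migrations/` directory at its root.
const MIGRATIONS: EmbeddedMigrations = embed_migrations!("migrations");

async fn migrate(database_url: &str) -> Result<(), Box<dyn std::error::Error>> {
    let conn = AsyncPgConnection::establish(database_url).await?;
    run_migrations(conn, MIGRATIONS).await?;
    Ok(())
}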
6
src/diesel/mod.rs
Normal file
@ -0,0 +1,6 @@
pub mod get_connection;
pub mod migration;
pub mod pool;

/// Re-export diesel::result::Error as DieselError
pub type DieselError = diesel::result::Error;
24
src/diesel/pool.rs
Normal file
@ -0,0 +1,24 @@
use deadpool_diesel::postgres::BuildError;
use diesel_async::pooled_connection::deadpool::Pool;
use diesel_async::pooled_connection::AsyncDieselConnectionManager;
use diesel_async::AsyncPgConnection;

/// A type alias for the asynchronous PostgreSQL connection pool.
pub type PgPool = Pool<AsyncPgConnection>;

/// Create a deadpool connection pool from the given URL.
/// Using the default pool size and other settings.
pub fn create_pool_from_url(url: impl Into<String>) -> Result<PgPool, BuildError> {
    let config = AsyncDieselConnectionManager::<AsyncPgConnection>::new(url);
    Pool::builder(config).build()
}

/// Create a deadpool connection pool from the given URL.
/// Using the given pool size and other default settings.
pub fn create_pool_from_url_with_size(
    url: impl Into<String>,
    size: usize,
) -> Result<PgPool, BuildError> {
    let config = AsyncDieselConnectionManager::<AsyncPgConnection>::new(url);
    Pool::builder(config).max_size(size).build()
}
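A sketch of building the pool from an environment variable; `DATABASE_URL` is an assumed variable name, not something the crate mandates.

use lib::diesel::pool::{create_pool_from_url, PgPool};

fn connect() -> PgPool {
    // DATABASE_URL is an assumption; any connection string source works.
    let url = std::env::var("DATABASE_URL").expect("DATABASE_URL must be set");
    create_pool_from_url(url).expect("failed to build connection pool")
}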
@ -1,5 +1,10 @@
use {std::io::Error, tokio::fs::File, tokio_util::io::ReaderStream};

/// Loads a file from the file system and returns a stream of bytes.
/// # Arguments
/// * `file_path` - The path to the file to load.
/// # Returns
/// A stream of bytes from the file if the file is found. Otherwise, an error is returned.
pub async fn load_file<Path>(file_path: Path) -> Result<ReaderStream<File>, Error>
where
    Path: AsRef<std::path::Path>,
14
src/lib.rs
@ -1,18 +1,28 @@
#![allow(dead_code)]

#[cfg(all(feature = "derive", feature = "serde"))]
#[cfg(all(feature = "derive", feature = "diesel"))]
pub extern crate diesel_crud_derive;
#[cfg(feature = "diesel")]
pub extern crate diesel_crud_trait;
#[cfg(all(feature = "derive", feature = "axum", feature = "serde"))]
pub extern crate into_response_derive;
#[cfg(feature = "read-files")]
pub extern crate read_files;
extern crate self as lib;

#[cfg(feature = "axum")]
pub mod axum;
#[cfg(feature = "diesel")]
pub mod diesel;
#[cfg(feature = "io")]
pub mod io;
#[cfg(feature = "nom")]
pub mod nom;
#[cfg(feature = "serde")]
pub mod serde;
#[cfg(feature = "test")]
pub mod test;
#[cfg(feature = "time")]
pub mod time;
pub mod traits;
#[cfg(feature = "iter")]
pub mod vector;
@ -1,23 +1,22 @@
use {
    nom::{
        bytes::complete::take_while_m_n,
        character::complete::{char, multispace0},
        combinator::eof,
        sequence::{delimited, terminated},
        IResult, InputIter, InputLength, InputTake, Slice,
    },
    std::ops::RangeFrom,
};

// TODO generic input
use nom::IResult;
use nom::bytes::complete::take_while_m_n;
use nom::character::complete::char;
use nom::character::complete::multispace0;
use nom::combinator::eof;
use nom::error::ParseError;
use nom::sequence::delimited;
use nom::sequence::terminated;
use nom::{Input, Parser};

/// Trim leading and trailing whitespace from the input Parser
/// - Parameters
///     - `inner`: The parser to trim
/// - Returns: A parser that trims leading and trailing whitespace from the input and then runs the value from the inner parser
pub fn trim<'a, Parser, R>(inner: Parser) -> impl FnMut(&'a str) -> IResult<&'a str, R>
pub fn trim<I, O, F, E: ParseError<I>>(inner: F) -> impl Parser<I, Output = O, Error = E>
where
    Parser: FnMut(&'a str) -> IResult<&'a str, R>,
    I: Input,
    F: Parser<I, Output = O, Error = E>,
    <I as Input>::Item: nom::AsChar,
{
    delimited(multispace0, inner, multispace0)
}
@ -27,9 +26,11 @@ where
/// - Parameters
///     - `inner`: The parser to run inside the parentheses
/// - Returns: A parser that parses a parenthesized expression
pub fn parenthesized<'a, Parser, R>(inner: Parser) -> impl FnMut(&'a str) -> IResult<&'a str, R>
pub fn parenthesized<I, O, F, E: ParseError<I>>(inner: F) -> impl Parser<I, Output = O, Error = E>
where
    Parser: FnMut(&'a str) -> IResult<&'a str, R>,
    I: Input,
    F: Parser<I, Output = O, Error = E>,
    <I as Input>::Item: nom::AsChar,
{
    delimited(char('('), inner, char(')'))
}
@ -39,32 +40,58 @@ where
/// - `n`: The length of the string to take
/// - `predicate`: The predicate to call to validate the input
/// - Returns: A parser that takes `n` characters from the input
pub fn take_where<F, Input>(n: usize, predicate: F) -> impl Fn(Input) -> IResult<Input, Input>
pub fn take_where<F, I>(n: usize, predicate: F) -> impl FnMut(I) -> IResult<I, I>
where
    Input: InputTake + InputIter + InputLength + Slice<RangeFrom<usize>>,
    F: Fn(<Input as InputIter>::Item) -> bool + Copy,
    I: Input,
    F: Fn(<I as Input>::Item) -> bool,
{
    take_while_m_n(n, n, predicate)
}

pub fn exhausted<'a, Parser, R>(inner: Parser) -> impl FnMut(&'a str) -> IResult<&'a str, R>
/// Parse the inner parser and then the end of the input.
/// Very useful for ensuring that the entire input is consumed.
/// - Parameters
///     - `inner`: The parser to run
/// - Returns: A parser that runs the inner parser and then the end of the input
/// # Example
/// ```
/// use lib::nom::combinators::exhausted;
/// use nom::bytes::complete::{tag};
/// use nom::Parser;
///
/// let input = "test";
/// let (remaining, result) = exhausted(tag::<&str, &str, nom::error::Error<&str>>("test")).parse(input).unwrap();
/// assert_eq!(remaining, "");
/// assert_eq!(result, "test");
/// ```
/// - Fails if the input is not exhausted
/// ```
/// use lib::nom::combinators::exhausted;
/// use nom::bytes::complete::{tag};
/// use nom::Parser;
///
/// let input = "test";
/// assert!(exhausted(tag::<&str, &str, nom::error::Error<&str>>("tes")).parse(input).is_err());
/// ```
pub fn exhausted<F, I, O, E: ParseError<I>>(inner: F) -> impl Parser<I, Output = O, Error = E>
where
    Parser: FnMut(&'a str) -> IResult<&'a str, R>,
    I: Input,
    F: Parser<I, Output = O, Error = E>,
{
    terminated(inner, eof)
}

#[cfg(test)]
mod tests {
    use nom::{bytes::complete::take_while, sequence::tuple};

    use super::*;
    use nom::bytes::complete::take_while;

    #[test]
    fn test_trim_both_sides() {
        let input = " test ";
        let (remaining, result) =
            trim(take_where(4, |c: char| c.is_ascii_alphabetic()))(input).unwrap();
        let (remaining, result) = trim(take_where(4, |c: char| c.is_ascii_alphabetic()))
            .parse(input)
            .unwrap();
        assert_eq!(remaining, "");
        assert_eq!(result, "test");
    }
@ -72,8 +99,9 @@ mod tests {
    #[test]
    fn test_trim_leading() {
        let input = " test";
        let (remaining, result) =
            trim(take_where(4, |c: char| c.is_ascii_alphabetic()))(input).unwrap();
        let (remaining, result) = trim(take_where(4, |c: char| c.is_ascii_alphabetic()))
            .parse(input)
            .unwrap();
        assert_eq!(remaining, "");
        assert_eq!(result, "test");
    }
@ -81,8 +109,9 @@ mod tests {
    #[test]
    fn test_trim_trailing() {
        let input = "test ";
        let (remaining, result) =
            trim(take_where(4, |c: char| c.is_ascii_alphabetic()))(input).unwrap();
        let (remaining, result) = trim(take_where(4, |c: char| c.is_ascii_alphabetic()))
            .parse(input)
            .unwrap();
        assert_eq!(remaining, "");
        assert_eq!(result, "test");
    }
@ -90,8 +119,9 @@ mod tests {
    #[test]
    fn test_trim_no_trim() {
        let input = "test";
        let (remaining, result) =
            trim(take_where(4, |c: char| c.is_ascii_alphabetic()))(input).unwrap();
        let (remaining, result) = trim(take_where(4, |c: char| c.is_ascii_alphabetic()))
            .parse(input)
            .unwrap();
        assert_eq!(remaining, "");
        assert_eq!(result, "test");
    }
@ -99,8 +129,9 @@ mod tests {
    #[test]
    fn test_parenthesized() {
        let input = "(test)";
        let (remaining, result) =
            parenthesized(take_where(4, |c: char| c.is_ascii_alphabetic()))(input).unwrap();
        let (remaining, result) = parenthesized(take_where(4, |c: char| c.is_ascii_alphabetic()))
            .parse(input)
            .unwrap();
        assert_eq!(remaining, "");
        assert_eq!(result, "test");
    }
@ -108,7 +139,11 @@ mod tests {
    #[test]
    fn test_parenthesized_parse_until_end() {
        let input = "(test)";
        assert!(parenthesized(take_while(|_| true))(input).is_err());
        assert!(
            parenthesized::<&str, &str, _, nom::error::Error<&str>>(take_while(|_| true))
                .parse(input)
                .is_err()
        );
    }

    #[test]
@ -129,7 +164,7 @@ mod tests {
    fn test_take_where_too_much() {
        let input = "testing";
        assert_eq!(
            take_where(4, |c: char| c.is_ascii_alphabetic())(input),
            take_where(4, |c: char| c.is_ascii_alphabetic()).parse(input),
            Ok(("ing", "test"))
        );
    }
@ -137,14 +172,19 @@ mod tests {
    #[test]
    fn test_take_where_predicate_false() {
        let input = "test";
        assert!(take_where(4, |c: char| c.is_ascii_digit())(input).is_err());
        assert!(
            take_where(4, |c: char| c.is_ascii_digit())
                .parse(input)
                .is_err()
        );
    }

    #[test]
    fn test_exhausted() {
        let input = "test";
        let (remaining, result) =
            exhausted(take_where(4, |c: char| c.is_ascii_alphabetic()))(input).unwrap();
        let (remaining, result) = exhausted(take_where(4, |c: char| c.is_ascii_alphabetic()))
            .parse(input)
            .unwrap();
        assert_eq!(remaining, "");
        assert_eq!(result, "test");
    }
@ -152,16 +192,21 @@ mod tests {
    #[test]
    fn test_exhausted_not_exhausted() {
        let input = "test ";
        assert!(exhausted(take_where(4, |c: char| c.is_ascii_alphabetic()))(input).is_err());
        assert!(
            exhausted(take_where(4, |c: char| c.is_ascii_alphabetic()))
                .parse(input)
                .is_err()
        );
    }

    #[test]
    fn test_exhausted_tuple() {
        let input = "test";
        let (remaining, result) = exhausted(tuple((
        let (remaining, result) = exhausted((
            take_where(3, |c: char| c.is_ascii_alphabetic()),
            take_while(|c: char| c.is_ascii_alphabetic()),
        )))(input)
        ))
        .parse(input)
        .unwrap();
        assert_eq!(remaining, "");
        assert_eq!(result, ("tes", "t"));
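A sketch of composing the rewritten combinators with nom 8's `Parser::parse`, mirroring the doc examples above; the `parse_unit` name and the `tag("test")` grammar are illustrative.

use lib::nom::combinators::{exhausted, parenthesized, trim};
use nom::Parser;
use nom::bytes::complete::tag;

// Accepts surrounding whitespace, requires parentheses, rejects trailing input.
fn parse_unit(input: &str) -> nom::IResult<&str, &str> {
    exhausted(trim(parenthesized(tag("test")))).parse(input)
}

// parse_unit(" (test) ") == Ok(("", "test"))
// parse_unit("(test) trailing") is an error because the input is not exhausted.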
@ -1 +1,2 @@
pub mod response;
pub mod traits;
@ -4,7 +4,7 @@ use serde::Serialize;
pub struct BaseResponse<T: Serialize> {
    pub version: String,
    #[serde(flatten)]
    pub body: T, // T must be a struct (or enum?)
    pub body: T, // T must be a struct (or enum?) TODO from! macro that validates T on compile time
}

impl<T: Serialize> BaseResponse<T> {
@ -16,6 +16,14 @@ impl<T: Serialize> BaseResponse<T> {
    }
}

// TODO version should reference the version in caller's Cargo.toml
#[macro_export]
macro_rules! from {
    ($body:expr) => {
        $crate::serde::response::BaseResponse::new(env!("CARGO_PKG_VERSION"), $body)
    };
}

#[cfg(test)]
mod tests {
    use super::*;
@ -35,4 +43,14 @@ mod tests {
        );
        assert_eq!(response.body.message, "Hi".to_string());
    }

    #[test]
    fn test_from_macro() {
        let response = from!(Response {
            message: "Hi".to_string(),
        });
        from!(1); // Should not be allowed
        assert_eq!(response.version, env!("CARGO_PKG_VERSION"));
        assert_eq!(response.body.message, "Hi".to_string());
    }
}
7
src/serde/traits.rs
Normal file
@ -0,0 +1,7 @@
use serde::de::DeserializeOwned;

pub trait DeserializeInto {
    fn deserialize_into<T: DeserializeOwned>(
        self,
    ) -> impl Future<Output = Result<T, serde_json::Error>>;
}
43
src/test/diesel_pool.rs
Normal file
@ -0,0 +1,43 @@
use crate::diesel::DieselError;
use crate::diesel::get_connection::{GetConnection, GetConnectionError};
use crate::diesel::pool::PgPool;
use deadpool_diesel::Status;
use deadpool_diesel::postgres::BuildError;
use derive_more::From;
use diesel_async::pooled_connection::deadpool::Object;
use diesel_async::{AsyncConnection, AsyncPgConnection};
use lib::diesel::pool::create_pool_from_url_with_size;

#[derive(Clone)]
pub struct PoolStub(PgPool);

#[derive(Debug, PartialEq, From)]
pub enum Error {
    Connection(diesel::ConnectionError),
    Database(DieselError),
}

pub async fn setup_test_transaction(url: impl AsRef<str>) -> Result<AsyncPgConnection, Error> {
    let mut conn = AsyncPgConnection::establish(url.as_ref()).await?;
    conn.begin_test_transaction().await?;
    Ok(conn)
}

pub async fn create_test_pool_url_with_size(
    url: impl Into<String>,
    size: usize,
) -> Result<PoolStub, BuildError> {
    let pool = create_pool_from_url_with_size(url, size)?;
    Ok(PoolStub(pool))
}

impl GetConnection for PoolStub {
    async fn get(&self) -> Result<Object<AsyncPgConnection>, GetConnectionError> {
        let mut conn = self.0.get().await?;
        conn.begin_test_transaction().await?;
        Ok(conn)
    }
    fn status(&self) -> Status {
        unimplemented!("PoolStub does not support status")
    }
}
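A sketch of `setup_test_transaction` in a test, assuming the relevant features are enabled; the connection URL is an assumption and should point at a disposable database.

use lib::test::diesel_pool::setup_test_transaction;

#[tokio::test]
async fn queries_are_rolled_back() {
    // The URL is illustrative; nothing run on this connection is committed.
    let mut conn = setup_test_transaction("postgres://postgres:postgres@localhost:5432/postgres")
        .await
        .unwrap();
    // Run diesel-async queries against `conn` here.
    let _ = &mut conn;
}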
3
src/test/mod.rs
Normal file
@ -0,0 +1,3 @@
#[cfg(feature = "diesel")]
pub mod diesel_pool;
pub mod test_containers;
45
src/test/test_containers.rs
Normal file
@ -0,0 +1,45 @@
use crate::diesel::pool::{PgPool, create_pool_from_url};
use deadpool_diesel::postgres::BuildError;
use derive_more::{Constructor, From};
use diesel_async::pooled_connection::deadpool::PoolError;
use lib::diesel::DieselError;
use testcontainers_modules::postgres::Postgres;
use testcontainers_modules::testcontainers::runners::AsyncRunner;
use testcontainers_modules::testcontainers::{ContainerAsync, TestcontainersError};
use tokio::task::JoinError;

/// When the TestContainer is dropped, the container will be removed.
/// # Errors
/// If destructed and the container field is dropped, the container will be dropped, and using the pool will cause an error.
#[derive(Constructor)]
pub struct TestContainer {
    pub container: ContainerAsync<Postgres>,
    pub pool: PgPool,
}

const TEST_CONTAINERS_INTERNAL_PORT: u16 = 5432;

pub async fn create_test_containers_pool() -> Result<TestContainer, ContainerError> {
    let container = create_postgres_container().await?;
    let connection_string = format!(
        "postgres://postgres:postgres@127.0.0.1:{}/postgres",
        container
            .get_host_port_ipv4(TEST_CONTAINERS_INTERNAL_PORT)
            .await?
    );
    let pool = create_pool_from_url(connection_string)?;
    Ok(TestContainer::new(container, pool))
}

pub async fn create_postgres_container() -> Result<ContainerAsync<Postgres>, TestcontainersError> {
    Postgres::default().start().await
}

#[derive(Debug, From)]
pub enum ContainerError {
    TestContainers(TestcontainersError),
    BuildError(BuildError),
    PoolError(PoolError),
    DieselError(DieselError),
    JoinError(JoinError),
}
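A sketch of using the testcontainers helper in an integration test; the returned `TestContainer` must stay alive for the duration of the test, as the doc comment above warns.

use lib::test::test_containers::create_test_containers_pool;

#[tokio::test]
async fn spins_up_postgres() {
    let test_container = create_test_containers_pool().await.unwrap();
    // Keep `test_container` in scope: dropping it stops the container and
    // invalidates the pool.
    let _conn = test_container.pool.get().await.unwrap();
}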
64
src/time/common.rs
Normal file
@ -0,0 +1,64 @@
use chrono::NaiveDateTime;
use derive_more::{Constructor, From};

/// Represents a date-time interval using naive date-time.
/// All date-times are expected to be in UTC.
#[derive(Debug, Clone, Copy, Default, PartialEq, Eq, Constructor, From)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub struct DateTimeInterval {
    pub start: NaiveDateTime,
    pub end: NaiveDateTime,
}

impl DateTimeInterval {
    /// Creates a new `DateTimeInterval` from the given `start` and `end` `NaiveDateTime`s.
    /// The `start` must be before the `end`.
    /// If `start` is equal to or after `end`, this function will return `None`.
    /// # Arguments
    /// * `start` - The start of the interval.
    /// * `end` - The end of the interval.
    /// # Returns
    /// A new `DateTimeInterval` if `start` is before `end`, otherwise `None`.
    /// # Examples
    /// ```
    /// use chrono::{NaiveDateTime, Duration, Utc};
    /// use lib::time::DateTimeInterval;
    ///
    /// let start = Utc::now().naive_utc();
    /// let end = start + Duration::days(1);
    /// let interval = DateTimeInterval::new_safe(start, end);
    /// assert_eq!(interval, Some((start, end).into()));
    /// ```
    /// Illegal interval:
    /// ```
    /// use chrono::{NaiveDateTime, Utc};
    /// use lib::time::DateTimeInterval;
    /// let start = Utc::now().naive_utc();
    /// let end = start;
    /// let interval = DateTimeInterval::new_safe(start, end);
    /// assert_eq!(interval, None);
    /// ```
    pub fn new_safe(start: NaiveDateTime, end: NaiveDateTime) -> Option<Self> {
        if start < end {
            Some(Self::new(start, end))
        } else {
            None
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use chrono::DateTime;

    #[test]
    fn test_new_safe() {
        let start = DateTime::from_timestamp(0, 0).unwrap().naive_utc();
        let end = DateTime::from_timestamp(1, 0).unwrap().naive_utc();
        assert_eq!(
            DateTimeInterval::new_safe(start, end),
            Some(DateTimeInterval::new(start, end))
        );
        assert_eq!(DateTimeInterval::new_safe(end, start), None);
    }
}
3
src/time/mod.rs
Normal file
@ -0,0 +1,3 @@
mod common;

pub use common::DateTimeInterval;
@ -1,4 +1,6 @@
/// Modify self to contain only distinct elements.
pub trait Distinct {
    /// Modify self to contain only distinct elements.
    fn distinct(&mut self);
}
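One possible implementation of `Distinct` for a newtype, shown only as an illustration (the crate may already ship its own impls that are not part of this diff).

use lib::traits::Distinct;
use std::collections::HashSet;

// Hypothetical newtype used purely for illustration.
struct Tags(Vec<String>);

impl Distinct for Tags {
    fn distinct(&mut self) {
        let mut seen = HashSet::new();
        // Keep the first occurrence of each element, preserving order.
        self.0.retain(|tag| seen.insert(tag.clone()));
    }
}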
@ -1,6 +1,37 @@
/// Create a `HashMap` with the given key-value pairs.
/// There are three ways to use this macro:
/// 1. `map!()`: Create an empty `HashMap`.
/// 2. `map!(usize; 1, 2)`: Create a `HashMap` with the keys `1` and `2` with the default value of `usize`.
/// 3. `map!("one" => 1, "two" => 2)`: Create a `HashMap` with the keys `"one"` and `"two"` with the values `1` and `2` respectively.
/// # Examples
/// ```
/// use std::collections::HashMap;
///
/// let empty_map: HashMap<usize, usize> = lib::map!();
/// assert_eq!(empty_map.len(), 0);
///
/// let map: HashMap<&str, usize> = lib::map!("one" => 1, "two" => 2);
/// assert_eq!(map.len(), 2);
/// assert_eq!(map.get("one"), Some(&1));
/// assert_eq!(map.get("two"), Some(&2));
///
/// let map: HashMap<usize, usize> = lib::map!(usize; 1, 2);
/// assert_eq!(map.len(), 2);
/// assert_eq!(map.get(&1), Some(&0));
/// assert_eq!(map.get(&2), Some(&0));
/// ```
#[macro_export]
macro_rules! map {
    () => { std::collections::HashMap::new() };
    ($default:ty; $($key:expr),+ $(,)?) => {
        {
            let mut temp_map = std::collections::HashMap::new();
            $(
                temp_map.insert($key, <$default>::default());
            )*
            temp_map
        }
    };
    ($($k:expr => $v:expr),* $(,)?) => {
        {
            let mut temp_map = std::collections::HashMap::new();
@ -33,4 +64,19 @@ mod tests {
        assert_eq!(map.get("two"), Some(&2));
        assert_eq!(map.get("three"), Some(&3));
    }

    #[test]
    fn test_map_only_keys() {
        let map: HashMap<usize, usize> = map!(usize; 1, 2, 3);
        assert_eq!(map.len(), 3);
        assert_eq!(map.get(&1), Some(&0));
        assert_eq!(map.get(&2), Some(&0));
        assert_eq!(map.get(&3), Some(&0));
    }

    #[test]
    fn test_map_only_keys_1_key() {
        let map: HashMap<usize, usize> = map!(usize; 1);
        assert_eq!(map.len(), 1);
    }
}