Merge branch 'master' into pulls/361

Chris Tsang 2021-12-25 17:41:29 +08:00 committed by GitHub
commit 47cbf50c3c
82 changed files with 2261 additions and 661 deletions


@ -108,10 +108,12 @@ jobs:
--all
# Run clippy
- uses: actions-rs/clippy-check@v1
- uses: actions-rs/cargo@v1
with:
token: ${{ secrets.GITHUB_TOKEN }}
args: --all-targets --all
command: clippy
args: >
--all-targets
--all
compile-sqlite:
name: Compile SQLite
@ -316,7 +318,7 @@ jobs:
fail-fast: false
matrix:
os: [ubuntu-latest]
path: [86, 249, 262, 319, 324, 352]
path: [86, 249, 262, 319, 324, 352, 356]
steps:
- uses: actions/checkout@v2


@ -5,6 +5,22 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](http://keepachangelog.com/)
and this project adheres to [Semantic Versioning](http://semver.org/).
## 0.4.2 - 2021-12-12
### Fixed Issues
* Delete::many() doesn't work when schema_name is defined https://github.com/SeaQL/sea-orm/issues/362
* find_with_related panic https://github.com/SeaQL/sea-orm/issues/374
* How to define rust type of TIMESTAMP? https://github.com/SeaQL/sea-orm/issues/344
* Add Table on the generated Column enum https://github.com/SeaQL/sea-orm/issues/356
### Merged PRs
* `Delete::many()` with `TableRef` by @billy1624 in https://github.com/SeaQL/sea-orm/pull/363
* Fix related & linked with enum columns by @billy1624 in https://github.com/SeaQL/sea-orm/pull/376
* Temporary Fix: Handling MySQL & SQLite timestamp columns by @billy1624 in https://github.com/SeaQL/sea-orm/pull/379
* Add feature to generate table Iden by @Sytten in https://github.com/SeaQL/sea-orm/pull/360
**Full Changelog**: https://github.com/SeaQL/sea-orm/compare/0.4.1...0.4.2
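
For context on the `table_iden` item above, here is a minimal sketch condensed from the `issues/356` test added in this commit; the struct fields are illustrative:

```rust
use sea_orm::entity::prelude::*;

// With `table_iden`, the derived `Column` enum gains an extra `Table` variant
// whose Iden resolves to the table name itself.
#[derive(Clone, Debug, PartialEq, DeriveEntityModel)]
#[sea_orm(table_name = "model", table_iden)]
pub struct Model {
    #[sea_orm(primary_key)]
    pub id: i32,
    pub name: String,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {}

impl ActiveModelBehavior for ActiveModel {}

// e.g. Column::Table.to_string() == "model" and Column::Id.to_string() == "id"
```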
## 0.4.1 - 2021-12-05
### Fixed Issues


@ -3,7 +3,7 @@ members = [".", "sea-orm-macros", "sea-orm-codegen"]
[package]
name = "sea-orm"
version = "0.4.1"
version = "0.4.2"
authors = ["Chris Tsang <tyt2y7@gmail.com>"]
edition = "2021"
description = "🐚 An async & dynamic ORM for Rust"
@ -27,17 +27,18 @@ async-trait = { version = "^0.1" }
chrono = { version = "^0", optional = true }
futures = { version = "^0.3" }
futures-util = { version = "^0.3" }
log = { version = "^0.4", optional = true }
tracing = { version = "0.1", features = ["log"] }
rust_decimal = { version = "^1", optional = true }
sea-orm-macros = { version = "^0.4.1", path = "sea-orm-macros", optional = true }
sea-query = { version = "^0.19.1", features = ["thread-safe"] }
sea-strum = { version = "^0.21", git = "https://github.com/SeaQL/strum.git", branch = "pulls/1", features = ["derive", "sea-orm"] }
sea-orm-macros = { version = "^0.4.2", path = "sea-orm-macros", optional = true }
sea-query = { version = "^0.20.0", features = ["thread-safe"] }
sea-strum = { version = "^0.23", features = ["derive", "sea-orm"] }
serde = { version = "^1.0", features = ["derive"] }
serde_json = { version = "^1", optional = true }
sqlx = { version = "^0.5", optional = true }
uuid = { version = "0.8", features = ["serde", "v4"], optional = true }
ouroboros = "0.11"
ouroboros = "0.14"
url = "^2.2"
once_cell = "1.8"
[dev-dependencies]
smol = { version = "^1.2" }
@ -47,12 +48,12 @@ tokio = { version = "^1.6", features = ["full"] }
actix-rt = { version = "2.2.0" }
maplit = { version = "^1" }
rust_decimal_macros = { version = "^1" }
env_logger = { version = "^0.9" }
tracing-subscriber = { version = "0.3", features = ["env-filter"] }
sea-orm = { path = ".", features = ["debug-print"] }
pretty_assertions = { version = "^0.7" }
[features]
debug-print = ["log"]
debug-print = []
default = [
"macros",
"mock",


@ -123,7 +123,7 @@ let mut pear: fruit::ActiveModel = pear.unwrap().into();
pear.name = Set("Sweet pear".to_owned());
// update one
let pear: fruit::ActiveModel = pear.update(db).await?;
let pear: fruit::Model = pear.update(db).await?;
// update many: UPDATE "fruit" SET "cake_id" = NULL WHERE "fruit"."name" LIKE '%Apple%'
Fruit::update_many()
@ -136,13 +136,13 @@ Fruit::update_many()
### Save
```rust
let banana = fruit::ActiveModel {
id: Unset(None),
id: NotSet,
name: Set("Banana".to_owned()),
..Default::default()
};
// create, because primary key `id` is `Unset`
let mut banana = banana.save(db).await?;
// create, because primary key `id` is `NotSet`
let mut banana = banana.save(db).await?.into_active_model();
banana.name = Set("Banana Mongo".to_owned());
@ -152,12 +152,16 @@ let banana = banana.save(db).await?;
```
### Delete
```rust
let orange: Option<fruit::Model> = Fruit::find_by_id(1).one(db).await?;
let orange: fruit::ActiveModel = orange.unwrap().into();
// delete one
fruit::Entity::delete(orange).exec(db).await?;
let orange: Option<fruit::Model> = Fruit::find_by_id(1).one(db).await?;
let orange: fruit::Model = orange.unwrap();
fruit::Entity::delete(orange.into_active_model())
.exec(db)
.await?;
// or simply
let orange: Option<fruit::Model> = Fruit::find_by_id(1).one(db).await?;
let orange: fruit::Model = orange.unwrap();
orange.delete(db).await?;
// delete many: DELETE FROM "fruit" WHERE "fruit"."name" LIKE 'Orange'
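// A minimal sketch tying the updated API together, assuming the
// `IntoActiveModel` trait is in scope (e.g. `use sea_orm::IntoActiveModel;`);
// `update` now returns a plain `fruit::Model`:
let pear: fruit::Model = pear.update(db).await?;
// while the result of `save` is converted back into an ActiveModel before
// further edits:
let mut banana = banana.save(db).await?.into_active_model();
banana.name = Set("Banana Mongo".to_owned());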


@ -0,0 +1,93 @@
version: "3"
services:
#
# MariaDB
#
mariadb_10_6:
image: mariadb:10.6
ports:
- 3306
environment:
MYSQL_DB: mysql
MYSQL_USER: sea
MYSQL_PASSWORD: sea
MYSQL_ALLOW_EMPTY_PASSWORD: yes
MYSQL_ROOT_PASSWORD: root
mariadb_10_5:
image: mariadb:10.5
ports:
- 3306
environment:
MYSQL_DB: mysql
MYSQL_USER: sea
MYSQL_PASSWORD: sea
MYSQL_ALLOW_EMPTY_PASSWORD: yes
MYSQL_ROOT_PASSWORD: root
mariadb_10_4:
image: mariadb:10.4
ports:
- 3306
environment:
MYSQL_DB: mysql
MYSQL_USER: sea
MYSQL_PASSWORD: sea
MYSQL_ALLOW_EMPTY_PASSWORD: yes
MYSQL_ROOT_PASSWORD: root
#
# MySQL
#
mysql_8_0:
image: mysql:8.0
ports:
- 3306
environment:
MYSQL_DB: mysql
MYSQL_USER: sea
MYSQL_PASSWORD: sea
MYSQL_ALLOW_EMPTY_PASSWORD: yes
MYSQL_ROOT_PASSWORD: root
mysql_5_7:
image: mysql:5.7
ports:
- 3306
environment:
MYSQL_DB: mysql
MYSQL_USER: sea
MYSQL_PASSWORD: sea
MYSQL_ALLOW_EMPTY_PASSWORD: yes
MYSQL_ROOT_PASSWORD: root
#
# PostgreSQL
#
postgres_13:
image: postgres:13
ports:
- 5432
environment:
POSTGRES_USER: root
POSTGRES_PASSWORD: root
postgres_12:
image: postgres:12
ports:
- 5432
environment:
POSTGRES_USER: root
POSTGRES_PASSWORD: root
postgres_11:
image: postgres:11
ports:
- 5432
environment:
POSTGRES_USER: root
POSTGRES_PASSWORD: root


@ -0,0 +1,87 @@
# Some Common Docker Commands You Might Need (use with caution)
#
# Delete all containers
# $ docker rm -f $(docker ps -a -q)
#
# Delete all volumes
# $ docker volume rm $(docker volume ls -q)
#
# Delete all images
# $ docker image rm $(docker image ls -q)
# Setup MariaDB
docker run \
--name "mariadb-10.6" \
--env MYSQL_DB="mysql" \
--env MYSQL_USER="sea" \
--env MYSQL_PASSWORD="sea" \
--env MYSQL_ALLOW_EMPTY_PASSWORD="yes" \
--env MYSQL_ROOT_PASSWORD="root" \
-d -p 3306:3306 mariadb:10.6
docker stop "mariadb-10.6"
docker run \
--name "mariadb-10.5" \
--env MYSQL_DB="mysql" \
--env MYSQL_USER="sea" \
--env MYSQL_PASSWORD="sea" \
--env MYSQL_ALLOW_EMPTY_PASSWORD="yes" \
--env MYSQL_ROOT_PASSWORD="root" \
-d -p 3306:3306 mariadb:10.5
docker stop "mariadb-10.5"
docker run \
--name "mariadb-10.4" \
--env MYSQL_DB="mysql" \
--env MYSQL_USER="sea" \
--env MYSQL_PASSWORD="sea" \
--env MYSQL_ALLOW_EMPTY_PASSWORD="yes" \
--env MYSQL_ROOT_PASSWORD="root" \
-d -p 3306:3306 mariadb:10.4
docker stop "mariadb-10.4"
# Setup MySQL
docker run \
--name "mysql-8.0" \
--env MYSQL_DB="mysql" \
--env MYSQL_USER="sea" \
--env MYSQL_PASSWORD="sea" \
--env MYSQL_ALLOW_EMPTY_PASSWORD="yes" \
--env MYSQL_ROOT_PASSWORD="root" \
-d -p 3306:3306 mysql:8.0
docker stop "mysql-8.0"
docker run \
--name "mysql-5.7" \
--env MYSQL_DB="mysql" \
--env MYSQL_USER="sea" \
--env MYSQL_PASSWORD="sea" \
--env MYSQL_ALLOW_EMPTY_PASSWORD="yes" \
--env MYSQL_ROOT_PASSWORD="root" \
-d -p 3306:3306 mysql:5.7
docker stop "mysql-5.7"
# Setup PostgreSQL
docker run \
--name "postgres-13" \
--env POSTGRES_USER="root" \
--env POSTGRES_PASSWORD="root" \
-d -p 5432:5432 postgres:13
docker stop "postgres-13"
docker run \
--name "postgres-12" \
--env POSTGRES_USER="root" \
--env POSTGRES_PASSWORD="root" \
-d -p 5432:5432 postgres:12
docker stop "postgres-12"
docker run \
--name "postgres-11" \
--env POSTGRES_USER="root" \
--env POSTGRES_PASSWORD="root" \
-d -p 5432:5432 postgres:11
docker stop "postgres-11"


@ -18,12 +18,12 @@ tera = "1.8.0"
dotenv = "0.15"
listenfd = "0.3.3"
serde = "1"
env_logger = "0.8"
tracing-subscriber = { version = "0.3", features = ["env-filter"] }
[dependencies.sea-orm]
path = "../../" # remove this line in your own project
version = "^0.4.0"
features = ["macros", "runtime-actix-native-tls"]
features = ["macros", "runtime-actix-native-tls", "debug-print"]
default-features = false
[features]


@ -155,7 +155,7 @@ async fn delete(data: web::Data<AppState>, id: web::Path<i32>) -> Result<HttpRes
#[actix_web::main]
async fn main() -> std::io::Result<()> {
std::env::set_var("RUST_LOG", "debug");
env_logger::init();
tracing_subscriber::fmt::init();
// get env vars
dotenv::dotenv().ok();


@ -18,12 +18,12 @@ tera = "1.8.0"
dotenv = "0.15"
listenfd = "0.3.3"
serde = "1"
env_logger = "0.8"
tracing-subscriber = { version = "0.3", features = ["env-filter"] }
[dependencies.sea-orm]
path = "../../" # remove this line in your own project
version = "^0.4.0"
features = ["macros", "runtime-async-std-native-tls"]
features = ["macros", "runtime-async-std-native-tls", "debug-print"]
default-features = false
[features]


@ -181,7 +181,7 @@ async fn delete(
#[actix_web::main]
async fn main() -> std::io::Result<()> {
std::env::set_var("RUST_LOG", "debug");
env_logger::init();
tracing_subscriber::fmt::init();
// get env vars
dotenv::dotenv().ok();


@ -9,22 +9,22 @@ publish = false
[workspace]
[dependencies]
tokio = { version = "1.5", features = ["full"] }
axum = { version = "0.3.0" }
tower = "0.4.10"
tower-http = { version = "0.1", features = ["fs"] }
tower-cookies = { version = "0.3" }
tokio = { version = "1.14", features = ["full"] }
axum = { version = "0.4.2" }
tower = "0.4.11"
tower-http = { version = "0.2", features = ["fs"] }
tower-cookies = { version = "0.4" }
anyhow = "1"
dotenv = "0.15"
env_logger = "0.9"
serde = "1"
serde_json = "1"
tera = "1"
tracing-subscriber = { version = "0.3", features = ["env-filter"] }
[dependencies.sea-orm]
path = "../../" # remove this line in your own project
version = "^0.4.0"
features = ["macros", "runtime-tokio-native-tls"]
version = "^0.4.2"
features = ["macros", "runtime-tokio-native-tls", "debug-print"]
default-features = false
[features]


@ -3,11 +3,10 @@ mod post;
mod setup;
use axum::{
error_handling::HandleErrorExt,
extract::{Extension, Form, Path, Query},
http::StatusCode,
response::Html,
routing::{get, post, service_method_routing},
routing::{get, post, get_service},
AddExtensionLayer, Router, Server,
};
use flash::{get_flash_cookie, post_response, PostResponse};
@ -24,7 +23,7 @@ use tower_http::services::ServeDir;
#[tokio::main]
async fn main() -> anyhow::Result<()> {
env::set_var("RUST_LOG", "debug");
env_logger::init();
tracing_subscriber::fmt::init();
dotenv::dotenv().ok();
let db_url = env::var("DATABASE_URL").expect("DATABASE_URL is not set in .env file");
@ -47,11 +46,11 @@ async fn main() -> anyhow::Result<()> {
.route("/delete/:id", post(delete_post))
.nest(
"/static",
service_method_routing::get(ServeDir::new(concat!(
get_service(ServeDir::new(concat!(
env!("CARGO_MANIFEST_DIR"),
"/static"
)))
.handle_error(|error: std::io::Error| {
.handle_error(|error: std::io::Error| async move {
(
StatusCode::INTERNAL_SERVER_ERROR,
format!("Unhandled internal error: {}", error),


@ -33,7 +33,7 @@ pub async fn insert_and_update(db: &DbConn) -> Result<(), DbErr> {
let mut pear: fruit::ActiveModel = pear.unwrap().into();
pear.name = Set("Sweet pear".to_owned());
let pear: fruit::ActiveModel = pear.update(db).await?;
let pear: fruit::Model = pear.update(db).await?;
println!();
println!("Updated: {:?}\n", pear);
@ -46,14 +46,14 @@ pub async fn save_active_model(db: &DbConn) -> Result<(), DbErr> {
name: Set("Banana".to_owned()),
..Default::default()
};
let mut banana = banana.save(db).await?;
let mut banana: fruit::ActiveModel = banana.save(db).await?.into_active_model();
println!();
println!("Inserted: {:?}\n", banana);
banana.name = Set("Banana Mongo".to_owned());
let banana = banana.save(db).await?;
let banana: fruit::ActiveModel = banana.save(db).await?.into_active_model();
println!();
println!("Updated: {:?}\n", banana);
@ -81,7 +81,7 @@ mod form {
async fn save_custom_active_model(db: &DbConn) -> Result<(), DbErr> {
let pineapple = form::ActiveModel {
id: Unset(None),
id: NotSet,
name: Set("Pineapple".to_owned()),
};


@ -12,10 +12,10 @@ async-stream = { version = "^0.3" }
async-trait = { version = "0.1" }
futures = { version = "^0.3" }
futures-util = { version = "^0.3" }
rocket = { git = "https://github.com/SergioBenitez/Rocket.git", features = [
rocket = { version = "0.5.0-rc.1", features = [
"json",
] }
rocket_dyn_templates = { git = "https://github.com/SergioBenitez/Rocket.git", features = [
rocket_dyn_templates = { version = "0.1.0-rc.1", features = [
"tera",
] }
serde_json = { version = "^1" }


@ -7,7 +7,8 @@ use rocket::fs::{relative, FileServer};
use rocket::request::FlashMessage;
use rocket::response::{Flash, Redirect};
use rocket::{Build, Request, Rocket};
use rocket_dyn_templates::{context, Template};
use rocket_dyn_templates::Template;
use serde_json::json;
use sea_orm::{entity::*, query::*};
use sea_orm_rocket::{Connection, Database};
@ -107,13 +108,13 @@ async fn list(
Template::render(
"index",
context! {
page: page,
posts_per_page: posts_per_page,
num_pages: num_pages,
posts: posts,
flash: flash.map(FlashMessage::into_inner),
},
json! ({
"page": page,
"posts_per_page": posts_per_page,
"num_pages": num_pages,
"posts": posts,
"flash": flash.map(FlashMessage::into_inner),
})
)
}
@ -128,9 +129,9 @@ async fn edit(conn: Connection<'_, Db>, id: i32) -> Template {
Template::render(
"edit",
context! {
post: post,
},
json! ({
"post": post,
})
)
}
@ -157,9 +158,9 @@ async fn destroy(conn: Connection<'_, Db>) -> Result<(), rocket::response::Debug
pub fn not_found(req: &Request<'_>) -> Template {
Template::render(
"error/404",
context! {
uri: req.uri()
},
json! ({
"uri": req.uri()
})
)
}

issues/356/Cargo.toml (new file, 11 lines)

@ -0,0 +1,11 @@
[workspace]
# A separate workspace
[package]
name = "sea-orm-issues-356"
version = "0.1.0"
edition = "2021"
publish = false
[dependencies]
sea-orm = { path = "../../", features = [ "sqlx-mysql", "runtime-async-std-native-tls" ]}

issues/356/src/main.rs (new file, 3 lines)

@ -0,0 +1,3 @@
mod model;
pub fn main() {}

issues/356/src/model.rs (new file, 42 lines)

@ -0,0 +1,42 @@
use sea_orm::entity::prelude::*;
#[derive(Clone, Debug, PartialEq, DeriveEntityModel)]
#[sea_orm(table_name = "model", table_iden)]
pub struct Model {
#[sea_orm(primary_key)]
pub id: i32,
pub owner: String,
pub name: String,
pub description: String,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {}
impl ActiveModelBehavior for ActiveModel {}
#[cfg(test)]
mod tests {
use super::*;
use sea_orm::*;
#[test]
fn test_columns_1() {
assert_eq!(
Column::iter()
.map(|col| col.to_string())
.collect::<Vec<_>>(),
vec![
"id".to_owned(),
"owner".to_owned(),
"name".to_owned(),
"description".to_owned(),
]
);
assert_eq!(Column::Table.to_string().as_str(), "model");
assert_eq!(Column::Id.to_string().as_str(), "id");
assert_eq!(Column::Owner.to_string().as_str(), "owner");
assert_eq!(Column::Name.to_string().as_str(), "name");
assert_eq!(Column::Description.to_string().as_str(), "description");
}
}


@ -10,5 +10,5 @@ publish = false
[dependencies]
sea-orm = { path = "../../", features = [ "sqlx-all", "runtime-tokio-native-tls", "debug-print" ] }
tokio = { version = "1", features = ["full"] }
env_logger = { version = "^0.9" }
log = { version = "^0.4" }
tracing-subscriber = { version = "0.3", features = ["env-filter"] }
tracing = { version = "0.1" }


@ -3,9 +3,9 @@ use sea_orm::*;
#[tokio::main]
pub async fn main() {
env_logger::builder()
.filter_level(log::LevelFilter::Debug)
.is_test(true)
tracing_subscriber::fmt()
.with_max_level(tracing::Level::DEBUG)
.with_test_writer()
.init();
let db = Database::connect("mysql://sea:sea@localhost/bakery")


@ -3,7 +3,7 @@
[package]
name = "sea-orm-cli"
version = "0.4.1"
version = "0.4.2"
authors = [ "Billy Chan <ccw.billy.123@gmail.com>" ]
edition = "2021"
description = "Command line utility for SeaORM"
@ -21,17 +21,18 @@ path = "src/main.rs"
clap = { version = "^2.33.3" }
dotenv = { version = "^0.15" }
async-std = { version = "^1.9", features = [ "attributes" ] }
sea-orm-codegen = { version = "^0.4.1", path = "../sea-orm-codegen" }
sea-schema = { version = "^0.2.9", default-features = false, features = [
sea-orm-codegen = { version = "^0.4.2", path = "../sea-orm-codegen" }
sea-schema = { version = "0.3.0", git = "https://github.com/SeaQL/sea-schema.git", branch = "sqlite-codegen", default-features = false, features = [
"debug-print",
"sqlx-mysql",
"sqlx-sqlite",
"sqlx-postgres",
"discovery",
"writer",
] }
sqlx = { version = "^0.5", default-features = false, features = [ "mysql", "postgres" ] }
env_logger = { version = "^0.9" }
log = { version = "^0.4" }
tracing-subscriber = { version = "0.3", features = ["env-filter"] }
tracing = { version = "0.1" }
url = "^2.2"
[dev-dependencies]


@ -12,6 +12,9 @@ Running Entity Generator:
# MySQL (`--database-schema` option is ignored)
cargo run -- generate entity -u mysql://sea:sea@localhost/bakery -o out
# SQLite (`--database-schema` option is ignored)
cargo run -- generate entity -u sqlite://bakery.db -o out
# PostgreSQL
cargo run -- generate entity -u postgres://sea:sea@localhost/bakery -s public -o out
```


@ -1,6 +1,5 @@
use clap::ArgMatches;
use dotenv::dotenv;
use log::LevelFilter;
use sea_orm_codegen::{EntityTransformer, OutputFile, WithSerde};
use std::{error::Error, fmt::Display, fs, io::Write, path::Path, process::Command, str::FromStr};
use url::Url;
@ -33,9 +32,9 @@ async fn run_generate_command(matches: &ArgMatches<'_>) -> Result<(), Box<dyn Er
let expanded_format = args.is_present("EXPANDED_FORMAT");
let with_serde = args.value_of("WITH_SERDE").unwrap();
if args.is_present("VERBOSE") {
let _ = ::env_logger::builder()
.filter_level(LevelFilter::Debug)
.is_test(true)
let _ = tracing_subscriber::fmt()
.with_max_level(tracing::Level::DEBUG)
.with_test_writer()
.try_init();
}
@ -54,39 +53,20 @@ async fn run_generate_command(matches: &ArgMatches<'_>) -> Result<(), Box<dyn Er
let url_password = url.password();
let url_host = url.host_str();
// Panic on any that are missing
if url_username.is_empty() {
panic!("No username was found in the database url");
}
if url_password.is_none() {
panic!("No password was found in the database url");
}
if url_host.is_none() {
panic!("No host was found in the database url");
}
let is_sqlite = url.scheme() == "sqlite";
// The database name should be the first element of the path string
//
// Throwing an error if there is no database name since it might be
// accepted by the database without it, while we're looking to dump
// information from a particular database
let database_name = url
.path_segments()
.unwrap_or_else(|| {
panic!(
"There is no database name as part of the url path: {}",
url.as_str()
)
})
.next()
.unwrap();
// An empty string as the database name is also an error
if database_name.is_empty() {
panic!(
"There is no database name as part of the url path: {}",
url.as_str()
);
// Skip checking if it's SQLite
if !is_sqlite {
// Panic on any that are missing
if url_username.is_empty() {
panic!("No username was found in the database url");
}
if url_password.is_none() {
panic!("No password was found in the database url");
}
if url_host.is_none() {
panic!("No host was found in the database url");
}
}
// Closures for filtering tables
@ -105,6 +85,36 @@ async fn run_generate_command(matches: &ArgMatches<'_>) -> Result<(), Box<dyn Er
}
};
let database_name = if !is_sqlite {
// The database name should be the first element of the path string
//
// Throwing an error if there is no database name since it might be
// accepted by the database without it, while we're looking to dump
// information from a particular database
let database_name = url
.path_segments()
.unwrap_or_else(|| {
panic!(
"There is no database name as part of the url path: {}",
url.as_str()
)
})
.next()
.unwrap();
// An empty string as the database name is also an error
if database_name.is_empty() {
panic!(
"There is no database name as part of the url path: {}",
url.as_str()
);
}
database_name
} else {
Default::default()
};
let table_stmts = match url.scheme() {
"mysql" => {
use sea_schema::mysql::discovery::SchemaDiscovery;
@ -121,6 +131,21 @@ async fn run_generate_command(matches: &ArgMatches<'_>) -> Result<(), Box<dyn Er
.map(|schema| schema.write())
.collect()
}
"sqlite" => {
use sea_schema::sqlite::SchemaDiscovery;
use sqlx::SqlitePool;
let connection = SqlitePool::connect(url.as_str()).await?;
let schema_discovery = SchemaDiscovery::new(connection);
let schema = schema_discovery.discover().await?;
schema
.tables
.into_iter()
.filter(|schema| filter_tables(&schema.name))
.filter(|schema| filter_hidden_tables(&schema.name))
.map(|schema| schema.write())
.collect()
}
"postgres" | "postgresql" => {
use sea_schema::postgres::discovery::SchemaDiscovery;
use sqlx::PgPool;


@ -1,6 +1,6 @@
[package]
name = "sea-orm-codegen"
version = "0.4.1"
version = "0.4.2"
authors = ["Billy Chan <ccw.billy.123@gmail.com>"]
edition = "2021"
description = "Code Generator for SeaORM"
@ -15,7 +15,7 @@ name = "sea_orm_codegen"
path = "src/lib.rs"
[dependencies]
sea-query = { version = "^0.16.4" }
sea-query = { version = "0.20.0" }
syn = { version = "^1", default-features = false, features = [
"derive",
"parsing",


@ -0,0 +1,31 @@
use heck::CamelCase;
use proc_macro2::TokenStream;
use quote::{format_ident, quote};
#[derive(Clone, Debug)]
pub struct ActiveEnum {
pub(crate) enum_name: String,
pub(crate) values: Vec<String>,
}
impl ActiveEnum {
pub fn impl_active_enum(&self) -> TokenStream {
let enum_name = &self.enum_name;
let enum_iden = format_ident!("{}", enum_name.to_camel_case());
let values = &self.values;
let variants = self
.values
.iter()
.map(|v| format_ident!("{}", v.to_camel_case()));
quote! {
#[derive(Debug, Clone, PartialEq, EnumIter, DeriveActiveEnum)]
#[sea_orm(rs_type = "String", db_type = "Enum", enum_name = #enum_name)]
pub enum #enum_iden {
#(
#[sea_orm(string_value = #values)]
#variants,
)*
}
}
}
}


@ -24,26 +24,27 @@ impl Column {
pub fn get_rs_type(&self) -> TokenStream {
#[allow(unreachable_patterns)]
let ident: TokenStream = match self.col_type {
let ident: TokenStream = match &self.col_type {
ColumnType::Char(_)
| ColumnType::String(_)
| ColumnType::Text
| ColumnType::Custom(_) => "String",
ColumnType::TinyInteger(_) => "i8",
ColumnType::SmallInteger(_) => "i16",
ColumnType::Integer(_) => "i32",
ColumnType::BigInteger(_) => "i64",
ColumnType::Float(_) => "f32",
ColumnType::Double(_) => "f64",
ColumnType::Json | ColumnType::JsonBinary => "Json",
ColumnType::Date => "Date",
ColumnType::Time(_) => "Time",
ColumnType::DateTime(_) | ColumnType::Timestamp(_) => "DateTime",
ColumnType::TimestampWithTimeZone(_) => "DateTimeWithTimeZone",
ColumnType::Decimal(_) | ColumnType::Money(_) => "Decimal",
ColumnType::Uuid => "Uuid",
ColumnType::Binary(_) => "Vec<u8>",
ColumnType::Boolean => "bool",
| ColumnType::Custom(_) => "String".to_owned(),
ColumnType::TinyInteger(_) => "i8".to_owned(),
ColumnType::SmallInteger(_) => "i16".to_owned(),
ColumnType::Integer(_) => "i32".to_owned(),
ColumnType::BigInteger(_) => "i64".to_owned(),
ColumnType::Float(_) => "f32".to_owned(),
ColumnType::Double(_) => "f64".to_owned(),
ColumnType::Json | ColumnType::JsonBinary => "Json".to_owned(),
ColumnType::Date => "Date".to_owned(),
ColumnType::Time(_) => "Time".to_owned(),
ColumnType::DateTime(_) | ColumnType::Timestamp(_) => "DateTime".to_owned(),
ColumnType::TimestampWithTimeZone(_) => "DateTimeWithTimeZone".to_owned(),
ColumnType::Decimal(_) | ColumnType::Money(_) => "Decimal".to_owned(),
ColumnType::Uuid => "Uuid".to_owned(),
ColumnType::Binary(_) => "Vec<u8>".to_owned(),
ColumnType::Boolean => "bool".to_owned(),
ColumnType::Enum(name, _) => name.to_camel_case(),
_ => unimplemented!(),
}
.parse()


@ -1,3 +1,4 @@
mod active_enum;
mod base_entity;
mod column;
mod conjunct_relation;
@ -6,6 +7,7 @@ mod relation;
mod transformer;
mod writer;
pub use active_enum::*;
pub use base_entity::*;
pub use column::*;
pub use conjunct_relation::*;


@ -1,38 +1,52 @@
use crate::{
Column, ConjunctRelation, Entity, EntityWriter, Error, PrimaryKey, Relation, RelationType,
ActiveEnum, Column, ConjunctRelation, Entity, EntityWriter, Error, PrimaryKey, Relation,
RelationType,
};
use sea_query::TableStatement;
use sea_query::{ColumnSpec, TableCreateStatement};
use std::collections::HashMap;
#[derive(Clone, Debug)]
pub struct EntityTransformer;
impl EntityTransformer {
pub fn transform(table_stmts: Vec<TableStatement>) -> Result<EntityWriter, Error> {
pub fn transform(table_create_stmts: Vec<TableCreateStatement>) -> Result<EntityWriter, Error> {
let mut enums: HashMap<String, ActiveEnum> = HashMap::new();
let mut inverse_relations: HashMap<String, Vec<Relation>> = HashMap::new();
let mut conjunct_relations: HashMap<String, Vec<ConjunctRelation>> = HashMap::new();
let mut entities = HashMap::new();
for table_stmt in table_stmts.into_iter() {
let table_create = match table_stmt {
TableStatement::Create(stmt) => stmt,
_ => {
return Err(Error::TransformError(
"TableStatement should be create".into(),
))
}
};
for table_create in table_create_stmts.into_iter() {
let table_name = match table_create.get_table_name() {
Some(s) => s,
Some(table_ref) => match table_ref {
sea_query::TableRef::Table(t)
| sea_query::TableRef::SchemaTable(_, t)
| sea_query::TableRef::DatabaseSchemaTable(_, _, t)
| sea_query::TableRef::TableAlias(t, _)
| sea_query::TableRef::SchemaTableAlias(_, t, _)
| sea_query::TableRef::DatabaseSchemaTableAlias(_, _, t, _) => t.to_string(),
_ => unimplemented!(),
},
None => {
return Err(Error::TransformError(
"Table name should not be empty".into(),
))
}
};
let mut primary_keys: Vec<PrimaryKey> = Vec::new();
let columns: Vec<Column> = table_create
.get_columns()
.iter()
.map(|col_def| col_def.into())
.map(|col_def| {
let primary_key = col_def
.get_column_spec()
.iter()
.any(|spec| matches!(spec, ColumnSpec::PrimaryKey));
if primary_key {
primary_keys.push(PrimaryKey {
name: col_def.get_column_name(),
});
}
col_def.into()
})
.map(|mut col: Column| {
col.unique = table_create
.get_indexes()
@ -44,6 +58,18 @@ impl EntityTransformer {
> 0;
col
})
.map(|col| {
if let sea_query::ColumnType::Enum(enum_name, values) = &col.col_type {
enums.insert(
enum_name.clone(),
ActiveEnum {
enum_name: enum_name.clone(),
values: values.clone(),
},
);
}
col
})
.collect();
let mut ref_table_counts: HashMap<String, usize> = HashMap::new();
let relations: Vec<Relation> = table_create
@ -77,20 +103,21 @@ impl EntityTransformer {
})
.rev()
.collect();
let primary_keys = table_create
.get_indexes()
.iter()
.filter(|index| index.is_primary_key())
.map(|index| {
index
.get_index_spec()
.get_column_names()
.into_iter()
.map(|name| PrimaryKey { name })
.collect::<Vec<_>>()
})
.flatten()
.collect();
primary_keys.extend(
table_create
.get_indexes()
.iter()
.filter(|index| index.is_primary_key())
.map(|index| {
index
.get_index_spec()
.get_column_names()
.into_iter()
.map(|name| PrimaryKey { name })
.collect::<Vec<_>>()
})
.flatten(),
);
let entity = Entity {
table_name: table_name.clone(),
columns,
@ -158,9 +185,17 @@ impl EntityTransformer {
}
}
}
for (tbl_name, mut relations) in inverse_relations.into_iter() {
for (tbl_name, relations) in inverse_relations.into_iter() {
if let Some(entity) = entities.get_mut(&tbl_name) {
entity.relations.append(&mut relations);
for relation in relations.into_iter() {
let duplicate_relation = entity
.relations
.iter()
.any(|rel| rel.ref_table == relation.ref_table);
if !duplicate_relation {
entity.relations.push(relation);
}
}
}
}
for (tbl_name, mut conjunct_relations) in conjunct_relations.into_iter() {
@ -170,6 +205,7 @@ impl EntityTransformer {
}
Ok(EntityWriter {
entities: entities.into_iter().map(|(_, v)| v).collect(),
enums,
})
}
}


@ -1,13 +1,14 @@
use std::str::FromStr;
use crate::Entity;
use crate::{ActiveEnum, Entity};
use heck::CamelCase;
use proc_macro2::TokenStream;
use quote::quote;
use quote::{format_ident, quote};
use std::{collections::HashMap, str::FromStr};
use syn::{punctuated::Punctuated, token::Comma};
#[derive(Clone, Debug)]
pub struct EntityWriter {
pub(crate) entities: Vec<Entity>,
pub(crate) enums: HashMap<String, ActiveEnum>,
}
pub struct WriterOutput {
@ -83,6 +84,9 @@ impl EntityWriter {
files.extend(self.write_entities(expanded_format, with_serde));
files.push(self.write_mod());
files.push(self.write_prelude());
if !self.enums.is_empty() {
files.push(self.write_sea_orm_active_enums());
}
WriterOutput { files }
}
@ -109,11 +113,7 @@ impl EntityWriter {
pub fn write_mod(&self) -> OutputFile {
let mut lines = Vec::new();
Self::write_doc_comment(&mut lines);
let code_blocks: Vec<TokenStream> = self
.entities
.iter()
.map(|entity| Self::gen_mod(entity))
.collect();
let code_blocks: Vec<TokenStream> = self.entities.iter().map(Self::gen_mod).collect();
Self::write(
&mut lines,
vec![quote! {
@ -122,6 +122,14 @@ impl EntityWriter {
);
lines.push("".to_owned());
Self::write(&mut lines, code_blocks);
if !self.enums.is_empty() {
Self::write(
&mut lines,
vec![quote! {
pub mod sea_orm_active_enums;
}],
);
}
OutputFile {
name: "mod.rs".to_owned(),
content: lines.join("\n"),
@ -131,11 +139,7 @@ impl EntityWriter {
pub fn write_prelude(&self) -> OutputFile {
let mut lines = Vec::new();
Self::write_doc_comment(&mut lines);
let code_blocks = self
.entities
.iter()
.map(|entity| Self::gen_prelude_use(entity))
.collect();
let code_blocks = self.entities.iter().map(Self::gen_prelude_use).collect();
Self::write(&mut lines, code_blocks);
OutputFile {
name: "prelude.rs".to_owned(),
@ -143,6 +147,28 @@ impl EntityWriter {
}
}
pub fn write_sea_orm_active_enums(&self) -> OutputFile {
let mut lines = Vec::new();
Self::write_doc_comment(&mut lines);
Self::write(
&mut lines,
vec![quote! {
use sea_orm::entity::prelude::*;
}],
);
lines.push("".to_owned());
let code_blocks = self
.enums
.iter()
.map(|(_, active_enum)| active_enum.impl_active_enum())
.collect();
Self::write(&mut lines, code_blocks);
OutputFile {
name: "sea_orm_active_enums.rs".to_owned(),
content: lines.join("\n"),
}
}
pub fn write(lines: &mut Vec<String>, code_blocks: Vec<TokenStream>) {
lines.extend(
code_blocks
@ -163,8 +189,10 @@ impl EntityWriter {
}
pub fn gen_expanded_code_blocks(entity: &Entity, with_serde: &WithSerde) -> Vec<TokenStream> {
let mut imports = Self::gen_import(with_serde);
imports.extend(Self::gen_import_active_enum(entity));
let mut code_blocks = vec![
Self::gen_import(with_serde),
imports,
Self::gen_entity_struct(),
Self::gen_impl_entity_name(entity),
Self::gen_model_struct(entity, with_serde),
@ -182,10 +210,9 @@ impl EntityWriter {
}
pub fn gen_compact_code_blocks(entity: &Entity, with_serde: &WithSerde) -> Vec<TokenStream> {
let mut code_blocks = vec![
Self::gen_import(with_serde),
Self::gen_compact_model_struct(entity, with_serde),
];
let mut imports = Self::gen_import(with_serde);
imports.extend(Self::gen_import_active_enum(entity));
let mut code_blocks = vec![imports, Self::gen_compact_model_struct(entity, with_serde)];
let relation_defs = if entity.get_relation_enum_name().is_empty() {
vec![
Self::gen_relation_enum(entity),
@ -249,6 +276,21 @@ impl EntityWriter {
}
}
pub fn gen_import_active_enum(entity: &Entity) -> TokenStream {
entity
.columns
.iter()
.fold(TokenStream::new(), |mut ts, col| {
if let sea_query::ColumnType::Enum(enum_name, _) = &col.col_type {
let enum_name = format_ident!("{}", enum_name.to_camel_case());
ts.extend(vec![quote! {
use super::sea_orm_active_enums::#enum_name;
}]);
}
ts
})
}
pub fn gen_model_struct(entity: &Entity, with_serde: &WithSerde) -> TokenStream {
let column_names_snake_case = entity.get_column_names_snake_case();
let column_rs_types = entity.get_column_rs_types();


@ -1,6 +1,6 @@
[package]
name = "sea-orm-macros"
version = "0.4.1"
version = "0.4.2"
authors = [ "Billy Chan <ccw.billy.123@gmail.com>" ]
edition = "2021"
description = "Derive macros for SeaORM"


@ -11,6 +11,7 @@ pub mod derive_attr {
pub relation: Option<syn::Ident>,
pub schema_name: Option<syn::Lit>,
pub table_name: Option<syn::Lit>,
pub table_iden: Option<()>,
}
}


@ -80,7 +80,7 @@ pub fn expand_derive_active_model(ident: Ident, data: Data) -> syn::Result<Token
impl std::convert::From<<Entity as EntityTrait>::Model> for ActiveModel {
fn from(m: <Entity as EntityTrait>::Model) -> Self {
Self {
#(#field: sea_orm::unchanged_active_value_not_intended_for_public_use(m.#field)),*
#(#field: sea_orm::ActiveValue::unchanged(m.#field)),*
}
}
}
@ -99,18 +99,18 @@ pub fn expand_derive_active_model(ident: Ident, data: Data) -> syn::Result<Token
fn take(&mut self, c: <Self::Entity as EntityTrait>::Column) -> sea_orm::ActiveValue<sea_orm::Value> {
match c {
#(<Self::Entity as EntityTrait>::Column::#name => {
let mut value = sea_orm::ActiveValue::unset();
let mut value = sea_orm::ActiveValue::not_set();
std::mem::swap(&mut value, &mut self.#field);
value.into_wrapped_value()
},)*
_ => sea_orm::ActiveValue::unset(),
_ => sea_orm::ActiveValue::not_set(),
}
}
fn get(&self, c: <Self::Entity as EntityTrait>::Column) -> sea_orm::ActiveValue<sea_orm::Value> {
match c {
#(<Self::Entity as EntityTrait>::Column::#name => self.#field.clone().into_wrapped_value(),)*
_ => sea_orm::ActiveValue::unset(),
_ => sea_orm::ActiveValue::not_set(),
}
}
@ -121,23 +121,23 @@ pub fn expand_derive_active_model(ident: Ident, data: Data) -> syn::Result<Token
}
}
fn unset(&mut self, c: <Self::Entity as EntityTrait>::Column) {
fn not_set(&mut self, c: <Self::Entity as EntityTrait>::Column) {
match c {
#(<Self::Entity as EntityTrait>::Column::#name => self.#field = sea_orm::ActiveValue::unset(),)*
#(<Self::Entity as EntityTrait>::Column::#name => self.#field = sea_orm::ActiveValue::not_set(),)*
_ => {},
}
}
fn is_unset(&self, c: <Self::Entity as EntityTrait>::Column) -> bool {
fn is_not_set(&self, c: <Self::Entity as EntityTrait>::Column) -> bool {
match c {
#(<Self::Entity as EntityTrait>::Column::#name => self.#field.is_unset(),)*
#(<Self::Entity as EntityTrait>::Column::#name => self.#field.is_not_set(),)*
_ => panic!("This ActiveModel does not have this field"),
}
}
fn default() -> Self {
Self {
#(#field: sea_orm::ActiveValue::unset()),*
#(#field: sea_orm::ActiveValue::not_set()),*
}
}
}


@ -45,6 +45,11 @@ pub fn impl_default_as_str(ident: &Ident, data: &Data) -> syn::Result<TokenStrea
column_name = litstr.value();
}
}
if name == "table_name" {
if let Lit::Str(litstr) = &nv.lit {
column_name = litstr.value();
}
}
}
}
}


@ -12,6 +12,7 @@ pub fn expand_derive_entity_model(data: Data, attrs: Vec<Attribute>) -> syn::Res
// if #[sea_orm(table_name = "foo", schema_name = "bar")] specified, create Entity struct
let mut table_name = None;
let mut schema_name = quote! { None };
let mut table_iden = false;
attrs.iter().for_each(|attr| {
if attr.path.get_ident().map(|i| i == "sea_orm") != Some(true) {
return;
@ -28,11 +29,18 @@ pub fn expand_derive_entity_model(data: Data, attrs: Vec<Attribute>) -> syn::Res
schema_name = quote! { Some(#name) };
}
}
} else if let Meta::Path(path) = meta {
if let Some(ident) = path.get_ident() {
if ident == "table_iden" {
table_iden = true;
}
}
}
}
}
});
let entity_def = table_name
.as_ref()
.map(|table_name| {
quote! {
#[derive(Copy, Clone, Default, Debug, sea_orm::prelude::DeriveEntity)]
@ -58,6 +66,19 @@ pub fn expand_derive_entity_model(data: Data, attrs: Vec<Attribute>) -> syn::Res
let mut primary_keys: Punctuated<_, Comma> = Punctuated::new();
let mut primary_key_types: Punctuated<_, Comma> = Punctuated::new();
let mut auto_increment = true;
if table_iden {
if let Some(table_name) = table_name {
let table_field_name = Ident::new("Table", Span::call_site());
columns_enum.push(quote! {
#[sea_orm(table_name=#table_name)]
#[strum(disabled)]
#table_field_name
});
columns_trait.push(
quote! { Self::#table_field_name => panic!("Table cannot be used as a column") },
);
}
}
if let Data::Struct(item_struct) = data {
if let Fields::Named(fields) = item_struct.fields {
for field in fields.named {


@ -596,9 +596,9 @@ pub fn test(_: TokenStream, input: TokenStream) -> TokenStream {
#[test]
#(#attrs)*
fn #name() #ret {
let _ = ::env_logger::builder()
.filter_level(::log::LevelFilter::Debug)
.is_test(true)
let _ = ::tracing_subscriber::fmt()
.with_max_level(::tracing::Level::DEBUG)
.with_test_writer()
.try_init();
crate::block_on!(async { #body })
}


@ -17,6 +17,6 @@ devise = "0.3"
quote = "1"
[dev-dependencies]
rocket = { git = "https://github.com/SergioBenitez/Rocket.git", default-features = false }
rocket = { version = "0.5.0-rc.1", default-features = false }
trybuild = "1.0"
version_check = "0.9"


@ -13,7 +13,6 @@ edition = "2021"
all-features = true
[dependencies.rocket]
git = "https://github.com/SergioBenitez/Rocket.git"
version = "0.5.0-rc.1"
default-features = false
@ -22,6 +21,6 @@ path = "../codegen"
version = "0.1.0-rc"
[dev-dependencies.rocket]
git = "https://github.com/SergioBenitez/Rocket.git"
version = "0.5.0-rc.1"
default-features = false
features = ["json"]


@ -4,6 +4,7 @@ use crate::{
};
use sea_query::{MysqlQueryBuilder, PostgresQueryBuilder, QueryBuilder, SqliteQueryBuilder};
use std::{future::Future, pin::Pin};
use tracing::instrument;
use url::Url;
#[cfg(feature = "sqlx-dep")]
@ -49,6 +50,7 @@ pub enum DatabaseBackend {
/// The same as [DatabaseBackend] just shorter :)
pub type DbBackend = DatabaseBackend;
#[derive(Debug)]
pub(crate) enum InnerConnection {
#[cfg(feature = "sqlx-mysql")]
MySql(PoolConnection<sqlx::MySql>),
@ -104,6 +106,7 @@ impl<'a> ConnectionTrait<'a> for DatabaseConnection {
}
}
#[instrument(level = "trace")]
async fn execute(&self, stmt: Statement) -> Result<ExecResult, DbErr> {
match self {
#[cfg(feature = "sqlx-mysql")]
@ -118,6 +121,7 @@ impl<'a> ConnectionTrait<'a> for DatabaseConnection {
}
}
#[instrument(level = "trace")]
async fn query_one(&self, stmt: Statement) -> Result<Option<QueryResult>, DbErr> {
match self {
#[cfg(feature = "sqlx-mysql")]
@ -132,6 +136,7 @@ impl<'a> ConnectionTrait<'a> for DatabaseConnection {
}
}
#[instrument(level = "trace")]
async fn query_all(&self, stmt: Statement) -> Result<Vec<QueryResult>, DbErr> {
match self {
#[cfg(feature = "sqlx-mysql")]
@ -146,6 +151,7 @@ impl<'a> ConnectionTrait<'a> for DatabaseConnection {
}
}
#[instrument(level = "trace")]
fn stream(
&'a self,
stmt: Statement,
@ -160,13 +166,14 @@ impl<'a> ConnectionTrait<'a> for DatabaseConnection {
DatabaseConnection::SqlxSqlitePoolConnection(conn) => conn.stream(stmt).await?,
#[cfg(feature = "mock")]
DatabaseConnection::MockDatabaseConnection(conn) => {
crate::QueryStream::from((Arc::clone(conn), stmt))
crate::QueryStream::from((Arc::clone(conn), stmt, None))
}
DatabaseConnection::Disconnected => panic!("Disconnected"),
})
})
}
#[instrument(level = "trace")]
async fn begin(&self) -> Result<DatabaseTransaction, DbErr> {
match self {
#[cfg(feature = "sqlx-mysql")]
@ -177,7 +184,7 @@ impl<'a> ConnectionTrait<'a> for DatabaseConnection {
DatabaseConnection::SqlxSqlitePoolConnection(conn) => conn.begin().await,
#[cfg(feature = "mock")]
DatabaseConnection::MockDatabaseConnection(conn) => {
DatabaseTransaction::new_mock(Arc::clone(conn)).await
DatabaseTransaction::new_mock(Arc::clone(conn), None).await
}
DatabaseConnection::Disconnected => panic!("Disconnected"),
}
@ -185,6 +192,7 @@ impl<'a> ConnectionTrait<'a> for DatabaseConnection {
/// Execute the function inside a transaction.
/// If the function returns an error, the transaction will be rolled back. If it does not return an error, the transaction will be committed.
#[instrument(level = "trace", skip(_callback))]
async fn transaction<F, T, E>(&self, _callback: F) -> Result<T, TransactionError<E>>
where
F: for<'c> FnOnce(
@ -205,7 +213,7 @@ impl<'a> ConnectionTrait<'a> for DatabaseConnection {
DatabaseConnection::SqlxSqlitePoolConnection(conn) => conn.transaction(_callback).await,
#[cfg(feature = "mock")]
DatabaseConnection::MockDatabaseConnection(conn) => {
let transaction = DatabaseTransaction::new_mock(Arc::clone(conn))
let transaction = DatabaseTransaction::new_mock(Arc::clone(conn), None)
.await
.map_err(TransactionError::Connection)?;
transaction.run(_callback).await
@ -237,6 +245,30 @@ impl DatabaseConnection {
}
}
impl DatabaseConnection {
/// Sets a callback for collecting metrics on this connection
pub fn set_metric_callback<F>(&mut self, _callback: F)
where
F: Fn(&crate::metric::Info<'_>) + Send + Sync + 'static,
{
match self {
#[cfg(feature = "sqlx-mysql")]
DatabaseConnection::SqlxMySqlPoolConnection(conn) => {
conn.set_metric_callback(_callback)
}
#[cfg(feature = "sqlx-postgres")]
DatabaseConnection::SqlxPostgresPoolConnection(conn) => {
conn.set_metric_callback(_callback)
}
#[cfg(feature = "sqlx-sqlite")]
DatabaseConnection::SqlxSqlitePoolConnection(conn) => {
conn.set_metric_callback(_callback)
}
_ => {}
}
}
}
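// A minimal usage sketch for the new metric callback, assuming an async
// context with a mutable `DatabaseConnection` named `db`; the exact fields of
// `metric::Info` are defined in the `metric` module added by this change set,
// so the closure body is left as a placeholder:
//
// let mut db = Database::connect("mysql://sea:sea@localhost/bakery").await?;
// db.set_metric_callback(|_info| {
//     // e.g. bump a counter or record statement latency here
// });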
impl DbBackend {
/// Check if the URI is the same as the specified database backend.
/// Returns true if they match.


@ -5,6 +5,7 @@ use crate::{
};
use sea_query::{Value, ValueType, Values};
use std::{collections::BTreeMap, sync::Arc};
use tracing::instrument;
/// Defines a Mock database suitable for testing
#[derive(Debug)]
@ -89,6 +90,7 @@ impl MockDatabase {
}
impl MockDatabaseTrait for MockDatabase {
#[instrument(level = "trace")]
fn execute(&mut self, counter: usize, statement: Statement) -> Result<ExecResult, DbErr> {
if let Some(transaction) = &mut self.transaction {
transaction.push(statement);
@ -104,6 +106,7 @@ impl MockDatabaseTrait for MockDatabase {
}
}
#[instrument(level = "trace")]
fn query(&mut self, counter: usize, statement: Statement) -> Result<Vec<QueryResult>, DbErr> {
if let Some(transaction) = &mut self.transaction {
transaction.push(statement);
@ -122,6 +125,7 @@ impl MockDatabaseTrait for MockDatabase {
}
}
#[instrument(level = "trace")]
fn begin(&mut self) {
if self.transaction.is_some() {
self.transaction
@ -133,6 +137,7 @@ impl MockDatabaseTrait for MockDatabase {
}
}
#[instrument(level = "trace")]
fn commit(&mut self) {
if self.transaction.is_some() {
if self.transaction.as_mut().unwrap().commit(self.db_backend) {
@ -144,6 +149,7 @@ impl MockDatabaseTrait for MockDatabase {
}
}
#[instrument(level = "trace")]
fn rollback(&mut self) {
if self.transaction.is_some() {
if self.transaction.as_mut().unwrap().rollback(self.db_backend) {


@ -14,6 +14,7 @@ pub use db_connection::*;
pub use mock::*;
pub use statement::*;
pub use stream::*;
use tracing::instrument;
pub use transaction::*;
use crate::DbErr;
@ -42,6 +43,7 @@ pub struct ConnectOptions {
impl Database {
/// Method to create a [DatabaseConnection] on a database
#[instrument(level = "trace", skip(opt))]
pub async fn connect<C>(opt: C) -> Result<DatabaseConnection, DbErr>
where
C: Into<ConnectOptions>,


@ -12,6 +12,8 @@ use futures::TryStreamExt;
#[cfg(feature = "sqlx-dep")]
use sqlx::{pool::PoolConnection, Executor};
use tracing::instrument;
use crate::{DbErr, InnerConnection, QueryResult, Statement};
/// Creates a stream from a [QueryResult]
@ -19,36 +21,85 @@ use crate::{DbErr, InnerConnection, QueryResult, Statement};
pub struct QueryStream {
stmt: Statement,
conn: InnerConnection,
#[borrows(mut conn, stmt)]
metric_callback: Option<crate::metric::Callback>,
#[borrows(mut conn, stmt, metric_callback)]
#[not_covariant]
stream: Pin<Box<dyn Stream<Item = Result<QueryResult, DbErr>> + 'this>>,
}
#[cfg(feature = "sqlx-mysql")]
impl From<(PoolConnection<sqlx::MySql>, Statement)> for QueryStream {
fn from((conn, stmt): (PoolConnection<sqlx::MySql>, Statement)) -> Self {
QueryStream::build(stmt, InnerConnection::MySql(conn))
impl
From<(
PoolConnection<sqlx::MySql>,
Statement,
Option<crate::metric::Callback>,
)> for QueryStream
{
fn from(
(conn, stmt, metric_callback): (
PoolConnection<sqlx::MySql>,
Statement,
Option<crate::metric::Callback>,
),
) -> Self {
QueryStream::build(stmt, InnerConnection::MySql(conn), metric_callback)
}
}
#[cfg(feature = "sqlx-postgres")]
impl From<(PoolConnection<sqlx::Postgres>, Statement)> for QueryStream {
fn from((conn, stmt): (PoolConnection<sqlx::Postgres>, Statement)) -> Self {
QueryStream::build(stmt, InnerConnection::Postgres(conn))
impl
From<(
PoolConnection<sqlx::Postgres>,
Statement,
Option<crate::metric::Callback>,
)> for QueryStream
{
fn from(
(conn, stmt, metric_callback): (
PoolConnection<sqlx::Postgres>,
Statement,
Option<crate::metric::Callback>,
),
) -> Self {
QueryStream::build(stmt, InnerConnection::Postgres(conn), metric_callback)
}
}
#[cfg(feature = "sqlx-sqlite")]
impl From<(PoolConnection<sqlx::Sqlite>, Statement)> for QueryStream {
fn from((conn, stmt): (PoolConnection<sqlx::Sqlite>, Statement)) -> Self {
QueryStream::build(stmt, InnerConnection::Sqlite(conn))
impl
From<(
PoolConnection<sqlx::Sqlite>,
Statement,
Option<crate::metric::Callback>,
)> for QueryStream
{
fn from(
(conn, stmt, metric_callback): (
PoolConnection<sqlx::Sqlite>,
Statement,
Option<crate::metric::Callback>,
),
) -> Self {
QueryStream::build(stmt, InnerConnection::Sqlite(conn), metric_callback)
}
}
#[cfg(feature = "mock")]
impl From<(Arc<crate::MockDatabaseConnection>, Statement)> for QueryStream {
fn from((conn, stmt): (Arc<crate::MockDatabaseConnection>, Statement)) -> Self {
QueryStream::build(stmt, InnerConnection::Mock(conn))
impl
From<(
Arc<crate::MockDatabaseConnection>,
Statement,
Option<crate::metric::Callback>,
)> for QueryStream
{
fn from(
(conn, stmt, metric_callback): (
Arc<crate::MockDatabaseConnection>,
Statement,
Option<crate::metric::Callback>,
),
) -> Self {
QueryStream::build(stmt, InnerConnection::Mock(conn), metric_callback)
}
}
@ -59,37 +110,49 @@ impl std::fmt::Debug for QueryStream {
}
impl QueryStream {
fn build(stmt: Statement, conn: InnerConnection) -> QueryStream {
#[instrument(level = "trace", skip(metric_callback))]
fn build(
stmt: Statement,
conn: InnerConnection,
metric_callback: Option<crate::metric::Callback>,
) -> QueryStream {
QueryStreamBuilder {
stmt,
conn,
stream_builder: |conn, stmt| match conn {
metric_callback,
stream_builder: |conn, stmt, _metric_callback| match conn {
#[cfg(feature = "sqlx-mysql")]
InnerConnection::MySql(c) => {
let query = crate::driver::sqlx_mysql::sqlx_query(stmt);
Box::pin(
c.fetch(query)
.map_ok(Into::into)
.map_err(crate::sqlx_error_to_query_err),
)
crate::metric::metric_ok!(_metric_callback, stmt, {
Box::pin(
c.fetch(query)
.map_ok(Into::into)
.map_err(crate::sqlx_error_to_query_err),
)
})
}
#[cfg(feature = "sqlx-postgres")]
InnerConnection::Postgres(c) => {
let query = crate::driver::sqlx_postgres::sqlx_query(stmt);
Box::pin(
c.fetch(query)
.map_ok(Into::into)
.map_err(crate::sqlx_error_to_query_err),
)
crate::metric::metric_ok!(_metric_callback, stmt, {
Box::pin(
c.fetch(query)
.map_ok(Into::into)
.map_err(crate::sqlx_error_to_query_err),
)
})
}
#[cfg(feature = "sqlx-sqlite")]
InnerConnection::Sqlite(c) => {
let query = crate::driver::sqlx_sqlite::sqlx_query(stmt);
Box::pin(
c.fetch(query)
.map_ok(Into::into)
.map_err(crate::sqlx_error_to_query_err),
)
crate::metric::metric_ok!(_metric_callback, stmt, {
Box::pin(
c.fetch(query)
.map_ok(Into::into)
.map_err(crate::sqlx_error_to_query_err),
)
})
}
#[cfg(feature = "mock")]
InnerConnection::Mock(c) => c.fetch(stmt),


@ -11,6 +11,8 @@ use sqlx::Executor;
use futures::lock::MutexGuard;
use tracing::instrument;
use crate::{DbErr, InnerConnection, QueryResult, Statement};
/// `TransactionStream` cannot be used in a `transaction` closure as it does not impl `Send`.
@ -19,7 +21,8 @@ use crate::{DbErr, InnerConnection, QueryResult, Statement};
pub struct TransactionStream<'a> {
stmt: Statement,
conn: MutexGuard<'a, InnerConnection>,
#[borrows(mut conn, stmt)]
metric_callback: Option<crate::metric::Callback>,
#[borrows(mut conn, stmt, metric_callback)]
#[not_covariant]
stream: Pin<Box<dyn Stream<Item = Result<QueryResult, DbErr>> + 'this>>,
}
@ -31,45 +34,54 @@ impl<'a> std::fmt::Debug for TransactionStream<'a> {
}
impl<'a> TransactionStream<'a> {
#[instrument(level = "trace", skip(metric_callback))]
pub(crate) async fn build(
conn: MutexGuard<'a, InnerConnection>,
stmt: Statement,
metric_callback: Option<crate::metric::Callback>,
) -> TransactionStream<'a> {
TransactionStreamAsyncBuilder {
stmt,
conn,
stream_builder: |conn, stmt| {
metric_callback,
stream_builder: |conn, stmt, _metric_callback| {
Box::pin(async move {
match conn.deref_mut() {
#[cfg(feature = "sqlx-mysql")]
InnerConnection::MySql(c) => {
let query = crate::driver::sqlx_mysql::sqlx_query(stmt);
Box::pin(
c.fetch(query)
.map_ok(Into::into)
.map_err(crate::sqlx_error_to_query_err),
)
as Pin<Box<dyn Stream<Item = Result<QueryResult, DbErr>>>>
crate::metric::metric_ok!(_metric_callback, stmt, {
Box::pin(
c.fetch(query)
.map_ok(Into::into)
.map_err(crate::sqlx_error_to_query_err),
)
as Pin<Box<dyn Stream<Item = Result<QueryResult, DbErr>>>>
})
}
#[cfg(feature = "sqlx-postgres")]
InnerConnection::Postgres(c) => {
let query = crate::driver::sqlx_postgres::sqlx_query(stmt);
Box::pin(
c.fetch(query)
.map_ok(Into::into)
.map_err(crate::sqlx_error_to_query_err),
)
as Pin<Box<dyn Stream<Item = Result<QueryResult, DbErr>>>>
crate::metric::metric_ok!(_metric_callback, stmt, {
Box::pin(
c.fetch(query)
.map_ok(Into::into)
.map_err(crate::sqlx_error_to_query_err),
)
as Pin<Box<dyn Stream<Item = Result<QueryResult, DbErr>>>>
})
}
#[cfg(feature = "sqlx-sqlite")]
InnerConnection::Sqlite(c) => {
let query = crate::driver::sqlx_sqlite::sqlx_query(stmt);
Box::pin(
c.fetch(query)
.map_ok(Into::into)
.map_err(crate::sqlx_error_to_query_err),
)
as Pin<Box<dyn Stream<Item = Result<QueryResult, DbErr>>>>
crate::metric::metric_ok!(_metric_callback, stmt, {
Box::pin(
c.fetch(query)
.map_ok(Into::into)
.map_err(crate::sqlx_error_to_query_err),
)
as Pin<Box<dyn Stream<Item = Result<QueryResult, DbErr>>>>
})
}
#[cfg(feature = "mock")]
InnerConnection::Mock(c) => c.fetch(stmt),


@ -8,6 +8,7 @@ use futures::lock::Mutex;
#[cfg(feature = "sqlx-dep")]
use sqlx::{pool::PoolConnection, TransactionManager};
use std::{future::Future, pin::Pin, sync::Arc};
use tracing::instrument;
// a Transaction is just a sugar for a connection where START TRANSACTION has been executed
/// Defines a database transaction, whether it is an open transaction and the type of
@ -16,6 +17,7 @@ pub struct DatabaseTransaction {
conn: Arc<Mutex<InnerConnection>>,
backend: DbBackend,
open: bool,
metric_callback: Option<crate::metric::Callback>,
}
impl std::fmt::Debug for DatabaseTransaction {
@ -28,10 +30,12 @@ impl DatabaseTransaction {
#[cfg(feature = "sqlx-mysql")]
pub(crate) async fn new_mysql(
inner: PoolConnection<sqlx::MySql>,
metric_callback: Option<crate::metric::Callback>,
) -> Result<DatabaseTransaction, DbErr> {
Self::begin(
Arc::new(Mutex::new(InnerConnection::MySql(inner))),
DbBackend::MySql,
metric_callback,
)
.await
}
@ -39,10 +43,12 @@ impl DatabaseTransaction {
#[cfg(feature = "sqlx-postgres")]
pub(crate) async fn new_postgres(
inner: PoolConnection<sqlx::Postgres>,
metric_callback: Option<crate::metric::Callback>,
) -> Result<DatabaseTransaction, DbErr> {
Self::begin(
Arc::new(Mutex::new(InnerConnection::Postgres(inner))),
DbBackend::Postgres,
metric_callback,
)
.await
}
@ -50,10 +56,12 @@ impl DatabaseTransaction {
#[cfg(feature = "sqlx-sqlite")]
pub(crate) async fn new_sqlite(
inner: PoolConnection<sqlx::Sqlite>,
metric_callback: Option<crate::metric::Callback>,
) -> Result<DatabaseTransaction, DbErr> {
Self::begin(
Arc::new(Mutex::new(InnerConnection::Sqlite(inner))),
DbBackend::Sqlite,
metric_callback,
)
.await
}
@ -61,19 +69,28 @@ impl DatabaseTransaction {
#[cfg(feature = "mock")]
pub(crate) async fn new_mock(
inner: Arc<crate::MockDatabaseConnection>,
metric_callback: Option<crate::metric::Callback>,
) -> Result<DatabaseTransaction, DbErr> {
let backend = inner.get_database_backend();
Self::begin(Arc::new(Mutex::new(InnerConnection::Mock(inner))), backend).await
Self::begin(
Arc::new(Mutex::new(InnerConnection::Mock(inner))),
backend,
metric_callback,
)
.await
}
#[instrument(level = "trace", skip(metric_callback))]
async fn begin(
conn: Arc<Mutex<InnerConnection>>,
backend: DbBackend,
metric_callback: Option<crate::metric::Callback>,
) -> Result<DatabaseTransaction, DbErr> {
let res = DatabaseTransaction {
conn,
backend,
open: true,
metric_callback,
};
match *res.conn.lock().await {
#[cfg(feature = "sqlx-mysql")]
@ -104,6 +121,7 @@ impl DatabaseTransaction {
/// Runs a transaction to completion, rolling back and returning the error if one is
/// encountered
#[instrument(level = "trace", skip(callback))]
pub(crate) async fn run<F, T, E>(self, callback: F) -> Result<T, TransactionError<E>>
where
F: for<'b> FnOnce(
@ -125,6 +143,7 @@ impl DatabaseTransaction {
}
/// Commit a transaction atomically
#[instrument(level = "trace")]
pub async fn commit(mut self) -> Result<(), DbErr> {
self.open = false;
match *self.conn.lock().await {
@ -155,6 +174,7 @@ impl DatabaseTransaction {
}
/// rolls back a transaction in case error are encountered during the operation
#[instrument(level = "trace")]
pub async fn rollback(mut self) -> Result<(), DbErr> {
self.open = false;
match *self.conn.lock().await {
@ -185,6 +205,7 @@ impl DatabaseTransaction {
}
// the rollback is queued and will be performed on next async operation, like returning the connection to the pool
#[instrument(level = "trace")]
fn start_rollback(&mut self) {
if self.open {
if let Some(mut conn) = self.conn.try_lock() {
@ -229,6 +250,7 @@ impl<'a> ConnectionTrait<'a> for DatabaseTransaction {
self.backend
}
#[instrument(level = "trace")]
async fn execute(&self, stmt: Statement) -> Result<ExecResult, DbErr> {
debug_print!("{}", stmt);
@ -236,17 +258,23 @@ impl<'a> ConnectionTrait<'a> for DatabaseTransaction {
#[cfg(feature = "sqlx-mysql")]
InnerConnection::MySql(conn) => {
let query = crate::driver::sqlx_mysql::sqlx_query(&stmt);
query.execute(conn).await.map(Into::into)
crate::metric::metric!(self.metric_callback, &stmt, {
query.execute(conn).await.map(Into::into)
})
}
#[cfg(feature = "sqlx-postgres")]
InnerConnection::Postgres(conn) => {
let query = crate::driver::sqlx_postgres::sqlx_query(&stmt);
query.execute(conn).await.map(Into::into)
crate::metric::metric!(self.metric_callback, &stmt, {
query.execute(conn).await.map(Into::into)
})
}
#[cfg(feature = "sqlx-sqlite")]
InnerConnection::Sqlite(conn) => {
let query = crate::driver::sqlx_sqlite::sqlx_query(&stmt);
query.execute(conn).await.map(Into::into)
crate::metric::metric!(self.metric_callback, &stmt, {
query.execute(conn).await.map(Into::into)
})
}
#[cfg(feature = "mock")]
InnerConnection::Mock(conn) => return conn.execute(stmt),
@ -255,6 +283,7 @@ impl<'a> ConnectionTrait<'a> for DatabaseTransaction {
_res.map_err(sqlx_error_to_exec_err)
}
#[instrument(level = "trace")]
async fn query_one(&self, stmt: Statement) -> Result<Option<QueryResult>, DbErr> {
debug_print!("{}", stmt);
@ -285,6 +314,7 @@ impl<'a> ConnectionTrait<'a> for DatabaseTransaction {
}
}
#[instrument(level = "trace")]
async fn query_all(&self, stmt: Statement) -> Result<Vec<QueryResult>, DbErr> {
debug_print!("{}", stmt);
@ -320,21 +350,34 @@ impl<'a> ConnectionTrait<'a> for DatabaseTransaction {
_res.map_err(sqlx_error_to_query_err)
}
#[instrument(level = "trace")]
fn stream(
&'a self,
stmt: Statement,
) -> Pin<Box<dyn Future<Output = Result<Self::Stream, DbErr>> + 'a>> {
Box::pin(
async move { Ok(crate::TransactionStream::build(self.conn.lock().await, stmt).await) },
)
Box::pin(async move {
Ok(crate::TransactionStream::build(
self.conn.lock().await,
stmt,
self.metric_callback.clone(),
)
.await)
})
}
#[instrument(level = "trace")]
async fn begin(&self) -> Result<DatabaseTransaction, DbErr> {
DatabaseTransaction::begin(Arc::clone(&self.conn), self.backend).await
DatabaseTransaction::begin(
Arc::clone(&self.conn),
self.backend,
self.metric_callback.clone(),
)
.await
}
/// Execute the function inside a transaction.
/// If the function returns an error, the transaction will be rolled back. If it does not return an error, the transaction will be committed.
#[instrument(level = "trace", skip(_callback))]
async fn transaction<F, T, E>(&self, _callback: F) -> Result<T, TransactionError<E>>
where
F: for<'c> FnOnce(

View File

@ -11,6 +11,7 @@ use std::{
Arc, Mutex,
},
};
use tracing::instrument;
/// Defines a database driver for the [MockDatabase]
#[derive(Debug)]
@ -69,6 +70,7 @@ impl MockDatabaseConnector {
/// Connect to the [MockDatabase]
#[allow(unused_variables)]
#[instrument(level = "trace")]
pub async fn connect(string: &str) -> Result<DatabaseConnection, DbErr> {
macro_rules! connect_mock_db {
( $syntax: expr ) => {
@ -117,6 +119,7 @@ impl MockDatabaseConnection {
}
/// Execute the SQL statement in the [MockDatabase]
#[instrument(level = "trace")]
pub fn execute(&self, statement: Statement) -> Result<ExecResult, DbErr> {
debug_print!("{}", statement);
let counter = self.execute_counter.fetch_add(1, Ordering::SeqCst);
@ -124,6 +127,7 @@ impl MockDatabaseConnection {
}
/// Return one [QueryResult] if the query was successful
#[instrument(level = "trace")]
pub fn query_one(&self, statement: Statement) -> Result<Option<QueryResult>, DbErr> {
debug_print!("{}", statement);
let counter = self.query_counter.fetch_add(1, Ordering::SeqCst);
@ -132,6 +136,7 @@ impl MockDatabaseConnection {
}
/// Return all [QueryResult]s if the query was successful
#[instrument(level = "trace")]
pub fn query_all(&self, statement: Statement) -> Result<Vec<QueryResult>, DbErr> {
debug_print!("{}", statement);
let counter = self.query_counter.fetch_add(1, Ordering::SeqCst);
@ -139,6 +144,7 @@ impl MockDatabaseConnection {
}
/// Return [QueryResult]s from a multi-query operation
#[instrument(level = "trace")]
pub fn fetch(
&self,
statement: &Statement,
@ -150,16 +156,19 @@ impl MockDatabaseConnection {
}
/// Begin a block of SQL statements that execute together.
#[instrument(level = "trace")]
pub fn begin(&self) {
self.mocker.lock().unwrap().begin()
}
/// Commit a transaction atomically to the database
#[instrument(level = "trace")]
pub fn commit(&self) {
self.mocker.lock().unwrap().commit()
}
/// Roll back a faulty transaction
#[instrument(level = "trace")]
pub fn rollback(&self) {
self.mocker.lock().unwrap().rollback()
}

View File

@ -1,4 +1,4 @@
use std::{future::Future, pin::Pin};
use std::{future::Future, pin::Pin, sync::Arc};
use sqlx::{
mysql::{MySqlArguments, MySqlConnectOptions, MySqlQueryResult, MySqlRow},
@ -7,6 +7,7 @@ use sqlx::{
sea_query::sea_query_driver_mysql!();
use sea_query_driver_mysql::bind_query;
use tracing::instrument;
use crate::{
debug_print, error::*, executor::*, ConnectOptions, DatabaseConnection, DatabaseTransaction,
@ -20,9 +21,16 @@ use super::sqlx_common::*;
pub struct SqlxMySqlConnector;
/// Defines a sqlx MySQL pool
#[derive(Debug, Clone)]
#[derive(Clone)]
pub struct SqlxMySqlPoolConnection {
pool: MySqlPool,
metric_callback: Option<crate::metric::Callback>,
}
impl std::fmt::Debug for SqlxMySqlPoolConnection {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "SqlxMySqlPoolConnection {{ pool: {:?} }}", self.pool)
}
}
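The `Debug` derive is dropped here because the new `metric_callback` field stores a trait object (`Arc<dyn Fn(&Info<'_>) + Send + Sync>`), which has no `Debug` impl; the manual impl formats the pool and skips the callback. A minimal standalone sketch of that pattern, using stand-in types rather than the crate's own:

```rust
// Standalone sketch (stand-in types, not SeaORM's): a struct holding a
// non-Debug callback formats only the fields that do implement Debug.
use std::{fmt, sync::Arc};

#[derive(Debug)]
struct Pool; // stand-in for sqlx::MySqlPool

struct PoolConnection {
    pool: Pool,
    metric_callback: Option<Arc<dyn Fn(&str) + Send + Sync>>,
}

impl fmt::Debug for PoolConnection {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // The callback is skipped: `dyn Fn` does not implement Debug.
        write!(f, "PoolConnection {{ pool: {:?} }}", self.pool)
    }
}

fn main() {
    let conn = PoolConnection { pool: Pool, metric_callback: None };
    println!("{:?}", conn); // PoolConnection { pool: Pool }
}
```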
impl SqlxMySqlConnector {
@ -32,6 +40,7 @@ impl SqlxMySqlConnector {
}
/// Add configuration options for the MySQL database
#[instrument(level = "trace")]
pub async fn connect(options: ConnectOptions) -> Result<DatabaseConnection, DbErr> {
let mut opt = options
.url
@ -43,7 +52,10 @@ impl SqlxMySqlConnector {
}
match options.pool_options().connect_with(opt).await {
Ok(pool) => Ok(DatabaseConnection::SqlxMySqlPoolConnection(
SqlxMySqlPoolConnection { pool },
SqlxMySqlPoolConnection {
pool,
metric_callback: None,
},
)),
Err(e) => Err(sqlx_error_to_conn_err(e)),
}
@ -53,21 +65,27 @@ impl SqlxMySqlConnector {
impl SqlxMySqlConnector {
/// Instantiate a sqlx pool connection to a [DatabaseConnection]
pub fn from_sqlx_mysql_pool(pool: MySqlPool) -> DatabaseConnection {
DatabaseConnection::SqlxMySqlPoolConnection(SqlxMySqlPoolConnection { pool })
DatabaseConnection::SqlxMySqlPoolConnection(SqlxMySqlPoolConnection {
pool,
metric_callback: None,
})
}
}
impl SqlxMySqlPoolConnection {
/// Execute a [Statement] on a MySQL backend
#[instrument(level = "trace")]
pub async fn execute(&self, stmt: Statement) -> Result<ExecResult, DbErr> {
debug_print!("{}", stmt);
let query = sqlx_query(&stmt);
if let Ok(conn) = &mut self.pool.acquire().await {
match query.execute(conn).await {
Ok(res) => Ok(res.into()),
Err(err) => Err(sqlx_error_to_exec_err(err)),
}
crate::metric::metric!(self.metric_callback, &stmt, {
match query.execute(conn).await {
Ok(res) => Ok(res.into()),
Err(err) => Err(sqlx_error_to_exec_err(err)),
}
})
} else {
Err(DbErr::Exec(
"Failed to acquire connection from pool.".to_owned(),
@ -76,18 +94,21 @@ impl SqlxMySqlPoolConnection {
}
/// Get one result from a SQL query. Returns [Option::None] if no match was found
#[instrument(level = "trace")]
pub async fn query_one(&self, stmt: Statement) -> Result<Option<QueryResult>, DbErr> {
debug_print!("{}", stmt);
let query = sqlx_query(&stmt);
if let Ok(conn) = &mut self.pool.acquire().await {
match query.fetch_one(conn).await {
Ok(row) => Ok(Some(row.into())),
Err(err) => match err {
sqlx::Error::RowNotFound => Ok(None),
_ => Err(DbErr::Query(err.to_string())),
},
}
crate::metric::metric!(self.metric_callback, &stmt, {
match query.fetch_one(conn).await {
Ok(row) => Ok(Some(row.into())),
Err(err) => match err {
sqlx::Error::RowNotFound => Ok(None),
_ => Err(DbErr::Query(err.to_string())),
},
}
})
} else {
Err(DbErr::Query(
"Failed to acquire connection from pool.".to_owned(),
@ -96,15 +117,18 @@ impl SqlxMySqlPoolConnection {
}
/// Get the results of a query, returning them as a Vec<[QueryResult]>
#[instrument(level = "trace")]
pub async fn query_all(&self, stmt: Statement) -> Result<Vec<QueryResult>, DbErr> {
debug_print!("{}", stmt);
let query = sqlx_query(&stmt);
if let Ok(conn) = &mut self.pool.acquire().await {
match query.fetch_all(conn).await {
Ok(rows) => Ok(rows.into_iter().map(|r| r.into()).collect()),
Err(err) => Err(sqlx_error_to_query_err(err)),
}
crate::metric::metric!(self.metric_callback, &stmt, {
match query.fetch_all(conn).await {
Ok(rows) => Ok(rows.into_iter().map(|r| r.into()).collect()),
Err(err) => Err(sqlx_error_to_query_err(err)),
}
})
} else {
Err(DbErr::Query(
"Failed to acquire connection from pool.".to_owned(),
@ -113,11 +137,16 @@ impl SqlxMySqlPoolConnection {
}
/// Stream the results of executing a SQL query
#[instrument(level = "trace")]
pub async fn stream(&self, stmt: Statement) -> Result<QueryStream, DbErr> {
debug_print!("{}", stmt);
if let Ok(conn) = self.pool.acquire().await {
Ok(QueryStream::from((conn, stmt)))
Ok(QueryStream::from((
conn,
stmt,
self.metric_callback.clone(),
)))
} else {
Err(DbErr::Query(
"Failed to acquire connection from pool.".to_owned(),
@ -126,9 +155,10 @@ impl SqlxMySqlPoolConnection {
}
/// Bundle a set of SQL statements that execute together.
#[instrument(level = "trace")]
pub async fn begin(&self) -> Result<DatabaseTransaction, DbErr> {
if let Ok(conn) = self.pool.acquire().await {
DatabaseTransaction::new_mysql(conn).await
DatabaseTransaction::new_mysql(conn, self.metric_callback.clone()).await
} else {
Err(DbErr::Query(
"Failed to acquire connection from pool.".to_owned(),
@ -137,6 +167,7 @@ impl SqlxMySqlPoolConnection {
}
/// Create a MySQL transaction
#[instrument(level = "trace", skip(callback))]
pub async fn transaction<F, T, E>(&self, callback: F) -> Result<T, TransactionError<E>>
where
F: for<'b> FnOnce(
@ -147,7 +178,7 @@ impl SqlxMySqlPoolConnection {
E: std::error::Error + Send,
{
if let Ok(conn) = self.pool.acquire().await {
let transaction = DatabaseTransaction::new_mysql(conn)
let transaction = DatabaseTransaction::new_mysql(conn, self.metric_callback.clone())
.await
.map_err(|e| TransactionError::Connection(e))?;
transaction.run(callback).await
@ -157,6 +188,13 @@ impl SqlxMySqlPoolConnection {
)))
}
}
pub(crate) fn set_metric_callback<F>(&mut self, callback: F)
where
F: Fn(&crate::metric::Info<'_>) + Send + Sync + 'static,
{
self.metric_callback = Some(Arc::new(callback));
}
}
impl From<MySqlRow> for QueryResult {

View File

@ -1,4 +1,4 @@
use std::{future::Future, pin::Pin};
use std::{future::Future, pin::Pin, sync::Arc};
use sqlx::{
postgres::{PgArguments, PgConnectOptions, PgQueryResult, PgRow},
@ -7,6 +7,7 @@ use sqlx::{
sea_query::sea_query_driver_postgres!();
use sea_query_driver_postgres::bind_query;
use tracing::instrument;
use crate::{
debug_print, error::*, executor::*, ConnectOptions, DatabaseConnection, DatabaseTransaction,
@ -20,9 +21,16 @@ use super::sqlx_common::*;
pub struct SqlxPostgresConnector;
/// Defines a sqlx PostgreSQL pool
#[derive(Debug, Clone)]
#[derive(Clone)]
pub struct SqlxPostgresPoolConnection {
pool: PgPool,
metric_callback: Option<crate::metric::Callback>,
}
impl std::fmt::Debug for SqlxPostgresPoolConnection {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "SqlxPostgresPoolConnection {{ pool: {:?} }}", self.pool)
}
}
impl SqlxPostgresConnector {
@ -32,6 +40,7 @@ impl SqlxPostgresConnector {
}
/// Add configuration options for the PostgreSQL database
#[instrument(level = "trace")]
pub async fn connect(options: ConnectOptions) -> Result<DatabaseConnection, DbErr> {
let mut opt = options
.url
@ -43,7 +52,10 @@ impl SqlxPostgresConnector {
}
match options.pool_options().connect_with(opt).await {
Ok(pool) => Ok(DatabaseConnection::SqlxPostgresPoolConnection(
SqlxPostgresPoolConnection { pool },
SqlxPostgresPoolConnection {
pool,
metric_callback: None,
},
)),
Err(e) => Err(sqlx_error_to_conn_err(e)),
}
@ -53,21 +65,27 @@ impl SqlxPostgresConnector {
impl SqlxPostgresConnector {
/// Instantiate a sqlx pool connection to a [DatabaseConnection]
pub fn from_sqlx_postgres_pool(pool: PgPool) -> DatabaseConnection {
DatabaseConnection::SqlxPostgresPoolConnection(SqlxPostgresPoolConnection { pool })
DatabaseConnection::SqlxPostgresPoolConnection(SqlxPostgresPoolConnection {
pool,
metric_callback: None,
})
}
}
impl SqlxPostgresPoolConnection {
/// Execute a [Statement] on a PostgreSQL backend
#[instrument(level = "trace")]
pub async fn execute(&self, stmt: Statement) -> Result<ExecResult, DbErr> {
debug_print!("{}", stmt);
let query = sqlx_query(&stmt);
if let Ok(conn) = &mut self.pool.acquire().await {
match query.execute(conn).await {
Ok(res) => Ok(res.into()),
Err(err) => Err(sqlx_error_to_exec_err(err)),
}
crate::metric::metric!(self.metric_callback, &stmt, {
match query.execute(conn).await {
Ok(res) => Ok(res.into()),
Err(err) => Err(sqlx_error_to_exec_err(err)),
}
})
} else {
Err(DbErr::Exec(
"Failed to acquire connection from pool.".to_owned(),
@ -76,18 +94,21 @@ impl SqlxPostgresPoolConnection {
}
/// Get one result from a SQL query. Returns [Option::None] if no match was found
#[instrument(level = "trace")]
pub async fn query_one(&self, stmt: Statement) -> Result<Option<QueryResult>, DbErr> {
debug_print!("{}", stmt);
let query = sqlx_query(&stmt);
if let Ok(conn) = &mut self.pool.acquire().await {
match query.fetch_one(conn).await {
Ok(row) => Ok(Some(row.into())),
Err(err) => match err {
sqlx::Error::RowNotFound => Ok(None),
_ => Err(DbErr::Query(err.to_string())),
},
}
crate::metric::metric!(self.metric_callback, &stmt, {
match query.fetch_one(conn).await {
Ok(row) => Ok(Some(row.into())),
Err(err) => match err {
sqlx::Error::RowNotFound => Ok(None),
_ => Err(DbErr::Query(err.to_string())),
},
}
})
} else {
Err(DbErr::Query(
"Failed to acquire connection from pool.".to_owned(),
@ -96,15 +117,18 @@ impl SqlxPostgresPoolConnection {
}
/// Get the results of a query, returning them as a Vec<[QueryResult]>
#[instrument(level = "trace")]
pub async fn query_all(&self, stmt: Statement) -> Result<Vec<QueryResult>, DbErr> {
debug_print!("{}", stmt);
let query = sqlx_query(&stmt);
if let Ok(conn) = &mut self.pool.acquire().await {
match query.fetch_all(conn).await {
Ok(rows) => Ok(rows.into_iter().map(|r| r.into()).collect()),
Err(err) => Err(sqlx_error_to_query_err(err)),
}
crate::metric::metric!(self.metric_callback, &stmt, {
match query.fetch_all(conn).await {
Ok(rows) => Ok(rows.into_iter().map(|r| r.into()).collect()),
Err(err) => Err(sqlx_error_to_query_err(err)),
}
})
} else {
Err(DbErr::Query(
"Failed to acquire connection from pool.".to_owned(),
@ -113,11 +137,16 @@ impl SqlxPostgresPoolConnection {
}
/// Stream the results of executing a SQL query
#[instrument(level = "trace")]
pub async fn stream(&self, stmt: Statement) -> Result<QueryStream, DbErr> {
debug_print!("{}", stmt);
if let Ok(conn) = self.pool.acquire().await {
Ok(QueryStream::from((conn, stmt)))
Ok(QueryStream::from((
conn,
stmt,
self.metric_callback.clone(),
)))
} else {
Err(DbErr::Query(
"Failed to acquire connection from pool.".to_owned(),
@ -126,9 +155,10 @@ impl SqlxPostgresPoolConnection {
}
/// Bundle a set of SQL statements that execute together.
#[instrument(level = "trace")]
pub async fn begin(&self) -> Result<DatabaseTransaction, DbErr> {
if let Ok(conn) = self.pool.acquire().await {
DatabaseTransaction::new_postgres(conn).await
DatabaseTransaction::new_postgres(conn, self.metric_callback.clone()).await
} else {
Err(DbErr::Query(
"Failed to acquire connection from pool.".to_owned(),
@ -137,6 +167,7 @@ impl SqlxPostgresPoolConnection {
}
/// Create a PostgreSQL transaction
#[instrument(level = "trace", skip(callback))]
pub async fn transaction<F, T, E>(&self, callback: F) -> Result<T, TransactionError<E>>
where
F: for<'b> FnOnce(
@ -147,7 +178,7 @@ impl SqlxPostgresPoolConnection {
E: std::error::Error + Send,
{
if let Ok(conn) = self.pool.acquire().await {
let transaction = DatabaseTransaction::new_postgres(conn)
let transaction = DatabaseTransaction::new_postgres(conn, self.metric_callback.clone())
.await
.map_err(|e| TransactionError::Connection(e))?;
transaction.run(callback).await
@ -157,6 +188,13 @@ impl SqlxPostgresPoolConnection {
)))
}
}
pub(crate) fn set_metric_callback<F>(&mut self, callback: F)
where
F: Fn(&crate::metric::Info<'_>) + Send + Sync + 'static,
{
self.metric_callback = Some(Arc::new(callback));
}
}
impl From<PgRow> for QueryResult {

View File

@ -1,4 +1,4 @@
use std::{future::Future, pin::Pin};
use std::{future::Future, pin::Pin, sync::Arc};
use sqlx::{
sqlite::{SqliteArguments, SqliteConnectOptions, SqliteQueryResult, SqliteRow},
@ -7,6 +7,7 @@ use sqlx::{
sea_query::sea_query_driver_sqlite!();
use sea_query_driver_sqlite::bind_query;
use tracing::instrument;
use crate::{
debug_print, error::*, executor::*, ConnectOptions, DatabaseConnection, DatabaseTransaction,
@ -20,9 +21,16 @@ use super::sqlx_common::*;
pub struct SqlxSqliteConnector;
/// Defines a sqlx SQLite pool
#[derive(Debug, Clone)]
#[derive(Clone)]
pub struct SqlxSqlitePoolConnection {
pool: SqlitePool,
metric_callback: Option<crate::metric::Callback>,
}
impl std::fmt::Debug for SqlxSqlitePoolConnection {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "SqlxSqlitePoolConnection {{ pool: {:?} }}", self.pool)
}
}
impl SqlxSqliteConnector {
@ -32,6 +40,7 @@ impl SqlxSqliteConnector {
}
/// Add configuration options for the SQLite database
#[instrument(level = "trace")]
pub async fn connect(options: ConnectOptions) -> Result<DatabaseConnection, DbErr> {
let mut options = options;
let mut opt = options
@ -47,7 +56,10 @@ impl SqlxSqliteConnector {
}
match options.pool_options().connect_with(opt).await {
Ok(pool) => Ok(DatabaseConnection::SqlxSqlitePoolConnection(
SqlxSqlitePoolConnection { pool },
SqlxSqlitePoolConnection {
pool,
metric_callback: None,
},
)),
Err(e) => Err(sqlx_error_to_conn_err(e)),
}
@ -57,21 +69,27 @@ impl SqlxSqliteConnector {
impl SqlxSqliteConnector {
/// Instantiate a sqlx pool connection to a [DatabaseConnection]
pub fn from_sqlx_sqlite_pool(pool: SqlitePool) -> DatabaseConnection {
DatabaseConnection::SqlxSqlitePoolConnection(SqlxSqlitePoolConnection { pool })
DatabaseConnection::SqlxSqlitePoolConnection(SqlxSqlitePoolConnection {
pool,
metric_callback: None,
})
}
}
impl SqlxSqlitePoolConnection {
/// Execute a [Statement] on a SQLite backend
#[instrument(level = "trace")]
pub async fn execute(&self, stmt: Statement) -> Result<ExecResult, DbErr> {
debug_print!("{}", stmt);
let query = sqlx_query(&stmt);
if let Ok(conn) = &mut self.pool.acquire().await {
match query.execute(conn).await {
Ok(res) => Ok(res.into()),
Err(err) => Err(sqlx_error_to_exec_err(err)),
}
crate::metric::metric!(self.metric_callback, &stmt, {
match query.execute(conn).await {
Ok(res) => Ok(res.into()),
Err(err) => Err(sqlx_error_to_exec_err(err)),
}
})
} else {
Err(DbErr::Exec(
"Failed to acquire connection from pool.".to_owned(),
@ -80,18 +98,21 @@ impl SqlxSqlitePoolConnection {
}
/// Get one result from a SQL query. Returns [Option::None] if no match was found
#[instrument(level = "trace")]
pub async fn query_one(&self, stmt: Statement) -> Result<Option<QueryResult>, DbErr> {
debug_print!("{}", stmt);
let query = sqlx_query(&stmt);
if let Ok(conn) = &mut self.pool.acquire().await {
match query.fetch_one(conn).await {
Ok(row) => Ok(Some(row.into())),
Err(err) => match err {
sqlx::Error::RowNotFound => Ok(None),
_ => Err(DbErr::Query(err.to_string())),
},
}
crate::metric::metric!(self.metric_callback, &stmt, {
match query.fetch_one(conn).await {
Ok(row) => Ok(Some(row.into())),
Err(err) => match err {
sqlx::Error::RowNotFound => Ok(None),
_ => Err(DbErr::Query(err.to_string())),
},
}
})
} else {
Err(DbErr::Query(
"Failed to acquire connection from pool.".to_owned(),
@ -100,15 +121,18 @@ impl SqlxSqlitePoolConnection {
}
/// Get the results of a query, returning them as a Vec<[QueryResult]>
#[instrument(level = "trace")]
pub async fn query_all(&self, stmt: Statement) -> Result<Vec<QueryResult>, DbErr> {
debug_print!("{}", stmt);
let query = sqlx_query(&stmt);
if let Ok(conn) = &mut self.pool.acquire().await {
match query.fetch_all(conn).await {
Ok(rows) => Ok(rows.into_iter().map(|r| r.into()).collect()),
Err(err) => Err(sqlx_error_to_query_err(err)),
}
crate::metric::metric!(self.metric_callback, &stmt, {
match query.fetch_all(conn).await {
Ok(rows) => Ok(rows.into_iter().map(|r| r.into()).collect()),
Err(err) => Err(sqlx_error_to_query_err(err)),
}
})
} else {
Err(DbErr::Query(
"Failed to acquire connection from pool.".to_owned(),
@ -117,11 +141,16 @@ impl SqlxSqlitePoolConnection {
}
/// Stream the results of executing a SQL query
#[instrument(level = "trace")]
pub async fn stream(&self, stmt: Statement) -> Result<QueryStream, DbErr> {
debug_print!("{}", stmt);
if let Ok(conn) = self.pool.acquire().await {
Ok(QueryStream::from((conn, stmt)))
Ok(QueryStream::from((
conn,
stmt,
self.metric_callback.clone(),
)))
} else {
Err(DbErr::Query(
"Failed to acquire connection from pool.".to_owned(),
@ -130,9 +159,10 @@ impl SqlxSqlitePoolConnection {
}
/// Bundle a set of SQL statements that execute together.
#[instrument(level = "trace")]
pub async fn begin(&self) -> Result<DatabaseTransaction, DbErr> {
if let Ok(conn) = self.pool.acquire().await {
DatabaseTransaction::new_sqlite(conn).await
DatabaseTransaction::new_sqlite(conn, self.metric_callback.clone()).await
} else {
Err(DbErr::Query(
"Failed to acquire connection from pool.".to_owned(),
@ -141,6 +171,7 @@ impl SqlxSqlitePoolConnection {
}
/// Create a SQLite transaction
#[instrument(level = "trace", skip(callback))]
pub async fn transaction<F, T, E>(&self, callback: F) -> Result<T, TransactionError<E>>
where
F: for<'b> FnOnce(
@ -151,7 +182,7 @@ impl SqlxSqlitePoolConnection {
E: std::error::Error + Send,
{
if let Ok(conn) = self.pool.acquire().await {
let transaction = DatabaseTransaction::new_sqlite(conn)
let transaction = DatabaseTransaction::new_sqlite(conn, self.metric_callback.clone())
.await
.map_err(|e| TransactionError::Connection(e))?;
transaction.run(callback).await
@ -161,6 +192,13 @@ impl SqlxSqlitePoolConnection {
)))
}
}
pub(crate) fn set_metric_callback<F>(&mut self, callback: F)
where
F: Fn(&crate::metric::Info<'_>) + Send + Sync + 'static,
{
self.metric_callback = Some(Arc::new(callback));
}
}
impl From<SqliteRow> for QueryResult {

View File

@ -5,35 +5,47 @@ use async_trait::async_trait;
use sea_query::{Nullable, ValueTuple};
use std::fmt::Debug;
/// Defines a value from an ActiveModel and its state.
/// The field `value` takes in an [Option] type where `Option::Some(V)` , with `V` holding
/// the value that operations like `UPDATE` are being performed on and
/// the `state` field is either `ActiveValueState::Set` or `ActiveValueState::Unchanged`.
/// [Option::None] in the `value` field indicates no value being performed by an operation
/// and that the `state` field of the [ActiveValue] is set to `ActiveValueState::Unset` .
/// #### Example snippet
/// ```no_run
/// // The code snipped below does an UPDATE operation on a [ActiveValue]
/// // yielding the the SQL statement ` r#"UPDATE "fruit" SET "name" = 'Orange' WHERE "fruit"."id" = 1"# `
pub use ActiveValue::NotSet;
/// Defines a stateful value used in ActiveModel.
///
/// There are three possible states represented by three enum variants.
/// - [ActiveValue::Set]: A defined [Value] actively being set
/// - [ActiveValue::Unchanged]: A defined [Value] that remains unchanged
/// - [ActiveValue::NotSet]: An undefined [Value]
///
/// The stateful value is useful when constructing an UPDATE SQL statement;
/// see the example below.
///
/// # Examples
///
/// ```
/// use sea_orm::tests_cfg::{cake, fruit};
/// use sea_orm::{entity::*, query::*, DbBackend};
///
/// Update::one(fruit::ActiveModel {
/// id: ActiveValue::set(1),
/// name: ActiveValue::set("Orange".to_owned()),
/// cake_id: ActiveValue::unset(),
/// })
/// .build(DbBackend::Postgres)
/// .to_string();
/// // The code snippet below does an UPDATE operation on an `ActiveValue`
/// assert_eq!(
/// Update::one(fruit::ActiveModel {
/// id: ActiveValue::set(1),
/// name: ActiveValue::set("Orange".to_owned()),
/// cake_id: ActiveValue::not_set(),
/// })
/// .build(DbBackend::Postgres)
/// .to_string(),
/// r#"UPDATE "fruit" SET "name" = 'Orange' WHERE "fruit"."id" = 1"#
/// );
/// ```
#[derive(Clone, Debug, Default)]
pub struct ActiveValue<V>
#[derive(Clone, Debug)]
pub enum ActiveValue<V>
where
V: Into<Value>,
{
value: Option<V>,
state: ActiveValueState,
/// A defined [Value] actively being set
Set(V),
/// A defined [Value] that remains unchanged
Unchanged(V),
/// An undefined [Value]
NotSet,
}
/// Defines a set operation on an [ActiveValue]
@ -45,31 +57,22 @@ where
ActiveValue::set(v)
}
/// Defines an unset operation on an [ActiveValue]
/// Defines a not-set operation on an [ActiveValue]
#[deprecated(
since = "0.5.0",
note = "Please use [`ActiveValue::NotSet`] or [`NotSet`]"
)]
#[allow(non_snake_case)]
pub fn Unset<V>(_: Option<bool>) -> ActiveValue<V>
where
V: Into<Value>,
{
ActiveValue::unset()
ActiveValue::not_set()
}
// Defines the state of an [ActiveValue]
#[derive(Clone, Debug)]
enum ActiveValueState {
Set,
Unchanged,
Unset,
}
impl Default for ActiveValueState {
fn default() -> Self {
Self::Unset
}
}
#[doc(hidden)]
pub fn unchanged_active_value_not_intended_for_public_use<V>(value: V) -> ActiveValue<V>
/// Defines an unchanged operation on an [ActiveValue]
#[allow(non_snake_case)]
pub fn Unchanged<V>(value: V) -> ActiveValue<V>
where
V: Into<Value>,
{
@ -93,11 +96,11 @@ pub trait ActiveModelTrait: Clone + Debug {
/// Set the Value into an ActiveModel
fn set(&mut self, c: <Self::Entity as EntityTrait>::Column, v: Value);
/// Set the state of an [ActiveValue] to the Unset state
fn unset(&mut self, c: <Self::Entity as EntityTrait>::Column);
/// Set the state of an [ActiveValue] to [ActiveValue::NotSet]
fn not_set(&mut self, c: <Self::Entity as EntityTrait>::Column);
/// Check the state of an [ActiveValue]
fn is_unset(&self, c: <Self::Entity as EntityTrait>::Column) -> bool;
fn is_not_set(&self, c: <Self::Entity as EntityTrait>::Column) -> bool;
/// The default implementation of the ActiveModel
fn default() -> Self;
@ -172,7 +175,6 @@ pub trait ActiveModelTrait: Clone + Debug {
/// id: 15,
/// name: "Apple Pie".to_owned(),
/// }
/// .into_active_model()
/// );
///
/// assert_eq!(
@ -225,7 +227,6 @@ pub trait ActiveModelTrait: Clone + Debug {
/// id: 15,
/// name: "Apple Pie".to_owned(),
/// }
/// .into_active_model()
/// );
///
/// assert_eq!(
@ -247,17 +248,17 @@ pub trait ActiveModelTrait: Clone + Debug {
/// # Ok(())
/// # }
/// ```
async fn insert<'a, C>(self, db: &'a C) -> Result<Self, DbErr>
async fn insert<'a, C>(self, db: &'a C) -> Result<<Self::Entity as EntityTrait>::Model, DbErr>
where
<Self::Entity as EntityTrait>::Model: IntoActiveModel<Self>,
Self: ActiveModelBehavior + 'a,
C: ConnectionTrait<'a>,
{
let am = ActiveModelBehavior::before_save(self, true)?;
let am = <Self::Entity as EntityTrait>::insert(am)
let model = <Self::Entity as EntityTrait>::insert(am)
.exec_with_returning(db)
.await?;
ActiveModelBehavior::after_save(am, true)
Self::after_save(model, true)
}
/// Perform the `UPDATE` operation on an ActiveModel
@ -296,7 +297,6 @@ pub trait ActiveModelTrait: Clone + Debug {
/// name: "Orange".to_owned(),
/// cake_id: None,
/// }
/// .into_active_model()
/// );
///
/// assert_eq!(
@ -351,7 +351,6 @@ pub trait ActiveModelTrait: Clone + Debug {
/// name: "Orange".to_owned(),
/// cake_id: None,
/// }
/// .into_active_model()
/// );
///
/// assert_eq!(
@ -371,40 +370,39 @@ pub trait ActiveModelTrait: Clone + Debug {
/// # Ok(())
/// # }
/// ```
async fn update<'a, C>(self, db: &'a C) -> Result<Self, DbErr>
async fn update<'a, C>(self, db: &'a C) -> Result<<Self::Entity as EntityTrait>::Model, DbErr>
where
<Self::Entity as EntityTrait>::Model: IntoActiveModel<Self>,
Self: ActiveModelBehavior + 'a,
C: ConnectionTrait<'a>,
{
let am = ActiveModelBehavior::before_save(self, false)?;
let am = Self::Entity::update(am).exec(db).await?;
ActiveModelBehavior::after_save(am, false)
let model: <Self::Entity as EntityTrait>::Model = Self::Entity::update(am).exec(db).await?;
Self::after_save(model, false)
}
/// Insert the model if primary key is unset, update otherwise.
/// Insert the model if the primary key is [ActiveValue::NotSet], update otherwise.
/// Only works if the entity has an auto-increment primary key.
async fn save<'a, C>(self, db: &'a C) -> Result<Self, DbErr>
async fn save<'a, C>(self, db: &'a C) -> Result<<Self::Entity as EntityTrait>::Model, DbErr>
where
<Self::Entity as EntityTrait>::Model: IntoActiveModel<Self>,
Self: ActiveModelBehavior + 'a,
C: ConnectionTrait<'a>,
{
let mut am = self;
let am = self;
let mut is_update = true;
for key in <Self::Entity as EntityTrait>::PrimaryKey::iter() {
let col = key.into_column();
if am.is_unset(col) {
if am.is_not_set(col) {
is_update = false;
break;
}
}
if !is_update {
am = am.insert(db).await?;
am.insert(db).await
} else {
am = am.update(db).await?;
am.update(db).await
}
Ok(am)
}
/// Delete an active model by its primary key
@ -503,8 +501,11 @@ pub trait ActiveModelBehavior: ActiveModelTrait {
}
/// Will be called after saving
fn after_save(self, insert: bool) -> Result<Self, DbErr> {
Ok(self)
fn after_save(
model: <Self::Entity as EntityTrait>::Model,
insert: bool,
) -> Result<<Self::Entity as EntityTrait>::Model, DbErr> {
Ok(model)
}
/// Will be called before deleting
@ -557,7 +558,7 @@ macro_rules! impl_into_active_value {
fn into_active_value(self) -> ActiveValue<Option<$ty>> {
match self {
Some(value) => Set(Some(value)),
None => Unset(None),
None => NotSet,
}
}
}
@ -566,7 +567,7 @@ macro_rules! impl_into_active_value {
fn into_active_value(self) -> ActiveValue<Option<$ty>> {
match self {
Some(value) => Set(value),
None => Unset(None),
None => NotSet,
}
}
}
@ -615,74 +616,80 @@ impl_into_active_value!(crate::prelude::Decimal, Set);
#[cfg_attr(docsrs, doc(cfg(feature = "with-uuid")))]
impl_into_active_value!(crate::prelude::Uuid, Set);
impl<V> Default for ActiveValue<V>
where
V: Into<Value>,
{
fn default() -> Self {
Self::NotSet
}
}
impl<V> ActiveValue<V>
where
V: Into<Value>,
{
/// Set the value of an [ActiveValue] and also set its state to `ActiveValueState::Set`
/// Create an [ActiveValue::Set]
pub fn set(value: V) -> Self {
Self {
value: Some(value),
state: ActiveValueState::Set,
}
Self::Set(value)
}
/// Check if the state of an [ActiveValue] is `ActiveValueState::Set` which returns true
/// Check if the [ActiveValue] is [ActiveValue::Set]
pub fn is_set(&self) -> bool {
matches!(self.state, ActiveValueState::Set)
matches!(self, Self::Set(_))
}
pub(crate) fn unchanged(value: V) -> Self {
Self {
value: Some(value),
state: ActiveValueState::Unchanged,
}
/// Create an [ActiveValue::Unchanged]
pub fn unchanged(value: V) -> Self {
Self::Unchanged(value)
}
/// Check if the status of the [ActiveValue] is `ActiveValueState::Unchanged`
/// which returns `true` if it is
/// Check if the [ActiveValue] is [ActiveValue::Unchanged]
pub fn is_unchanged(&self) -> bool {
matches!(self.state, ActiveValueState::Unchanged)
matches!(self, Self::Unchanged(_))
}
/// Set the `value` field of the ActiveModel to [Option::None] and the
/// `state` field to `ActiveValueState::Unset`
pub fn unset() -> Self {
Self {
value: None,
state: ActiveValueState::Unset,
/// Create an [ActiveValue::NotSet]
pub fn not_set() -> Self {
Self::default()
}
/// Check if the [ActiveValue] is [ActiveValue::NotSet]
pub fn is_not_set(&self) -> bool {
matches!(self, Self::NotSet)
}
/// Take ownership of the inner value of an [ActiveValue],
/// leaving [ActiveValue::NotSet] in its place
pub fn take(&mut self) -> Option<V> {
match std::mem::take(self) {
ActiveValue::Set(value) | ActiveValue::Unchanged(value) => Some(value),
ActiveValue::NotSet => None,
}
}
/// Check if the state of an [ActiveValue] is `ActiveValueState::Unset`
/// which returns true if it is
pub fn is_unset(&self) -> bool {
matches!(self.state, ActiveValueState::Unset)
}
/// Get the mutable value of the `value` field of an [ActiveValue]
/// also setting it's state to `ActiveValueState::Unset`
pub fn take(&mut self) -> Option<V> {
self.state = ActiveValueState::Unset;
self.value.take()
}
/// Get an owned value of the `value` field of the [ActiveValue]
/// Get an owned value of the [ActiveValue]
pub fn unwrap(self) -> V {
self.value.unwrap()
match self {
ActiveValue::Set(value) | ActiveValue::Unchanged(value) => value,
ActiveValue::NotSet => panic!("Cannot unwrap ActiveValue::NotSet"),
}
}
/// Check if a [Value] exists or not
pub fn into_value(self) -> Option<Value> {
self.value.map(Into::into)
match self {
ActiveValue::Set(value) | ActiveValue::Unchanged(value) => Some(value.into()),
ActiveValue::NotSet => None,
}
}
/// Wrap the [Value] into an `ActiveValue<Value>`
pub fn into_wrapped_value(self) -> ActiveValue<Value> {
match self.state {
ActiveValueState::Set => ActiveValue::set(self.into_value().unwrap()),
ActiveValueState::Unchanged => ActiveValue::unchanged(self.into_value().unwrap()),
ActiveValueState::Unset => ActiveValue::unset(),
match self {
Self::Set(value) => ActiveValue::set(value.into()),
Self::Unchanged(value) => ActiveValue::unchanged(value.into()),
Self::NotSet => ActiveValue::not_set(),
}
}
}
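With the three states now plain enum variants, the accessors above reduce to pattern matches over `Set`, `Unchanged`, and `NotSet`. A simplified standalone sketch of the same semantics (without the `Into<Value>` bound of the real type):

```rust
// Simplified stand-in for ActiveValue<V>: three states, with NotSet as the default.
#[derive(Clone, Debug, PartialEq)]
enum ActiveValue<V> {
    Set(V),
    Unchanged(V),
    NotSet,
}

impl<V> Default for ActiveValue<V> {
    fn default() -> Self {
        Self::NotSet
    }
}

impl<V> ActiveValue<V> {
    // take() moves the inner value out and leaves NotSet behind.
    fn take(&mut self) -> Option<V> {
        match std::mem::take(self) {
            Self::Set(v) | Self::Unchanged(v) => Some(v),
            Self::NotSet => None,
        }
    }
}

fn main() {
    let mut name = ActiveValue::Set("Orange".to_owned());
    assert_eq!(name.take(), Some("Orange".to_owned()));
    assert_eq!(name, ActiveValue::NotSet);
}
```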
@ -692,7 +699,10 @@ where
V: Into<Value>,
{
fn as_ref(&self) -> &V {
self.value.as_ref().unwrap()
match self {
ActiveValue::Set(value) | ActiveValue::Unchanged(value) => value,
ActiveValue::NotSet => panic!("Cannot borrow ActiveValue::NotSet"),
}
}
}
@ -701,7 +711,12 @@ where
V: Into<Value> + std::cmp::PartialEq,
{
fn eq(&self, other: &Self) -> bool {
self.value.as_ref() == other.value.as_ref()
match (self, other) {
(ActiveValue::Set(l), ActiveValue::Set(r)) => l == r,
(ActiveValue::Unchanged(l), ActiveValue::Unchanged(r)) => l == r,
(ActiveValue::NotSet, ActiveValue::NotSet) => true,
_ => false,
}
}
}
@ -710,10 +725,10 @@ where
V: Into<Value> + Nullable,
{
fn from(value: ActiveValue<V>) -> Self {
match value.state {
ActiveValueState::Set => Set(value.value),
ActiveValueState::Unset => Unset(None),
ActiveValueState::Unchanged => ActiveValue::unchanged(value.value),
match value {
ActiveValue::Set(value) => ActiveValue::set(Some(value)),
ActiveValue::Unchanged(value) => ActiveValue::unchanged(Some(value)),
ActiveValue::NotSet => ActiveValue::not_set(),
}
}
}
@ -748,7 +763,7 @@ mod tests {
}
.into_active_model(),
fruit::ActiveModel {
id: Unset(None),
id: NotSet,
name: Set("Apple".to_owned()),
cake_id: Set(Some(1)),
}
@ -777,8 +792,8 @@ mod tests {
}
.into_active_model(),
fruit::ActiveModel {
id: Unset(None),
name: Unset(None),
id: NotSet,
name: NotSet,
cake_id: Set(Some(1)),
}
);
@ -789,8 +804,8 @@ mod tests {
}
.into_active_model(),
fruit::ActiveModel {
id: Unset(None),
name: Unset(None),
id: NotSet,
name: NotSet,
cake_id: Set(None),
}
);
@ -798,9 +813,9 @@ mod tests {
assert_eq!(
my_fruit::UpdateFruit { cake_id: None }.into_active_model(),
fruit::ActiveModel {
id: Unset(None),
name: Unset(None),
cake_id: Unset(None),
id: NotSet,
name: NotSet,
cake_id: NotSet,
}
);
}

View File

@ -505,7 +505,6 @@ pub trait EntityTrait: EntityName {
/// name: "Orange".to_owned(),
/// cake_id: None,
/// }
/// .into_active_model(),
/// );
///
/// assert_eq!(
@ -563,7 +562,6 @@ pub trait EntityTrait: EntityName {
/// name: "Orange".to_owned(),
/// cake_id: None,
/// }
/// .into_active_model(),
/// );
///
/// assert_eq!(

View File

@ -30,7 +30,7 @@ pub trait Linked {
select.query().join_as(
JoinType::InnerJoin,
unpack_table_ref(&rel.from_tbl),
rel.from_tbl,
SeaRc::clone(&from_tbl),
join_tbl_on_condition(from_tbl, to_tbl, rel.from_col, rel.to_col),
);

View File

@ -1,11 +1,14 @@
use crate::{
DbErr, EntityTrait, Linked, QueryFilter, QueryResult, Related, Select, SelectModel,
SelectorRaw, Statement,
ActiveModelBehavior, ActiveModelTrait, ConnectionTrait, DbErr, DeleteResult, EntityTrait,
IntoActiveModel, Linked, QueryFilter, QueryResult, Related, Select, SelectModel, SelectorRaw,
Statement,
};
use async_trait::async_trait;
pub use sea_query::Value;
use std::fmt::Debug;
/// A Trait for a Model
#[async_trait]
pub trait ModelTrait: Clone + Send + Debug {
#[allow(missing_docs)]
type Entity: EntityTrait;
@ -33,6 +36,16 @@ pub trait ModelTrait: Clone + Send + Debug {
let tbl_alias = &format!("r{}", l.link().len() - 1);
l.find_linked().belongs_to_tbl_alias(self, tbl_alias)
}
/// Delete a model
async fn delete<'a, A, C>(self, db: &'a C) -> Result<DeleteResult, DbErr>
where
Self: IntoActiveModel<A>,
C: ConnectionTrait<'a>,
A: ActiveModelTrait<Entity = Self::Entity> + ActiveModelBehavior + Send + 'a,
{
self.into_active_model().delete(db).await
}
}
/// A Trait for implementing a [QueryResult]

View File

@ -55,7 +55,7 @@ where
pub fn exec_with_returning<'a, C>(
self,
db: &'a C,
) -> impl Future<Output = Result<A, DbErr>> + '_
) -> impl Future<Output = Result<<A::Entity as EntityTrait>::Model, DbErr>> + '_
where
<A::Entity as EntityTrait>::Model: IntoActiveModel<A>,
C: ConnectionTrait<'a>,
@ -92,13 +92,13 @@ where
pub fn exec_with_returning<'a, C>(
self,
db: &'a C,
) -> impl Future<Output = Result<A, DbErr>> + '_
) -> impl Future<Output = Result<<A::Entity as EntityTrait>::Model, DbErr>> + '_
where
<A::Entity as EntityTrait>::Model: IntoActiveModel<A>,
C: ConnectionTrait<'a>,
A: 'a,
{
exec_insert_with_returning(self.primary_key, self.query, db)
exec_insert_with_returning::<A, _>(self.primary_key, self.query, db)
}
}
@ -140,7 +140,7 @@ async fn exec_insert_with_returning<'a, A, C>(
primary_key: Option<ValueTuple>,
mut insert_statement: InsertStatement,
db: &'a C,
) -> Result<A, DbErr>
) -> Result<<A::Entity as EntityTrait>::Model, DbErr>
where
<A::Entity as EntityTrait>::Model: IntoActiveModel<A>,
C: ConnectionTrait<'a>,
@ -175,7 +175,7 @@ where
}
};
match found {
Some(model) => Ok(model.into_active_model()),
Some(model) => Ok(model),
None => Err(DbErr::Exec("Failed to find inserted item".to_owned())),
}
}

View File

@ -199,15 +199,20 @@ macro_rules! try_getable_mysql {
};
}
macro_rules! try_getable_postgres {
macro_rules! try_getable_date_time {
( $type: ty ) => {
impl TryGetable for $type {
fn try_get(res: &QueryResult, pre: &str, col: &str) -> Result<Self, TryGetError> {
let _column = format!("{}{}", pre, col);
match &res.row {
#[cfg(feature = "sqlx-mysql")]
QueryResultRow::SqlxMySql(_) => {
panic!("{} unsupported by sqlx-mysql", stringify!($type))
QueryResultRow::SqlxMySql(row) => {
use chrono::{DateTime, Utc};
use sqlx::Row;
row.try_get::<Option<DateTime<Utc>>, _>(_column.as_str())
.map_err(|e| TryGetError::DbErr(crate::sqlx_error_to_query_err(e)))
.and_then(|opt| opt.ok_or(TryGetError::Null))
.map(|v| v.into())
}
#[cfg(feature = "sqlx-postgres")]
QueryResultRow::SqlxPostgres(row) => {
@ -217,8 +222,13 @@ macro_rules! try_getable_postgres {
.and_then(|opt| opt.ok_or(TryGetError::Null))
}
#[cfg(feature = "sqlx-sqlite")]
QueryResultRow::SqlxSqlite(_) => {
panic!("{} unsupported by sqlx-sqlite", stringify!($type))
QueryResultRow::SqlxSqlite(row) => {
use chrono::{DateTime, Utc};
use sqlx::Row;
row.try_get::<Option<DateTime<Utc>>, _>(_column.as_str())
.map_err(|e| TryGetError::DbErr(crate::sqlx_error_to_query_err(e)))
.and_then(|opt| opt.ok_or(TryGetError::Null))
.map(|v| v.into())
}
#[cfg(feature = "mock")]
#[allow(unused_variables)]
@ -259,7 +269,7 @@ try_getable_all!(chrono::NaiveTime);
try_getable_all!(chrono::NaiveDateTime);
#[cfg(feature = "with-chrono")]
try_getable_postgres!(chrono::DateTime<chrono::FixedOffset>);
try_getable_date_time!(chrono::DateTime<chrono::FixedOffset>);
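The `try_getable_date_time!` macro reads MySQL and SQLite timestamp columns as `DateTime<Utc>` and converts to the requested `DateTime<FixedOffset>` with `.map(|v| v.into())`. That conversion comes from chrono's `From` impl; shown in isolation below (assuming the `chrono` crate is available):

```rust
// chrono converts DateTime<Utc> into DateTime<FixedOffset> losslessly via From,
// which is what the `.map(|v| v.into())` in the macro above relies on.
use chrono::{DateTime, FixedOffset, Utc};

fn main() {
    let utc: DateTime<Utc> = Utc::now();
    let fixed: DateTime<FixedOffset> = utc.into();
    assert_eq!(utc, fixed); // same instant, offset is +00:00
}
```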
#[cfg(feature = "with-rust_decimal")]
use rust_decimal::Decimal;

View File

@ -28,6 +28,7 @@ where
S: SelectorTrait,
{
stmt: Statement,
#[allow(dead_code)]
selector: S,
}

View File

@ -1,6 +1,6 @@
use crate::{
error::*, ActiveModelTrait, ColumnTrait, ConnectionTrait, EntityTrait, IntoActiveModel,
Iterable, SelectModel, SelectorRaw, Statement, UpdateMany, UpdateOne,
error::*, ActiveModelTrait, ColumnTrait, ConnectionTrait, EntityTrait, Iterable, SelectModel,
SelectorRaw, Statement, UpdateMany, UpdateOne,
};
use sea_query::{Alias, Expr, FromValueTuple, Query, UpdateStatement};
use std::future::Future;
@ -24,9 +24,8 @@ where
A: ActiveModelTrait,
{
/// Execute an update operation on an ActiveModel
pub async fn exec<'b, C>(self, db: &'b C) -> Result<A, DbErr>
pub async fn exec<'b, C>(self, db: &'b C) -> Result<<A::Entity as EntityTrait>::Model, DbErr>
where
<A::Entity as EntityTrait>::Model: IntoActiveModel<A>,
C: ConnectionTrait<'b>,
{
// so that self is dropped before entering await
@ -84,9 +83,8 @@ async fn exec_update_and_return_updated<'a, A, C>(
mut query: UpdateStatement,
model: A,
db: &'a C,
) -> Result<A, DbErr>
) -> Result<<A::Entity as EntityTrait>::Model, DbErr>
where
<A::Entity as EntityTrait>::Model: IntoActiveModel<A>,
A: ActiveModelTrait,
C: ConnectionTrait<'a>,
{
@ -112,7 +110,7 @@ where
.await?;
// If we got `None` then we are updating a row that does not exist.
match found {
Some(model) => Ok(model.into_active_model()),
Some(model) => Ok(model),
None => Err(DbErr::RecordNotFound(
"None of the database rows are affected".to_owned(),
)),
@ -130,7 +128,7 @@ where
.await?;
// If we cannot select the updated row from db by the cached primary key
match found {
Some(model) => Ok(model.into_active_model()),
Some(model) => Ok(model),
None => Err(DbErr::Exec("Failed to find inserted item".to_owned())),
}
}
@ -196,7 +194,6 @@ mod tests {
id: 1,
name: "Cheese Cake".to_owned(),
}
.into_active_model()
);
let model = cake::Model {

View File

@ -184,7 +184,7 @@
//! pear.name = Set("Sweet pear".to_owned());
//!
//! // update one
//! let pear: fruit::ActiveModel = pear.update(db).await?;
//! let pear: fruit::Model = pear.update(db).await?;
//!
//! // update many: UPDATE "fruit" SET "cake_id" = NULL WHERE "fruit"."name" LIKE '%Apple%'
//! Fruit::update_many()
@ -201,13 +201,13 @@
//! # use sea_orm::{DbConn, error::*, entity::*, query::*, tests_cfg::*};
//! # async fn function(db: &DbConn) -> Result<(), DbErr> {
//! let banana = fruit::ActiveModel {
//! id: Unset(None),
//! id: NotSet,
//! name: Set("Banana".to_owned()),
//! ..Default::default()
//! };
//!
//! // create, because primary key `id` is `Unset`
//! let mut banana = banana.save(db).await?;
//! // create, because primary key `id` is `NotSet`
//! let mut banana = banana.save(db).await?.into_active_model();
//!
//! banana.name = Set("Banana Mongo".to_owned());
//!
@ -221,13 +221,16 @@
//! ```
//! # use sea_orm::{DbConn, error::*, entity::*, query::*, tests_cfg::*};
//! # async fn function(db: &DbConn) -> Result<(), DbErr> {
//! let orange: Option<fruit::Model> = Fruit::find_by_id(1).one(db).await?;
//! let orange: fruit::ActiveModel = orange.unwrap().into();
//!
//! // delete one
//! fruit::Entity::delete(orange).exec(db).await?;
//! let orange: Option<fruit::Model> = Fruit::find_by_id(1).one(db).await?;
//! let orange: fruit::Model = orange.unwrap();
//! fruit::Entity::delete(orange.into_active_model())
//! .exec(db)
//! .await?;
//!
//! // or simply
//! # let orange: fruit::ActiveModel = Fruit::find_by_id(1).one(db).await.unwrap().unwrap().into();
//! let orange: Option<fruit::Model> = Fruit::find_by_id(1).one(db).await?;
//! let orange: fruit::Model = orange.unwrap();
//! orange.delete(db).await?;
//!
//! // delete many: DELETE FROM "fruit" WHERE "fruit"."name" LIKE 'Orange'
@ -276,6 +279,8 @@ pub mod entity;
pub mod error;
/// This module performs execution of queries on a Model or ActiveModel
mod executor;
/// Holds types and methods to perform metric collection
pub mod metric;
/// Holds types and methods to perform queries
pub mod query;
/// Holds types that defines the schemas of an Entity

54
src/metric.rs Normal file
View File

@ -0,0 +1,54 @@
use std::{sync::Arc, time::Duration};
pub(crate) type Callback = Arc<dyn Fn(&Info<'_>) + Send + Sync>;
#[allow(unused_imports)]
pub(crate) use inner::{metric, metric_ok};
#[derive(Debug)]
/// Query execution info
pub struct Info<'a> {
/// Query execution duration
pub elapsed: Duration,
/// Query data
pub statement: &'a crate::Statement,
/// Whether the query execution failed
pub failed: bool,
}
mod inner {
#[allow(unused_macros)]
macro_rules! metric {
($metric_callback:expr, $stmt:expr, $code:block) => {{
let _start = std::time::SystemTime::now();
let res = $code;
if let Some(callback) = $metric_callback.as_deref() {
let info = crate::metric::Info {
elapsed: _start.elapsed().unwrap_or_default(),
statement: $stmt,
failed: res.is_err(),
};
callback(&info);
}
res
}};
}
pub(crate) use metric;
#[allow(unused_macros)]
macro_rules! metric_ok {
($metric_callback:expr, $stmt:expr, $code:block) => {{
let _start = std::time::SystemTime::now();
let res = $code;
if let Some(callback) = $metric_callback.as_deref() {
let info = crate::metric::Info {
elapsed: _start.elapsed().unwrap_or_default(),
statement: $stmt,
failed: false,
};
callback(&info);
}
res
}};
}
pub(crate) use metric_ok;
}
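Both macros follow the same shape: time the block, then hand the elapsed duration, the statement, and a failure flag to the callback if one is registered. A self-contained sketch of that timing pattern, with plain std types standing in for the crate's `Callback` and `Info`:

```rust
// Stand-in for the metric! pattern: time a fallible block, then notify an
// optional callback with the elapsed duration and a failure flag.
use std::{sync::Arc, time::{Duration, SystemTime}};

type Callback = Arc<dyn Fn(Duration, bool) + Send + Sync>;

fn with_metric<T, E>(cb: &Option<Callback>, block: impl FnOnce() -> Result<T, E>) -> Result<T, E> {
    let start = SystemTime::now();
    let res = block();
    if let Some(cb) = cb.as_deref() {
        cb(start.elapsed().unwrap_or_default(), res.is_err());
    }
    res
}

fn main() {
    let cb: Option<Callback> = Some(Arc::new(|elapsed, failed| {
        println!("query took {:?}, failed: {}", elapsed, failed);
    }));
    let value = with_metric(&cb, || Ok::<_, ()>(42)).unwrap();
    assert_eq!(value, 42);
}
```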

View File

@ -1,9 +1,12 @@
use crate::{
EntityTrait, IdenStatic, IntoSimpleExpr, Iterable, QueryTrait, Select, SelectTwo, SelectTwoMany,
ColumnTrait, EntityTrait, IdenStatic, IntoSimpleExpr, Iterable, QueryTrait, Select, SelectTwo,
SelectTwoMany,
};
use core::marker::PhantomData;
pub use sea_query::JoinType;
use sea_query::{Alias, ColumnRef, Iden, Order, SeaRc, SelectExpr, SelectStatement, SimpleExpr};
use sea_query::{
Alias, ColumnRef, DynIden, Expr, Iden, Order, SeaRc, SelectExpr, SelectStatement, SimpleExpr,
};
macro_rules! select_def {
( $ident: ident, $str: expr ) => {
@ -42,10 +45,17 @@ where
None => {
let col = match &sel.expr {
SimpleExpr::Column(col_ref) => match &col_ref {
ColumnRef::Column(col) => col,
ColumnRef::TableColumn(_, col) => col,
ColumnRef::Column(col) | ColumnRef::TableColumn(_, col) => col,
},
_ => panic!("cannot apply alias for expr other than Column"),
SimpleExpr::AsEnum(_, simple_expr) => match simple_expr.as_ref() {
SimpleExpr::Column(col_ref) => match &col_ref {
ColumnRef::Column(col) | ColumnRef::TableColumn(_, col) => col,
},
_ => {
panic!("cannot apply alias for AsEnum with expr other than Column")
}
},
_ => panic!("cannot apply alias for expr other than Column or AsEnum"),
};
let alias = format!("{}{}", pre, col.to_string().as_str());
sel.alias = Some(SeaRc::new(Alias::new(&alias)));
@ -128,10 +138,18 @@ where
F: EntityTrait,
S: QueryTrait<QueryStatement = SelectStatement>,
{
let text_type = SeaRc::new(Alias::new("text")) as DynIden;
for col in <F::Column as Iterable>::iter() {
let col_def = col.def();
let col_type = col_def.get_column_type();
let alias = format!("{}{}", SelectB.as_str(), col.as_str());
let expr = Expr::expr(col.into_simple_expr());
let expr = match col_type.get_enum_name() {
Some(_) => expr.as_enum(text_type.clone()),
None => expr.into(),
};
selector.query().expr(SelectExpr {
expr: col.into_simple_expr(),
expr,
alias: Some(SeaRc::new(Alias::new(&alias))),
});
}

View File

@ -3,7 +3,7 @@ use crate::{
QueryFilter, QueryTrait,
};
use core::marker::PhantomData;
use sea_query::{DeleteStatement, IntoIden};
use sea_query::DeleteStatement;
/// Defines the structure for a delete operation
#[derive(Clone, Debug)]
@ -94,7 +94,7 @@ impl Delete {
{
DeleteMany {
query: DeleteStatement::new()
.from_table(entity.into_iden())
.from_table(entity.table_ref())
.to_owned(),
entity: PhantomData,
}

View File

@ -465,10 +465,12 @@ pub(crate) fn join_tbl_on_condition(
pub(crate) fn unpack_table_ref(table_ref: &TableRef) -> DynIden {
match table_ref {
TableRef::Table(tbl) => SeaRc::clone(tbl),
TableRef::SchemaTable(_, tbl) => SeaRc::clone(tbl),
TableRef::TableAlias(tbl, _) => SeaRc::clone(tbl),
TableRef::SchemaTableAlias(_, tbl, _) => SeaRc::clone(tbl),
TableRef::SubQuery(_, tbl) => SeaRc::clone(tbl),
TableRef::Table(tbl)
| TableRef::SchemaTable(_, tbl)
| TableRef::DatabaseSchemaTable(_, _, tbl)
| TableRef::TableAlias(tbl, _)
| TableRef::SchemaTableAlias(_, tbl, _)
| TableRef::DatabaseSchemaTableAlias(_, _, tbl, _)
| TableRef::SubQuery(_, tbl) => SeaRc::clone(tbl),
}
}

View File

@ -63,7 +63,7 @@ where
///
/// assert_eq!(
/// Insert::one(cake::ActiveModel {
/// id: Unset(None),
/// id: NotSet,
/// name: Set("Apple Pie".to_owned()),
/// })
/// .build(DbBackend::Postgres)
@ -190,7 +190,7 @@ mod tests {
assert_eq!(
Insert::<cake::ActiveModel>::new()
.add(cake::ActiveModel {
id: ActiveValue::unset(),
id: ActiveValue::not_set(),
name: ActiveValue::set("Apple Pie".to_owned()),
})
.build(DbBackend::Postgres)

View File

@ -1,9 +1,9 @@
use crate::{
join_tbl_on_condition, unpack_table_ref, EntityTrait, IdenStatic, Iterable, Linked,
QuerySelect, Related, Select, SelectA, SelectB, SelectTwo, SelectTwoMany,
join_tbl_on_condition, unpack_table_ref, ColumnTrait, EntityTrait, IdenStatic, Iterable,
Linked, QuerySelect, Related, Select, SelectA, SelectB, SelectTwo, SelectTwoMany,
};
pub use sea_query::JoinType;
use sea_query::{Alias, Expr, IntoIden, SeaRc, SelectExpr};
use sea_query::{Alias, DynIden, Expr, IntoIden, SeaRc, SelectExpr};
impl<E> Select<E>
where
@ -79,21 +79,28 @@ where
slf.query().join_as(
JoinType::LeftJoin,
unpack_table_ref(&rel.to_tbl),
rel.to_tbl,
SeaRc::clone(&to_tbl),
join_tbl_on_condition(from_tbl, to_tbl, rel.from_col, rel.to_col),
);
}
slf = slf.apply_alias(SelectA.as_str());
let text_type = SeaRc::new(Alias::new("text")) as DynIden;
let mut select_two = SelectTwo::new_without_prepare(slf.query);
for col in <T::Column as Iterable>::iter() {
let col_def = col.def();
let col_type = col_def.get_column_type();
let alias = format!("{}{}", SelectB.as_str(), col.as_str());
let expr = Expr::tbl(
Alias::new(&format!("r{}", l.link().len() - 1)).into_iden(),
col.into_iden(),
);
let expr = match col_type.get_enum_name() {
Some(_) => expr.as_enum(text_type.clone()),
None => expr.into(),
};
select_two.query().expr(SelectExpr {
expr: Expr::tbl(
Alias::new(&format!("r{}", l.link().len() - 1)).into_iden(),
col.into_iden(),
)
.into(),
expr,
alias: Some(SeaRc::new(Alias::new(&alias))),
});
}

View File

@ -236,7 +236,7 @@ mod tests {
Update::one(fruit::ActiveModel {
id: ActiveValue::set(1),
name: ActiveValue::set("Orange".to_owned()),
cake_id: ActiveValue::unset(),
cake_id: ActiveValue::not_set(),
})
.build(DbBackend::Postgres)
.to_string(),

View File

@ -1,6 +1,6 @@
use crate::{
unpack_table_ref, ColumnTrait, ColumnType, DbBackend, EntityTrait, Identity, Iterable,
PrimaryKeyToColumn, PrimaryKeyTrait, RelationTrait, Schema,
unpack_table_ref, ActiveEnum, ColumnTrait, ColumnType, DbBackend, EntityTrait, Identity,
Iterable, PrimaryKeyToColumn, PrimaryKeyTrait, RelationTrait, Schema,
};
use sea_query::{
extension::postgres::{Type, TypeCreateStatement},
@ -8,6 +8,14 @@ use sea_query::{
};
impl Schema {
/// Creates Postgres enums from an ActiveEnum. See [TypeCreateStatement] for more details
pub fn create_enum_from_active_enum<A>(&self) -> TypeCreateStatement
where
A: ActiveEnum,
{
create_enum_from_active_enum::<A>(self.backend)
}
/// Creates Postgres enums from an Entity. See [TypeCreateStatement] for more details
pub fn create_enum_from_entity<E>(&self, entity: E) -> Vec<TypeCreateStatement>
where
@ -25,6 +33,30 @@ impl Schema {
}
}
pub(crate) fn create_enum_from_active_enum<A>(backend: DbBackend) -> TypeCreateStatement
where
A: ActiveEnum,
{
if matches!(backend, DbBackend::MySql | DbBackend::Sqlite) {
panic!("TypeCreateStatement is not supported in MySQL & SQLite");
}
let col_def = A::db_type();
let col_type = col_def.get_column_type();
create_enum_from_column_type(col_type)
}
pub(crate) fn create_enum_from_column_type(col_type: &ColumnType) -> TypeCreateStatement {
let (name, values) = match col_type {
ColumnType::Enum(s, v) => (s.as_str(), v),
_ => panic!("Should be ColumnType::Enum"),
};
Type::create()
.as_enum(Alias::new(name))
.values(values.iter().map(|val| Alias::new(val.as_str())))
.to_owned()
}
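`create_enum_from_column_type` is a thin wrapper over sea-query's Postgres `Type::create()` builder. A hedged usage sketch follows; the `tea` name and its values are assumptions drawn from this PR's test entities and may not match them exactly:

```rust
// Builds the kind of TypeCreateStatement the helper above returns for an enum
// column; the enum name and values here are illustrative assumptions.
use sea_query::{
    extension::postgres::{Type, TypeCreateStatement},
    Alias,
};

fn tea_type_stmt() -> TypeCreateStatement {
    // Should correspond to SQL along the lines of:
    //   CREATE TYPE "tea" AS ENUM ('EverydayTea', 'BreakfastTea')
    Type::create()
        .as_enum(Alias::new("tea"))
        .values(vec![Alias::new("EverydayTea"), Alias::new("BreakfastTea")])
        .to_owned()
}

fn main() {
    let _stmt = tea_type_stmt();
}
```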
#[allow(clippy::needless_borrow)]
pub(crate) fn create_enum_from_entity<E>(_: E, backend: DbBackend) -> Vec<TypeCreateStatement>
where
E: EntityTrait,
@ -39,14 +71,7 @@ where
if !matches!(col_type, ColumnType::Enum(_, _)) {
continue;
}
let (name, values) = match col_type {
ColumnType::Enum(s, v) => (s.as_str(), v),
_ => unreachable!(),
};
let stmt = Type::create()
.as_enum(Alias::new(name))
.values(values.iter().map(|val| Alias::new(val.as_str())))
.to_owned();
let stmt = create_enum_from_column_type(&col_type);
vec.push(stmt);
}
vec
@ -184,17 +209,6 @@ mod tests {
}
}
#[test]
fn test_create_table_from_entity() {
for builder in [DbBackend::MySql, DbBackend::Sqlite] {
let schema = Schema::new(builder);
assert_eq!(
builder.build(&schema.create_table_from_entity(CakeFillingPrice)),
builder.build(&get_stmt().table(CakeFillingPrice).to_owned())
);
}
}
fn get_stmt() -> TableCreateStatement {
Table::create()
.col(

View File

@ -15,7 +15,7 @@
#[macro_export]
#[cfg(feature = "debug-print")]
macro_rules! debug_print {
($( $args:expr ),*) => { log::debug!( $( $args ),* ); }
($( $args:expr ),*) => { tracing::debug!( $( $args ),* ); }
}
#[macro_export]

View File

@ -1,6 +1,9 @@
pub mod common;
use active_enum::Entity as ActiveEnum;
use active_enum_child::Entity as ActiveEnumChild;
pub use common::{features::*, setup::*, TestContext};
use pretty_assertions::assert_eq;
use sea_orm::{entity::prelude::*, entity::*, DatabaseConnection};
#[sea_orm_macros::test]
@ -13,6 +16,9 @@ async fn main() -> Result<(), DbErr> {
let ctx = TestContext::new("active_enum_tests").await;
create_tables(&ctx.db).await?;
insert_active_enum(&ctx.db).await?;
insert_active_enum_child(&ctx.db).await?;
find_related_active_enum(&ctx.db).await?;
find_linked_active_enum(&ctx.db).await?;
ctx.delete().await;
Ok(())
@ -21,25 +27,25 @@ async fn main() -> Result<(), DbErr> {
pub async fn insert_active_enum(db: &DatabaseConnection) -> Result<(), DbErr> {
use active_enum::*;
let am = ActiveModel {
category: Set(None),
color: Set(None),
tea: Set(None),
..Default::default()
}
.insert(db)
.await?;
let model = Model {
id: 1,
category: None,
color: None,
tea: None,
};
let model = Entity::find().one(db).await?.unwrap();
assert_eq!(
model,
Model {
id: 1,
category: None,
color: None,
tea: None,
ActiveModel {
category: Set(None),
color: Set(None),
tea: Set(None),
..Default::default()
}
.insert(db)
.await?
);
assert_eq!(model, Entity::find().one(db).await?.unwrap());
assert_eq!(
model,
Entity::find()
@ -52,11 +58,11 @@ pub async fn insert_active_enum(db: &DatabaseConnection) -> Result<(), DbErr> {
.unwrap()
);
let am = ActiveModel {
let _ = ActiveModel {
category: Set(Some(Category::Big)),
color: Set(Some(Color::Black)),
tea: Set(Some(Tea::EverydayTea)),
..am
..model.into_active_model()
}
.save(db)
.await?;
@ -83,10 +89,629 @@ pub async fn insert_active_enum(db: &DatabaseConnection) -> Result<(), DbErr> {
.unwrap()
);
let res = am.delete(db).await?;
let res = model.delete(db).await?;
assert_eq!(res.rows_affected, 1);
assert_eq!(Entity::find().one(db).await?, None);
Ok(())
}
pub async fn insert_active_enum_child(db: &DatabaseConnection) -> Result<(), DbErr> {
use active_enum_child::*;
active_enum::ActiveModel {
category: Set(Some(Category::Small)),
color: Set(Some(Color::White)),
tea: Set(Some(Tea::BreakfastTea)),
..Default::default()
}
.insert(db)
.await?;
let am = ActiveModel {
parent_id: Set(2),
category: Set(None),
color: Set(None),
tea: Set(None),
..Default::default()
}
.insert(db)
.await?;
let model = Entity::find().one(db).await?.unwrap();
assert_eq!(
model,
Model {
id: 1,
parent_id: 2,
category: None,
color: None,
tea: None,
}
);
assert_eq!(
model,
Entity::find()
.filter(Column::Id.is_not_null())
.filter(Column::Category.is_null())
.filter(Column::Color.is_null())
.filter(Column::Tea.is_null())
.one(db)
.await?
.unwrap()
);
ActiveModel {
category: Set(Some(Category::Big)),
color: Set(Some(Color::Black)),
tea: Set(Some(Tea::EverydayTea)),
..am.into_active_model()
}
.save(db)
.await?;
let model = Entity::find().one(db).await?.unwrap();
assert_eq!(
model,
Model {
id: 1,
parent_id: 2,
category: Some(Category::Big),
color: Some(Color::Black),
tea: Some(Tea::EverydayTea),
}
);
assert_eq!(
model,
Entity::find()
.filter(Column::Id.eq(1))
.filter(Column::Category.eq(Category::Big))
.filter(Column::Color.eq(Color::Black))
.filter(Column::Tea.eq(Tea::EverydayTea))
.one(db)
.await?
.unwrap()
);
Ok(())
}
pub async fn find_related_active_enum(db: &DatabaseConnection) -> Result<(), DbErr> {
assert_eq!(
active_enum::Model {
id: 2,
category: None,
color: None,
tea: None,
}
.find_related(ActiveEnumChild)
.all(db)
.await?,
vec![active_enum_child::Model {
id: 1,
parent_id: 2,
category: Some(Category::Big),
color: Some(Color::Black),
tea: Some(Tea::EverydayTea),
}]
);
assert_eq!(
ActiveEnum::find()
.find_with_related(ActiveEnumChild)
.all(db)
.await?,
vec![(
active_enum::Model {
id: 2,
category: Some(Category::Small),
color: Some(Color::White),
tea: Some(Tea::BreakfastTea),
},
vec![active_enum_child::Model {
id: 1,
parent_id: 2,
category: Some(Category::Big),
color: Some(Color::Black),
tea: Some(Tea::EverydayTea),
}]
)]
);
assert_eq!(
ActiveEnum::find()
.find_also_related(ActiveEnumChild)
.all(db)
.await?,
vec![(
active_enum::Model {
id: 2,
category: Some(Category::Small),
color: Some(Color::White),
tea: Some(Tea::BreakfastTea),
},
Some(active_enum_child::Model {
id: 1,
parent_id: 2,
category: Some(Category::Big),
color: Some(Color::Black),
tea: Some(Tea::EverydayTea),
})
)]
);
assert_eq!(
active_enum_child::Model {
id: 1,
parent_id: 2,
category: None,
color: None,
tea: None,
}
.find_related(ActiveEnum)
.all(db)
.await?,
vec![active_enum::Model {
id: 2,
category: Some(Category::Small),
color: Some(Color::White),
tea: Some(Tea::BreakfastTea),
}]
);
assert_eq!(
ActiveEnumChild::find()
.find_with_related(ActiveEnum)
.all(db)
.await?,
vec![(
active_enum_child::Model {
id: 1,
parent_id: 2,
category: Some(Category::Big),
color: Some(Color::Black),
tea: Some(Tea::EverydayTea),
},
vec![active_enum::Model {
id: 2,
category: Some(Category::Small),
color: Some(Color::White),
tea: Some(Tea::BreakfastTea),
}]
)]
);
assert_eq!(
ActiveEnumChild::find()
.find_also_related(ActiveEnum)
.all(db)
.await?,
vec![(
active_enum_child::Model {
id: 1,
parent_id: 2,
category: Some(Category::Big),
color: Some(Color::Black),
tea: Some(Tea::EverydayTea),
},
Some(active_enum::Model {
id: 2,
category: Some(Category::Small),
color: Some(Color::White),
tea: Some(Tea::BreakfastTea),
})
)]
);
Ok(())
}
pub async fn find_linked_active_enum(db: &DatabaseConnection) -> Result<(), DbErr> {
assert_eq!(
active_enum::Model {
id: 2,
category: None,
color: None,
tea: None,
}
.find_linked(active_enum::ActiveEnumChildLink)
.all(db)
.await?,
vec![active_enum_child::Model {
id: 1,
parent_id: 2,
category: Some(Category::Big),
color: Some(Color::Black),
tea: Some(Tea::EverydayTea),
}]
);
assert_eq!(
ActiveEnum::find()
.find_also_linked(active_enum::ActiveEnumChildLink)
.all(db)
.await?,
vec![(
active_enum::Model {
id: 2,
category: Some(Category::Small),
color: Some(Color::White),
tea: Some(Tea::BreakfastTea),
},
Some(active_enum_child::Model {
id: 1,
parent_id: 2,
category: Some(Category::Big),
color: Some(Color::Black),
tea: Some(Tea::EverydayTea),
})
)]
);
assert_eq!(
active_enum_child::Model {
id: 1,
parent_id: 2,
category: None,
color: None,
tea: None,
}
.find_linked(active_enum_child::ActiveEnumLink)
.all(db)
.await?,
vec![active_enum::Model {
id: 2,
category: Some(Category::Small),
color: Some(Color::White),
tea: Some(Tea::BreakfastTea),
}]
);
assert_eq!(
ActiveEnumChild::find()
.find_also_linked(active_enum_child::ActiveEnumLink)
.all(db)
.await?,
vec![(
active_enum_child::Model {
id: 1,
parent_id: 2,
category: Some(Category::Big),
color: Some(Color::Black),
tea: Some(Tea::EverydayTea),
},
Some(active_enum::Model {
id: 2,
category: Some(Category::Small),
color: Some(Color::White),
tea: Some(Tea::BreakfastTea),
})
)]
);
Ok(())
}
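// The unit tests below only build SQL (no database connection is needed) and
// compare the exact statements per backend: MySQL and SQLite are expected to
// produce identical queries, while Postgres additionally casts the `tea`
// enum column to text when selecting it.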
#[cfg(test)]
mod tests {
use super::*;
pub use pretty_assertions::assert_eq;
pub use sea_orm::{DbBackend, QueryTrait};
#[test]
fn active_enum_find_related() {
let active_enum_model = active_enum::Model {
id: 1,
category: None,
color: None,
tea: None,
};
let _select = active_enum_model.find_related(ActiveEnumChild);
#[cfg(any(feature = "sqlx-mysql", feature = "sqlx-sqlite"))]
{
assert_eq!(
_select.build(DbBackend::MySql).to_string(),
_select.build(DbBackend::Sqlite).to_string(),
);
assert_eq!(
_select.build(DbBackend::MySql).to_string(),
[
"SELECT `active_enum_child`.`id`, `active_enum_child`.`parent_id`, `active_enum_child`.`category`, `active_enum_child`.`color`, `active_enum_child`.`tea`",
"FROM `active_enum_child`",
"INNER JOIN `active_enum` ON `active_enum`.`id` = `active_enum_child`.`parent_id`",
"WHERE `active_enum`.`id` = 1",
]
.join(" ")
);
}
#[cfg(feature = "sqlx-postgres")]
assert_eq!(
_select.build(DbBackend::Postgres).to_string(),
[
r#"SELECT "active_enum_child"."id", "active_enum_child"."parent_id", "active_enum_child"."category", "active_enum_child"."color", CAST("active_enum_child"."tea" AS text)"#,
r#"FROM "public"."active_enum_child""#,
r#"INNER JOIN "public"."active_enum" ON "active_enum"."id" = "active_enum_child"."parent_id""#,
r#"WHERE "active_enum"."id" = 1"#,
]
.join(" ")
);
let _select = ActiveEnum::find().find_also_related(ActiveEnumChild);
#[cfg(any(feature = "sqlx-mysql", feature = "sqlx-sqlite"))]
{
assert_eq!(
_select.build(DbBackend::MySql).to_string(),
_select.build(DbBackend::Sqlite).to_string(),
);
assert_eq!(
_select
.build(DbBackend::MySql)
.to_string(),
[
"SELECT `active_enum`.`id` AS `A_id`, `active_enum`.`category` AS `A_category`, `active_enum`.`color` AS `A_color`, `active_enum`.`tea` AS `A_tea`,",
"`active_enum_child`.`id` AS `B_id`, `active_enum_child`.`parent_id` AS `B_parent_id`, `active_enum_child`.`category` AS `B_category`, `active_enum_child`.`color` AS `B_color`, `active_enum_child`.`tea` AS `B_tea`",
"FROM `active_enum`",
"LEFT JOIN `active_enum_child` ON `active_enum`.`id` = `active_enum_child`.`parent_id`",
]
.join(" ")
);
}
#[cfg(feature = "sqlx-postgres")]
assert_eq!(
_select
.build(DbBackend::Postgres)
.to_string(),
[
r#"SELECT "active_enum"."id" AS "A_id", "active_enum"."category" AS "A_category", "active_enum"."color" AS "A_color", CAST("active_enum"."tea" AS text) AS "A_tea","#,
r#""active_enum_child"."id" AS "B_id", "active_enum_child"."parent_id" AS "B_parent_id", "active_enum_child"."category" AS "B_category", "active_enum_child"."color" AS "B_color", CAST("active_enum_child"."tea" AS text) AS "B_tea""#,
r#"FROM "public"."active_enum""#,
r#"LEFT JOIN "public"."active_enum_child" ON "active_enum"."id" = "active_enum_child"."parent_id""#,
]
.join(" ")
);
}
#[test]
fn active_enum_find_linked() {
let active_enum_model = active_enum::Model {
id: 1,
category: None,
color: None,
tea: None,
};
let _select = active_enum_model.find_linked(active_enum::ActiveEnumChildLink);
#[cfg(any(feature = "sqlx-mysql", feature = "sqlx-sqlite"))]
{
assert_eq!(
_select.build(DbBackend::MySql).to_string(),
_select.build(DbBackend::Sqlite).to_string(),
);
assert_eq!(
_select.build(DbBackend::MySql).to_string(),
[
"SELECT `active_enum_child`.`id`, `active_enum_child`.`parent_id`, `active_enum_child`.`category`, `active_enum_child`.`color`, `active_enum_child`.`tea`",
"FROM `active_enum_child`",
"INNER JOIN `active_enum` AS `r0` ON `r0`.`id` = `active_enum_child`.`parent_id`",
"WHERE `r0`.`id` = 1",
]
.join(" ")
);
}
#[cfg(feature = "sqlx-postgres")]
assert_eq!(
_select.build(DbBackend::Postgres).to_string(),
[
r#"SELECT "active_enum_child"."id", "active_enum_child"."parent_id", "active_enum_child"."category", "active_enum_child"."color", CAST("active_enum_child"."tea" AS text)"#,
r#"FROM "public"."active_enum_child""#,
r#"INNER JOIN "public"."active_enum" AS "r0" ON "r0"."id" = "active_enum_child"."parent_id""#,
r#"WHERE "r0"."id" = 1"#,
]
.join(" ")
);
let _select = ActiveEnum::find().find_also_linked(active_enum::ActiveEnumChildLink);
#[cfg(any(feature = "sqlx-mysql", feature = "sqlx-sqlite"))]
{
assert_eq!(
_select.build(DbBackend::MySql).to_string(),
_select.build(DbBackend::Sqlite).to_string(),
);
assert_eq!(
_select
.build(DbBackend::MySql)
.to_string(),
[
"SELECT `active_enum`.`id` AS `A_id`, `active_enum`.`category` AS `A_category`, `active_enum`.`color` AS `A_color`, `active_enum`.`tea` AS `A_tea`,",
"`r0`.`id` AS `B_id`, `r0`.`parent_id` AS `B_parent_id`, `r0`.`category` AS `B_category`, `r0`.`color` AS `B_color`, `r0`.`tea` AS `B_tea`",
"FROM `active_enum`",
"LEFT JOIN `active_enum_child` AS `r0` ON `active_enum`.`id` = `r0`.`parent_id`",
]
.join(" ")
);
}
#[cfg(feature = "sqlx-postgres")]
assert_eq!(
_select
.build(DbBackend::Postgres)
.to_string(),
[
r#"SELECT "active_enum"."id" AS "A_id", "active_enum"."category" AS "A_category", "active_enum"."color" AS "A_color", CAST("active_enum"."tea" AS text) AS "A_tea","#,
r#""r0"."id" AS "B_id", "r0"."parent_id" AS "B_parent_id", "r0"."category" AS "B_category", "r0"."color" AS "B_color", CAST("r0"."tea" AS text) AS "B_tea""#,
r#"FROM "public"."active_enum""#,
r#"LEFT JOIN "public"."active_enum_child" AS "r0" ON "active_enum"."id" = "r0"."parent_id""#,
]
.join(" ")
);
}
#[test]
fn active_enum_child_find_related() {
let active_enum_child_model = active_enum_child::Model {
id: 1,
parent_id: 2,
category: None,
color: None,
tea: None,
};
let _select = active_enum_child_model.find_related(ActiveEnum);
#[cfg(any(feature = "sqlx-mysql", feature = "sqlx-sqlite"))]
{
assert_eq!(
_select.build(DbBackend::MySql).to_string(),
_select.build(DbBackend::Sqlite).to_string(),
);
assert_eq!(
_select.build(DbBackend::MySql).to_string(),
[
"SELECT `active_enum`.`id`, `active_enum`.`category`, `active_enum`.`color`, `active_enum`.`tea`",
"FROM `active_enum`",
"INNER JOIN `active_enum_child` ON `active_enum_child`.`parent_id` = `active_enum`.`id`",
"WHERE `active_enum_child`.`id` = 1",
]
.join(" ")
);
}
#[cfg(feature = "sqlx-postgres")]
assert_eq!(
_select.build(DbBackend::Postgres).to_string(),
[
r#"SELECT "active_enum"."id", "active_enum"."category", "active_enum"."color", CAST("active_enum"."tea" AS text)"#,
r#"FROM "public"."active_enum""#,
r#"INNER JOIN "public"."active_enum_child" ON "active_enum_child"."parent_id" = "active_enum"."id""#,
r#"WHERE "active_enum_child"."id" = 1"#,
]
.join(" ")
);
let _select = ActiveEnumChild::find().find_also_related(ActiveEnum);
#[cfg(any(feature = "sqlx-mysql", feature = "sqlx-sqlite"))]
{
assert_eq!(
_select.build(DbBackend::MySql).to_string(),
_select.build(DbBackend::Sqlite).to_string(),
);
assert_eq!(
_select
.build(DbBackend::MySql)
.to_string(),
[
"SELECT `active_enum_child`.`id` AS `A_id`, `active_enum_child`.`parent_id` AS `A_parent_id`, `active_enum_child`.`category` AS `A_category`, `active_enum_child`.`color` AS `A_color`, `active_enum_child`.`tea` AS `A_tea`,",
"`active_enum`.`id` AS `B_id`, `active_enum`.`category` AS `B_category`, `active_enum`.`color` AS `B_color`, `active_enum`.`tea` AS `B_tea`",
"FROM `active_enum_child`",
"LEFT JOIN `active_enum` ON `active_enum_child`.`parent_id` = `active_enum`.`id`",
]
.join(" ")
);
}
#[cfg(feature = "sqlx-postgres")]
assert_eq!(
_select
.build(DbBackend::Postgres)
.to_string(),
[
r#"SELECT "active_enum_child"."id" AS "A_id", "active_enum_child"."parent_id" AS "A_parent_id", "active_enum_child"."category" AS "A_category", "active_enum_child"."color" AS "A_color", CAST("active_enum_child"."tea" AS text) AS "A_tea","#,
r#""active_enum"."id" AS "B_id", "active_enum"."category" AS "B_category", "active_enum"."color" AS "B_color", CAST("active_enum"."tea" AS text) AS "B_tea""#,
r#"FROM "public"."active_enum_child""#,
r#"LEFT JOIN "public"."active_enum" ON "active_enum_child"."parent_id" = "active_enum"."id""#,
]
.join(" ")
);
}
#[test]
fn active_enum_child_find_linked() {
let active_enum_child_model = active_enum_child::Model {
id: 1,
parent_id: 2,
category: None,
color: None,
tea: None,
};
let _select = active_enum_child_model.find_linked(active_enum_child::ActiveEnumLink);
#[cfg(any(feature = "sqlx-mysql", feature = "sqlx-sqlite"))]
{
assert_eq!(
_select.build(DbBackend::MySql).to_string(),
_select.build(DbBackend::Sqlite).to_string(),
);
assert_eq!(
_select.build(DbBackend::MySql).to_string(),
[
"SELECT `active_enum`.`id`, `active_enum`.`category`, `active_enum`.`color`, `active_enum`.`tea`",
"FROM `active_enum`",
"INNER JOIN `active_enum_child` AS `r0` ON `r0`.`parent_id` = `active_enum`.`id`",
"WHERE `r0`.`id` = 1",
]
.join(" ")
);
}
#[cfg(feature = "sqlx-postgres")]
assert_eq!(
_select.build(DbBackend::Postgres).to_string(),
[
r#"SELECT "active_enum"."id", "active_enum"."category", "active_enum"."color", CAST("active_enum"."tea" AS text)"#,
r#"FROM "public"."active_enum""#,
r#"INNER JOIN "public"."active_enum_child" AS "r0" ON "r0"."parent_id" = "active_enum"."id""#,
r#"WHERE "r0"."id" = 1"#,
]
.join(" ")
);
let _select = ActiveEnumChild::find().find_also_linked(active_enum_child::ActiveEnumLink);
#[cfg(any(feature = "sqlx-mysql", feature = "sqlx-sqlite"))]
{
assert_eq!(
_select.build(DbBackend::MySql).to_string(),
_select.build(DbBackend::Sqlite).to_string(),
);
assert_eq!(
_select
.build(DbBackend::MySql)
.to_string(),
[
"SELECT `active_enum_child`.`id` AS `A_id`, `active_enum_child`.`parent_id` AS `A_parent_id`, `active_enum_child`.`category` AS `A_category`, `active_enum_child`.`color` AS `A_color`, `active_enum_child`.`tea` AS `A_tea`,",
"`r0`.`id` AS `B_id`, `r0`.`category` AS `B_category`, `r0`.`color` AS `B_color`, `r0`.`tea` AS `B_tea`",
"FROM `active_enum_child`",
"LEFT JOIN `active_enum` AS `r0` ON `active_enum_child`.`parent_id` = `r0`.`id`",
]
.join(" ")
);
}
#[cfg(feature = "sqlx-postgres")]
assert_eq!(
_select
.build(DbBackend::Postgres)
.to_string(),
[
r#"SELECT "active_enum_child"."id" AS "A_id", "active_enum_child"."parent_id" AS "A_parent_id", "active_enum_child"."category" AS "A_category", "active_enum_child"."color" AS "A_color", CAST("active_enum_child"."tea" AS text) AS "A_tea","#,
r#""r0"."id" AS "B_id", "r0"."category" AS "B_category", "r0"."color" AS "B_color", CAST("r0"."tea" AS text) AS "B_tea""#,
r#"FROM "public"."active_enum_child""#,
r#"LEFT JOIN "public"."active_enum" AS "r0" ON "active_enum_child"."parent_id" = "r0"."id""#,
]
.join(" ")
);
}
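    // Checks that `Schema` derives the Postgres `CREATE TYPE "tea" AS ENUM (...)`
    // statement both from the entity and directly from the `Tea` active enum.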
#[test]
fn create_enum_from() {
use sea_orm::{Schema, Statement};
let db_postgres = DbBackend::Postgres;
let schema = Schema::new(db_postgres);
assert_eq!(
schema
.create_enum_from_entity(active_enum::Entity)
.iter()
.map(|stmt| db_postgres.build(stmt))
.collect::<Vec<_>>(),
vec![Statement::from_string(
db_postgres,
r#"CREATE TYPE "tea" AS ENUM ('EverydayTea', 'BreakfastTea')"#.to_owned()
),]
);
assert_eq!(
db_postgres.build(&schema.create_enum_from_active_enum::<Tea>()),
Statement::from_string(
db_postgres,
r#"CREATE TYPE "tea" AS ENUM ('EverydayTea', 'BreakfastTea')"#.to_owned()
)
);
}
}

View File

@ -45,17 +45,17 @@ async fn crud_cake(db: &DbConn) -> Result<(), DbErr> {
..Default::default()
};
let mut apple = apple.save(db).await?;
let mut apple = apple.save(db).await?.into_active_model();
println!();
println!("Inserted: {:?}", apple);
assert_eq!(
apple,
cake::ActiveModel {
id: Set(1),
name: Set("Apple Pie".to_owned()),
},
apple
id: Unchanged(1),
name: Unchanged("Apple Pie".to_owned()),
}
);
apple.name = Set("Lemon Tart".to_owned());
@ -81,7 +81,7 @@ async fn crud_cake(db: &DbConn) -> Result<(), DbErr> {
apple
);
let apple: cake::ActiveModel = apple.unwrap().into();
let apple: cake::Model = apple.unwrap();
let result = apple.delete(db).await?;

View File

@ -52,12 +52,18 @@ pub async fn create_and_update(db: &DatabaseConnection) -> Result<(), DbErr> {
))
);
let update_res = Entity::update(updated_active_model.clone())
let update_res = Entity::update(updated_active_model)
.filter(Column::Id.eq(vec![1, 2, 3]))
.exec(db)
.await?;
assert_eq!(update_res, updated_active_model);
assert_eq!(
update_res,
Model {
id: vec![1, 2, 3],
value: "First Row (Updated)".to_owned(),
}
);
assert_eq!(
Entity::find()

View File

@ -70,15 +70,15 @@ impl ActiveModelBehavior for ActiveModel {
}
}
fn after_save(self, insert: bool) -> Result<Self, DbErr> {
fn after_save(model: Model, insert: bool) -> Result<Model, DbErr> {
use rust_decimal_macros::dec;
if self.price.as_ref() < &dec!(0) {
if model.price < dec!(0) {
Err(DbErr::Custom(format!(
"[after_save] Invalid Price, insert: {}",
insert
)))
} else {
Ok(self)
Ok(model)
}
}

View File

@ -1,7 +1,9 @@
use super::sea_orm_active_enums::*;
use sea_orm::entity::prelude::*;
#[derive(Clone, Debug, PartialEq, DeriveEntityModel)]
#[sea_orm(schema_name = "public", table_name = "active_enum")]
#[cfg_attr(feature = "sqlx-postgres", sea_orm(schema_name = "public"))]
#[sea_orm(table_name = "active_enum")]
pub struct Model {
#[sea_orm(primary_key)]
pub id: i32,
@ -11,33 +13,27 @@ pub struct Model {
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {}
pub enum Relation {
#[sea_orm(has_many = "super::active_enum_child::Entity")]
ActiveEnumChild,
}
impl Related<super::active_enum_child::Entity> for Entity {
fn to() -> RelationDef {
Relation::ActiveEnumChild.def()
}
}
pub struct ActiveEnumChildLink;
impl Linked for ActiveEnumChildLink {
type FromEntity = Entity;
type ToEntity = super::active_enum_child::Entity;
fn link(&self) -> Vec<RelationDef> {
vec![Relation::ActiveEnumChild.def()]
}
}
impl ActiveModelBehavior for ActiveModel {}
#[derive(Debug, Clone, PartialEq, EnumIter, DeriveActiveEnum)]
#[sea_orm(rs_type = "String", db_type = "String(Some(1))")]
pub enum Category {
#[sea_orm(string_value = "B")]
Big,
#[sea_orm(string_value = "S")]
Small,
}
#[derive(Debug, Clone, PartialEq, EnumIter, DeriveActiveEnum)]
#[sea_orm(rs_type = "i32", db_type = "Integer")]
pub enum Color {
#[sea_orm(num_value = 0)]
Black,
#[sea_orm(num_value = 1)]
White,
}
#[derive(Debug, Clone, PartialEq, EnumIter, DeriveActiveEnum)]
#[sea_orm(rs_type = "String", db_type = "Enum", enum_name = "tea")]
pub enum Tea {
#[sea_orm(string_value = "EverydayTea")]
EverydayTea,
#[sea_orm(string_value = "BreakfastTea")]
BreakfastTea,
}

View File

@ -0,0 +1,44 @@
use super::sea_orm_active_enums::*;
use sea_orm::entity::prelude::*;
#[derive(Clone, Debug, PartialEq, DeriveEntityModel)]
#[cfg_attr(feature = "sqlx-postgres", sea_orm(schema_name = "public"))]
#[sea_orm(table_name = "active_enum_child")]
pub struct Model {
#[sea_orm(primary_key)]
pub id: i32,
pub parent_id: i32,
pub category: Option<Category>,
pub color: Option<Color>,
pub tea: Option<Tea>,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(
belongs_to = "super::active_enum::Entity",
from = "Column::ParentId",
to = "super::active_enum::Column::Id"
)]
ActiveEnum,
}
impl Related<super::active_enum::Entity> for Entity {
fn to() -> RelationDef {
Relation::ActiveEnum.def()
}
}
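// Counterpart of `active_enum::ActiveEnumChildLink`, defined from the child side:
// `find_linked(active_enum_child::ActiveEnumLink)` follows `Relation::ActiveEnum`
// back to the parent `active_enum` table.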
pub struct ActiveEnumLink;
impl Linked for ActiveEnumLink {
type FromEntity = Entity;
type ToEntity = super::active_enum::Entity;
fn link(&self) -> Vec<RelationDef> {
vec![Relation::ActiveEnum.def()]
}
}
impl ActiveModelBehavior for ActiveModel {}

View File

@ -1,15 +1,19 @@
pub mod active_enum;
pub mod active_enum_child;
pub mod applog;
pub mod byte_primary_key;
pub mod metadata;
pub mod repository;
pub mod schema;
pub mod sea_orm_active_enums;
pub mod self_join;
pub use active_enum::Entity as ActiveEnum;
pub use active_enum_child::Entity as ActiveEnumChild;
pub use applog::Entity as Applog;
pub use byte_primary_key::Entity as BytePrimaryKey;
pub use metadata::Entity as Metadata;
pub use repository::Entity as Repository;
pub use schema::*;
pub use sea_orm_active_enums::*;
pub use self_join::Entity as SelfJoin;

View File

@ -4,17 +4,38 @@ use super::*;
use crate::common::setup::{create_enum, create_table, create_table_without_asserts};
use sea_orm::{
error::*, sea_query, ConnectionTrait, DatabaseConnection, DbBackend, DbConn, EntityName,
ExecResult,
ExecResult, Schema,
};
use sea_query::{extension::postgres::Type, Alias, ColumnDef, ForeignKeyCreateStatement};
pub async fn create_tables(db: &DatabaseConnection) -> Result<(), DbErr> {
let db_backend = db.get_database_backend();
create_log_table(db).await?;
create_metadata_table(db).await?;
create_repository_table(db).await?;
create_self_join_table(db).await?;
create_byte_primary_key_table(db).await?;
let create_enum_stmts = match db_backend {
DbBackend::MySql | DbBackend::Sqlite => Vec::new(),
DbBackend::Postgres => {
let schema = Schema::new(db_backend);
let enum_create_stmt = Type::create()
.as_enum(Alias::new("tea"))
.values(vec![Alias::new("EverydayTea"), Alias::new("BreakfastTea")])
.to_owned();
assert_eq!(
db_backend.build(&enum_create_stmt),
db_backend.build(&schema.create_enum_from_active_enum::<Tea>())
);
vec![enum_create_stmt]
}
};
create_enum(db, &create_enum_stmts, ActiveEnum).await?;
create_active_enum_table(db).await?;
create_active_enum_child_table(db).await?;
Ok(())
}
@ -127,18 +148,6 @@ pub async fn create_byte_primary_key_table(db: &DbConn) -> Result<ExecResult, Db
}
pub async fn create_active_enum_table(db: &DbConn) -> Result<ExecResult, DbErr> {
let db_backend = db.get_database_backend();
let create_enum_stmts = match db_backend {
DbBackend::MySql | DbBackend::Sqlite => Vec::new(),
DbBackend::Postgres => vec![Type::create()
.as_enum(Alias::new("tea"))
.values(vec![Alias::new("EverydayTea"), Alias::new("BreakfastTea")])
.to_owned()],
};
create_enum(db, &create_enum_stmts, ActiveEnum).await?;
let create_table_stmt = sea_query::Table::create()
.table(active_enum::Entity.table_ref())
.col(
@ -158,3 +167,37 @@ pub async fn create_active_enum_table(db: &DbConn) -> Result<ExecResult, DbErr>
create_table(db, &create_table_stmt, ActiveEnum).await
}
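// The child table mirrors the enum-backed column types of `active_enum`
// (a 1-char string, an integer and the `tea` database enum) and adds a
// foreign key from `parent_id` to `active_enum`.`id`.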
pub async fn create_active_enum_child_table(db: &DbConn) -> Result<ExecResult, DbErr> {
let create_table_stmt = sea_query::Table::create()
.table(active_enum_child::Entity.table_ref())
.col(
ColumnDef::new(active_enum_child::Column::Id)
.integer()
.not_null()
.auto_increment()
.primary_key(),
)
.col(
ColumnDef::new(active_enum_child::Column::ParentId)
.integer()
.not_null(),
)
.col(ColumnDef::new(active_enum_child::Column::Category).string_len(1))
.col(ColumnDef::new(active_enum_child::Column::Color).integer())
.col(
ColumnDef::new(active_enum_child::Column::Tea)
.enumeration("tea", vec!["EverydayTea", "BreakfastTea"]),
)
.foreign_key(
ForeignKeyCreateStatement::new()
.name("fk-active_enum_child-active_enum")
.from_tbl(ActiveEnumChild)
.from_col(active_enum_child::Column::ParentId)
.to_tbl(ActiveEnum)
.to_col(active_enum::Column::Id),
)
.to_owned();
create_table(db, &create_table_stmt, ActiveEnumChild).await
}

View File

@ -0,0 +1,28 @@
use sea_orm::entity::prelude::*;
#[derive(Debug, Clone, PartialEq, EnumIter, DeriveActiveEnum)]
#[sea_orm(rs_type = "String", db_type = "String(Some(1))")]
pub enum Category {
#[sea_orm(string_value = "B")]
Big,
#[sea_orm(string_value = "S")]
Small,
}
#[derive(Debug, Clone, PartialEq, EnumIter, DeriveActiveEnum)]
#[sea_orm(rs_type = "i32", db_type = "Integer")]
pub enum Color {
#[sea_orm(num_value = 0)]
Black,
#[sea_orm(num_value = 1)]
White,
}
#[derive(Debug, Clone, PartialEq, EnumIter, DeriveActiveEnum)]
#[sea_orm(rs_type = "String", db_type = "Enum", enum_name = "tea")]
pub enum Tea {
#[sea_orm(string_value = "EverydayTea")]
EverydayTea,
#[sea_orm(string_value = "BreakfastTea")]
BreakfastTea,
}

View File

@ -43,8 +43,7 @@ pub async fn test_update_cake(db: &DbConn) {
cake_am.name = Set("Extra chocolate mud cake".to_owned());
cake_am.price = Set(dec!(20.00));
let _cake_update_res: cake::ActiveModel =
cake_am.update(db).await.expect("could not update cake");
let _cake_update_res: cake::Model = cake_am.update(db).await.expect("could not update cake");
let cake: Option<cake::Model> = Cake::find_by_id(cake_insert_res.last_insert_id)
.one(db)
@ -80,7 +79,7 @@ pub async fn test_update_bakery(db: &DbConn) {
bakery_am.name = Set("SeaBreeze Bakery".to_owned());
bakery_am.profit_margin = Set(12.00);
let _bakery_update_res: bakery::ActiveModel =
let _bakery_update_res: bakery::Model =
bakery_am.update(db).await.expect("could not update bakery");
let bakery: Option<bakery::Model> = Bakery::find_by_id(bakery_insert_res.last_insert_id)
@ -109,13 +108,13 @@ pub async fn test_update_deleted_customer(db: &DbConn) {
init_n_customers + 1
);
let customer_id = customer.id.clone();
let customer_id = customer.id;
let _ = customer.delete(db).await;
assert_eq!(Customer::find().count(db).await.unwrap(), init_n_customers);
let customer = customer::ActiveModel {
id: customer_id.clone(),
id: Set(customer_id),
name: Set("John 2".to_owned()),
..Default::default()
};
@ -131,7 +130,7 @@ pub async fn test_update_deleted_customer(db: &DbConn) {
assert_eq!(Customer::find().count(db).await.unwrap(), init_n_customers);
let customer: Option<customer::Model> = Customer::find_by_id(customer_id.clone().unwrap())
let customer: Option<customer::Model> = Customer::find_by_id(customer_id)
.one(db)
.await
.expect("could not find customer");

View File

@ -43,7 +43,7 @@ pub async fn find_one_with_result() {
let result = Bakery::find().one(&ctx.db).await.unwrap().unwrap();
assert_eq!(result.id, bakery.id.unwrap());
assert_eq!(result.id, bakery.id);
ctx.delete().await;
}
@ -83,13 +83,13 @@ pub async fn find_by_id_with_result() {
.await
.expect("could not insert bakery");
let result = Bakery::find_by_id(bakery.id.clone().unwrap())
let result = Bakery::find_by_id(bakery.id.clone())
.one(&ctx.db)
.await
.unwrap()
.unwrap();
assert_eq!(result.id, bakery.id.unwrap());
assert_eq!(result.id, bakery.id);
ctx.delete().await;
}

View File

@ -35,7 +35,7 @@ pub async fn left_join() {
"home": "0395555555",
"address": "12 Test St, Testville, Vic, Australia"
})),
bakery_id: Set(Some(bakery.id.clone().unwrap())),
bakery_id: Set(Some(bakery.id.clone())),
..Default::default()
}
.save(&ctx.db)
@ -71,6 +71,7 @@ pub async fn left_join() {
.await
.unwrap()
.unwrap();
assert_eq!(result.name.as_str(), "Baker 1");
assert_eq!(result.bakery_name, Some("SeaSide Bakery".to_string()));
let select = baker::Entity::find()
@ -123,8 +124,8 @@ pub async fn right_join() {
.expect("could not insert customer");
let _order = order::ActiveModel {
bakery_id: Set(bakery.id.clone().unwrap()),
customer_id: Set(customer_kate.id.clone().unwrap()),
bakery_id: Set(bakery.id.clone()),
customer_id: Set(customer_kate.id.clone()),
total: Set(dec!(15.10)),
placed_at: Set(Utc::now().naive_utc()),
@ -209,8 +210,8 @@ pub async fn inner_join() {
.expect("could not insert customer");
let kate_order_1 = order::ActiveModel {
bakery_id: Set(bakery.id.clone().unwrap()),
customer_id: Set(customer_kate.id.clone().unwrap()),
bakery_id: Set(bakery.id.clone()),
customer_id: Set(customer_kate.id.clone()),
total: Set(dec!(15.10)),
placed_at: Set(Utc::now().naive_utc()),
@ -221,8 +222,8 @@ pub async fn inner_join() {
.expect("could not insert order");
let kate_order_2 = order::ActiveModel {
bakery_id: Set(bakery.id.clone().unwrap()),
customer_id: Set(customer_kate.id.clone().unwrap()),
bakery_id: Set(bakery.id.clone()),
customer_id: Set(customer_kate.id.clone()),
total: Set(dec!(100.00)),
placed_at: Set(Utc::now().naive_utc()),
@ -253,12 +254,12 @@ pub async fn inner_join() {
assert_eq!(results.len(), 2);
assert!((&results)
.into_iter()
.any(|result| result.name == customer_kate.name.clone().unwrap()
&& result.order_total == Some(kate_order_1.total.clone().unwrap())));
.any(|result| result.name == customer_kate.name.clone()
&& result.order_total == Some(kate_order_1.total.clone())));
assert!((&results)
.into_iter()
.any(|result| result.name == customer_kate.name.clone().unwrap()
&& result.order_total == Some(kate_order_2.total.clone().unwrap())));
.any(|result| result.name == customer_kate.name.clone()
&& result.order_total == Some(kate_order_2.total.clone())));
ctx.delete().await;
}
@ -291,8 +292,8 @@ pub async fn group_by() {
.expect("could not insert customer");
let kate_order_1 = order::ActiveModel {
bakery_id: Set(bakery.id.clone().unwrap()),
customer_id: Set(customer_kate.id.clone().unwrap()),
bakery_id: Set(bakery.id.clone()),
customer_id: Set(customer_kate.id.clone()),
total: Set(dec!(99.95)),
placed_at: Set(Utc::now().naive_utc()),
@ -303,8 +304,8 @@ pub async fn group_by() {
.expect("could not insert order");
let kate_order_2 = order::ActiveModel {
bakery_id: Set(bakery.id.clone().unwrap()),
customer_id: Set(customer_kate.id.clone().unwrap()),
bakery_id: Set(bakery.id.clone()),
customer_id: Set(customer_kate.id.clone()),
total: Set(dec!(200.00)),
placed_at: Set(Utc::now().naive_utc()),
@ -340,30 +341,19 @@ pub async fn group_by() {
.unwrap()
.unwrap();
assert_eq!(result.name.as_str(), "Kate");
assert_eq!(result.number_orders, Some(2));
assert_eq!(
result.total_spent,
Some(kate_order_1.total.clone().unwrap() + kate_order_2.total.clone().unwrap())
Some(kate_order_1.total.clone() + kate_order_2.total.clone())
);
assert_eq!(
result.min_spent,
Some(
kate_order_1
.total
.clone()
.unwrap()
.min(kate_order_2.total.clone().unwrap())
)
Some(kate_order_1.total.clone().min(kate_order_2.total.clone()))
);
assert_eq!(
result.max_spent,
Some(
kate_order_1
.total
.clone()
.unwrap()
.max(kate_order_2.total.clone().unwrap())
)
Some(kate_order_1.total.clone().max(kate_order_2.total.clone()))
);
ctx.delete().await;
}
@ -397,8 +387,8 @@ pub async fn having() {
.expect("could not insert customer");
let kate_order_1 = order::ActiveModel {
bakery_id: Set(bakery.id.clone().unwrap()),
customer_id: Set(customer_kate.id.clone().unwrap()),
bakery_id: Set(bakery.id.clone()),
customer_id: Set(customer_kate.id.clone()),
total: Set(dec!(100.00)),
placed_at: Set(Utc::now().naive_utc()),
@ -409,8 +399,8 @@ pub async fn having() {
.expect("could not insert order");
let _kate_order_2 = order::ActiveModel {
bakery_id: Set(bakery.id.clone().unwrap()),
customer_id: Set(customer_kate.id.clone().unwrap()),
bakery_id: Set(bakery.id.clone()),
customer_id: Set(customer_kate.id.clone()),
total: Set(dec!(12.00)),
placed_at: Set(Utc::now().naive_utc()),
@ -429,8 +419,8 @@ pub async fn having() {
.expect("could not insert customer");
let _bob_order_1 = order::ActiveModel {
bakery_id: Set(bakery.id.clone().unwrap()),
customer_id: Set(customer_bob.id.clone().unwrap()),
bakery_id: Set(bakery.id.clone()),
customer_id: Set(customer_bob.id.clone()),
total: Set(dec!(50.0)),
placed_at: Set(Utc::now().naive_utc()),
@ -441,8 +431,8 @@ pub async fn having() {
.expect("could not insert order");
let _bob_order_2 = order::ActiveModel {
bakery_id: Set(bakery.id.clone().unwrap()),
customer_id: Set(customer_bob.id.clone().unwrap()),
bakery_id: Set(bakery.id.clone()),
customer_id: Set(customer_bob.id.clone()),
total: Set(dec!(50.0)),
placed_at: Set(Utc::now().naive_utc()),
@ -472,11 +462,8 @@ pub async fn having() {
.unwrap();
assert_eq!(results.len(), 1);
assert_eq!(results[0].name, customer_kate.name.clone().unwrap());
assert_eq!(
results[0].order_total,
Some(kate_order_1.total.clone().unwrap())
);
assert_eq!(results[0].name, customer_kate.name.clone());
assert_eq!(results[0].order_total, Some(kate_order_1.total.clone()));
ctx.delete().await;
}

View File

@ -42,7 +42,7 @@ async fn seed_data(db: &DatabaseConnection) {
let baker_1 = baker::ActiveModel {
name: Set("Baker 1".to_owned()),
contact_details: Set(serde_json::json!({})),
bakery_id: Set(Some(bakery.id.clone().unwrap())),
bakery_id: Set(Some(bakery.id.clone())),
..Default::default()
}
.save(db)
@ -52,7 +52,7 @@ async fn seed_data(db: &DatabaseConnection) {
let _baker_2 = baker::ActiveModel {
name: Set("Baker 2".to_owned()),
contact_details: Set(serde_json::json!({})),
bakery_id: Set(Some(bakery.id.clone().unwrap())),
bakery_id: Set(Some(bakery.id.clone())),
..Default::default()
}
.save(db)
@ -64,7 +64,7 @@ async fn seed_data(db: &DatabaseConnection) {
price: Set(dec!(10.25)),
gluten_free: Set(false),
serial: Set(Uuid::new_v4()),
bakery_id: Set(Some(bakery.id.clone().unwrap())),
bakery_id: Set(Some(bakery.id.clone())),
..Default::default()
};
@ -75,7 +75,7 @@ async fn seed_data(db: &DatabaseConnection) {
let cake_baker = cakes_bakers::ActiveModel {
cake_id: Set(cake_insert_res.last_insert_id as i32),
baker_id: Set(baker_1.id.clone().unwrap()),
baker_id: Set(baker_1.id.clone()),
..Default::default()
};
@ -97,8 +97,8 @@ async fn seed_data(db: &DatabaseConnection) {
.expect("could not insert customer");
let kate_order_1 = order::ActiveModel {
bakery_id: Set(bakery.id.clone().unwrap()),
customer_id: Set(customer_kate.id.clone().unwrap()),
bakery_id: Set(bakery.id.clone()),
customer_id: Set(customer_kate.id.clone()),
total: Set(dec!(99.95)),
placed_at: Set(Utc::now().naive_utc()),
@ -112,7 +112,7 @@ async fn seed_data(db: &DatabaseConnection) {
cake_id: Set(cake_insert_res.last_insert_id as i32),
price: Set(dec!(10.00)),
quantity: Set(12),
order_id: Set(kate_order_1.id.clone().unwrap()),
order_id: Set(kate_order_1.id.clone()),
..Default::default()
}
.save(db)
@ -123,7 +123,7 @@ async fn seed_data(db: &DatabaseConnection) {
cake_id: Set(cake_insert_res.last_insert_id as i32),
price: Set(dec!(50.00)),
quantity: Set(2),
order_id: Set(kate_order_1.id.clone().unwrap()),
order_id: Set(kate_order_1.id.clone()),
..Default::default()
}
.save(db)
@ -243,7 +243,7 @@ async fn create_order(db: &DatabaseConnection, cake: cake::Model) {
let order = order::ActiveModel {
bakery_id: Set(cake.bakery_id.unwrap()),
customer_id: Set(another_customer.id.clone().unwrap()),
customer_id: Set(another_customer.id.clone()),
total: Set(dec!(200.00)),
placed_at: Set(Utc::now().naive_utc()),
@ -257,7 +257,7 @@ async fn create_order(db: &DatabaseConnection, cake: cake::Model) {
cake_id: Set(cake.id),
price: Set(dec!(10.00)),
quantity: Set(300),
order_id: Set(order.id.clone().unwrap()),
order_id: Set(order.id.clone()),
..Default::default()
}
.save(db)

View File

@ -24,14 +24,14 @@ pub async fn stream() -> Result<(), DbErr> {
.save(&ctx.db)
.await?;
let result = Bakery::find_by_id(bakery.id.clone().unwrap())
let result = Bakery::find_by_id(bakery.id.clone())
.stream(&ctx.db)
.await?
.next()
.await
.unwrap()?;
assert_eq!(result.id, bakery.id.unwrap());
assert_eq!(result.id, bakery.id);
ctx.delete().await;

View File

@ -29,9 +29,17 @@ pub async fn insert_repository(db: &DatabaseConnection) -> Result<(), DbErr> {
}
.into_active_model();
let result = repository.clone().insert(db).await?;
let result = repository.insert(db).await?;
assert_eq!(repository, result);
assert_eq!(
result,
repository::Model {
id: "unique-id-001".to_owned(),
owner: "GC".to_owned(),
name: "G.C.".to_owned(),
description: None,
}
);
Ok(())
}
@ -69,12 +77,20 @@ pub async fn create_and_update_repository(db: &DatabaseConnection) -> Result<(),
))
);
let update_res = Repository::update(updated_active_model.clone())
let update_res = Repository::update(updated_active_model)
.filter(repository::Column::Id.eq("unique-id-002".to_owned()))
.exec(db)
.await?;
assert_eq!(update_res, updated_active_model);
assert_eq!(
update_res,
repository::Model {
id: "unique-id-002".to_owned(),
owner: "GC".to_owned(),
name: "G.C.".to_owned(),
description: Some("description...".to_owned()),
}
);
let updated_active_model = repository::ActiveModel {
description: Set(None),
@ -86,7 +102,15 @@ pub async fn create_and_update_repository(db: &DatabaseConnection) -> Result<(),
.exec(db)
.await?;
assert_eq!(update_res, updated_active_model);
assert_eq!(
update_res,
repository::Model {
id: "unique-id-002".to_owned(),
owner: "GC".to_owned(),
name: "G.C.".to_owned(),
description: None,
}
);
Ok(())
}

View File

@ -4,7 +4,11 @@ pub use common::{features::*, setup::*, TestContext};
use sea_orm::{entity::prelude::*, DatabaseConnection, IntoActiveModel};
#[sea_orm_macros::test]
#[cfg(feature = "sqlx-postgres")]
#[cfg(any(
feature = "sqlx-mysql",
feature = "sqlx-sqlite",
feature = "sqlx-postgres"
))]
async fn main() -> Result<(), DbErr> {
let ctx = TestContext::new("bakery_chain_schema_timestamp_tests").await;
create_tables(&ctx.db).await?;

View File

@ -28,12 +28,11 @@ pub async fn insert_metadata(db: &DatabaseConnection) -> Result<(), DbErr> {
bytes: vec![1, 2, 3],
date: Some(Date::from_ymd(2021, 9, 27)),
time: Some(Time::from_hms(11, 32, 55)),
}
.into_active_model();
};
let result = metadata.clone().insert(db).await?;
let result = metadata.clone().into_active_model().insert(db).await?;
assert_eq!(metadata, result);
assert_eq!(result, metadata);
Ok(())
}