diff --git a/Cargo.toml b/Cargo.toml
index 6e1a3d34..11d0d7eb 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -31,10 +31,11 @@ sea-orm-macros = { path = "sea-orm-macros", optional = true }
 serde = { version = "^1.0", features = [ "derive" ] }
 sqlx = { version = "^0.5", optional = true }
 strum = { version = "^0.20", features = [ "derive" ] }
+serde_json = { version = "^1.0.64", optional = true }
 
 [features]
 debug-print = []
-default = [ "macros", "sqlx-mysql", "runtime-async-std-native-tls" ]
+default = [ "macros", "sqlx-mysql", "runtime-async-std-native-tls", "serialize-query-result" ]
 sqlx-dep = [ "sqlx" ]
 sqlx-mysql = [ "sqlx-dep", "sea-query/sqlx-mysql", "sqlx/mysql" ]
 sqlx-postgres = [ "sqlx-dep", "sea-query/sqlx-postgres", "sqlx/postgres" ]
@@ -45,3 +46,4 @@ runtime-actix-rustls = [ "sqlx/runtime-actix-rustls" ]
 runtime-async-std-rustls = [ "sqlx/runtime-async-std-rustls" ]
 runtime-tokio-rustls = [ "sqlx/runtime-tokio-rustls" ]
 macros = [ "sea-orm-macros" ]
+serialize-query-result = [ "serde_json" ]
diff --git a/examples/sqlx-mysql/Cargo.toml b/examples/sqlx-mysql/Cargo.toml
index d2516bc7..ed22ec9d 100644
--- a/examples/sqlx-mysql/Cargo.toml
+++ b/examples/sqlx-mysql/Cargo.toml
@@ -8,4 +8,5 @@ publish = false
 async-std = { version = "^1.9", features = [ "attributes" ] }
 sea-orm = { path = "../../", features = [ "sqlx-mysql", "runtime-async-std-native-tls", "debug-print" ] }
 # sea-query = { path = "../../../sea-query" }
-strum = { version = "^0.20", features = [ "derive" ] }
\ No newline at end of file
+strum = { version = "^0.20", features = [ "derive" ] }
+serde = { version = "^1.0", features = [ "derive" ] }
\ No newline at end of file
diff --git a/examples/sqlx-mysql/src/main.rs b/examples/sqlx-mysql/src/main.rs
index 6061335a..d21af5da 100644
--- a/examples/sqlx-mysql/src/main.rs
+++ b/examples/sqlx-mysql/src/main.rs
@@ -39,6 +39,22 @@ async fn main() {
     println!("===== =====\n");
 
     find_many_to_many(&db).await.unwrap();
+
+    println!("===== =====\n");
+
+    find_all_json(&db).await.unwrap();
+
+    println!("===== =====\n");
+
+    find_one_json(&db).await.unwrap();
+
+    println!("===== =====\n");
+
+    find_together_json(&db).await.unwrap();
+
+    println!("===== =====\n");
+
+    count_fruits_by_cake_json(&db).await.unwrap();
 }
 
 async fn find_all(db: &Database) -> Result<(), QueryErr> {
@@ -174,3 +190,81 @@ async fn find_many_to_many(db: &Database) -> Result<(), QueryErr> {
 
     Ok(())
 }
+
+async fn find_all_json(db: &Database) -> Result<(), QueryErr> {
+    print!("find all cakes: ");
+
+    let cakes = cake::Entity::find().as_json().all(db).await?;
+
+    println!("\n{:#?}\n", cakes);
+
+    print!("find all fruits: ");
+
+    let fruits = fruit::Entity::find().as_json().all(db).await?;
+
+    println!("\n{:#?}\n", fruits);
+
+    Ok(())
+}
+
+async fn find_one_json(db: &Database) -> Result<(), QueryErr> {
+    print!("find one by primary key: ");
+
+    let cheese = cake::Entity::find_by(1).as_json().one(db).await?;
+
+    println!("\n{:#?}\n", cheese);
+
+    print!("find one by like: ");
+
+    let chocolate = cake::Entity::find()
+        .filter(cake::Column::Name.contains("chocolate"))
+        .as_json()
+        .one(db)
+        .await?;
+
+    println!("\n{:#?}\n", chocolate);
+
+    Ok(())
+}
+
+async fn find_together_json(db: &Database) -> Result<(), QueryErr> {
+    print!("find cakes and fruits: ");
+
+    let cakes_fruits = cake::Entity::find()
+        .left_join_and_select(fruit::Entity)
+        .as_json()
+        .all(db)
+        .await?;
+
+    println!("\n{:#?}\n", cakes_fruits);
+
+    print!("find one cake and fruit: ");
+
+    let cake_fruit = cake::Entity::find()
+        .left_join_and_select(fruit::Entity)
+        .as_json()
+        .one(db)
+        .await?;
+
+    println!("\n{:#?}\n", cake_fruit);
+
+    Ok(())
+}
+
+async fn count_fruits_by_cake_json(db: &Database) -> Result<(), QueryErr> {
+    print!("count fruits by cake: ");
+
+    let count = cake::Entity::find()
+        .left_join(fruit::Entity)
+        .select_only()
+        .column(cake::Column::Name)
+        .column_as(fruit::Column::Id.count(), "num_of_fruits")
+        .group_by(cake::Column::Name)
+        .as_json()
+        .all(db)
+        .await?;
+
+    println!("\n{:#?}\n", count);
+
+    Ok(())
+}
diff --git a/src/connector/mod.rs b/src/connector/mod.rs
index 0f5c0e54..7240ac7f 100644
--- a/src/connector/mod.rs
+++ b/src/connector/mod.rs
@@ -1,7 +1,13 @@
 mod select;
 
+#[cfg(feature = "serialize-query-result")]
+mod select_json;
+
 pub use select::*;
 
+#[cfg(feature = "serialize-query-result")]
+pub use select_json::*;
+
 use crate::{DatabaseConnection, QueryResult, Statement, TypeErr};
 use async_trait::async_trait;
 use std::{error::Error, fmt};
diff --git a/src/connector/select.rs b/src/connector/select.rs
index 810e93cd..099f77c5 100644
--- a/src/connector/select.rs
+++ b/src/connector/select.rs
@@ -5,6 +5,9 @@ use crate::{
 use sea_query::{QueryBuilder, SelectStatement};
 use std::marker::PhantomData;
 
+#[cfg(feature = "serialize-query-result")]
+use super::select_json::{SelectJson, SelectTwoJson};
+
 #[derive(Clone, Debug)]
 pub struct SelectModel<M>
 where
@@ -38,6 +41,13 @@ where
         }
     }
 
+    #[cfg(feature = "serialize-query-result")]
+    pub fn as_json(self) -> SelectJson {
+        SelectJson {
+            query: self.query,
+        }
+    }
+
     pub async fn one(self, db: &Database) -> Result<E::Model, QueryErr> {
         self.into_model::<E::Model>().one(db).await
     }
@@ -63,6 +73,13 @@ where
         }
     }
 
+    #[cfg(feature = "serialize-query-result")]
+    pub fn as_json(self) -> SelectTwoJson {
+        SelectTwoJson {
+            query: self.query,
+        }
+    }
+
     pub async fn one(self, db: &Database) -> Result<(E::Model, F::Model), QueryErr> {
         self.into_model::<E::Model, F::Model>().one(db).await
     }
diff --git a/src/connector/select_json.rs b/src/connector/select_json.rs
new file mode 100644
index 00000000..0532bb9c
--- /dev/null
+++ b/src/connector/select_json.rs
@@ -0,0 +1,75 @@
+use crate::query::combine;
+use crate::{Connection, Database, QueryErr, Statement};
+use sea_query::{QueryBuilder, SelectStatement};
+use serde_json::Value as JsonValue;
+
+#[derive(Clone, Debug)]
+pub struct SelectJson {
+    pub(crate) query: SelectStatement,
+}
+
+impl SelectJson {
+    pub fn build<B>(&self, builder: B) -> Statement
+    where
+        B: QueryBuilder,
+    {
+        self.query.build(builder).into()
+    }
+
+    pub async fn one(mut self, db: &Database) -> Result<JsonValue, QueryErr> {
+        let builder = db.get_query_builder_backend();
+        self.query.limit(1);
+        // TODO: Error handling
+        db.get_connection().query_one(self.build(builder)).await?.as_json("").map_err(|_e| QueryErr)
+    }
+
+    pub async fn all(self, db: &Database) -> Result<JsonValue, QueryErr> {
+        let builder = db.get_query_builder_backend();
+        let rows = db.get_connection().query_all(self.build(builder)).await?;
+        let mut values = Vec::new();
+        for row in rows.into_iter() {
+            // TODO: Error handling
+            values.push(row.as_json("").map_err(|_e| QueryErr)?);
+        }
+        Ok(JsonValue::Array(values))
+    }
+}
+
+#[derive(Clone, Debug)]
+pub struct SelectTwoJson {
+    pub(crate) query: SelectStatement,
+}
+
+impl SelectTwoJson {
+    pub fn build<B>(&self, builder: B) -> Statement
+    where
+        B: QueryBuilder,
+    {
+        self.query.build(builder).into()
+    }
+
+    pub async fn one(mut self, db: &Database) -> Result<JsonValue, QueryErr> {
+        let builder = db.get_query_builder_backend();
+        self.query.limit(1);
+        let row = db.get_connection().query_one(self.build(builder)).await?;
+        Ok(JsonValue::Array(vec![
+            // TODO: Error handling
+            row.as_json(combine::SELECT_A).map_err(|_e| QueryErr)?,
+            row.as_json(combine::SELECT_B).map_err(|_e| QueryErr)?,
+        ]))
+    }
+
+    pub async fn all(self, db: &Database) -> Result<JsonValue, QueryErr> {
+        let builder = db.get_query_builder_backend();
+        let rows = db.get_connection().query_all(self.build(builder)).await?;
+        let mut json_values = Vec::new();
+        for row in rows.into_iter() {
+            json_values.push(JsonValue::Array(vec![
+                // TODO: Error handling
+                row.as_json(combine::SELECT_A).map_err(|_e| QueryErr)?,
+                row.as_json(combine::SELECT_B).map_err(|_e| QueryErr)?,
+            ]));
+        }
+        Ok(JsonValue::Array(json_values))
+    }
+}
diff --git a/src/query/result.rs b/src/query/result.rs
index 713fbb87..750a7e33 100644
--- a/src/query/result.rs
+++ b/src/query/result.rs
@@ -75,6 +75,46 @@ impl QueryResult {
     {
         T::try_get(self, pre, col)
     }
+
+    #[cfg(feature = "serialize-query-result")]
+    pub fn as_json(&self, pre: &str) -> Result<serde_json::Value, TypeErr> {
+        use serde_json::{Value, Map, json};
+        match &self.row {
+            QueryResultRow::SqlxMySql(row) => {
+                use sqlx::{Row, Column, Type, MySql};
+                let mut map = Map::new();
+                for column in row.columns() {
+                    let col = if !column.name().starts_with(pre) {
+                        continue
+                    } else {
+                        column.name().replacen(pre, "", 1)
+                    };
+                    let col_type = column.type_info();
+                    macro_rules! match_mysql_type {
+                        ( $type: ty ) => {
+                            if <$type as Type<MySql>>::type_info().eq(col_type) {
+                                map.insert(col.to_owned(), json!(self.try_get::<$type>(pre, &col)?));
+                                continue
+                            }
+                        };
+                    }
+                    match_mysql_type!(bool);
+                    match_mysql_type!(i8);
+                    match_mysql_type!(i16);
+                    match_mysql_type!(i32);
+                    match_mysql_type!(i64);
+                    match_mysql_type!(u8);
+                    match_mysql_type!(u16);
+                    match_mysql_type!(u32);
+                    match_mysql_type!(u64);
+                    match_mysql_type!(f32);
+                    match_mysql_type!(f64);
+                    match_mysql_type!(String);
+                }
+                Ok(Value::Object(map))
+            },
+        }
+    }
 }
 
 // TypeErr //
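
For reference, a minimal sketch (not part of the patch) of consuming the serde_json::Value that the new API returns. The helper name is hypothetical; it assumes the `cake` entity and the `Database`/`QueryErr` types used in the example crate, with the `serialize-query-result` feature enabled. `SelectJson::all` yields a JSON array with one object per row, keyed by column name (built by `QueryResult::as_json`), so the result can be walked with ordinary serde_json accessors:

async fn print_cake_names_json(db: &Database) -> Result<(), QueryErr> {
    // `all` returns a serde_json::Value: a JSON array with one object per row,
    // keyed by column name (see QueryResult::as_json in the patch above).
    let cakes = cake::Entity::find().as_json().all(db).await?;

    if let Some(rows) = cakes.as_array() {
        for row in rows {
            // Each row is a JSON object, so columns can be read by name;
            // "name" is a column of the example `cake` entity.
            println!("cake name: {}", row["name"]);
        }
    }

    Ok(())
}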