Bump version of sqlx, sea-query, sea-schema, time and uuid (#834)

* Bump version of sqlx, sea-query, sea-schema, time and uuid

* PostgreSQL `u32` was wrapped in `Oid` (launchbadge/sqlx#1602)

* Update test cases

* Fix clippy warnings

* cargo fmt

* Fix clippy warnings

* Bump sea-schema to ^0.9.2

* Update test cases

* Pin on sea-query minor version
Billy Chan 2022-07-05 01:05:11 +08:00 committed by GitHub
parent 1c4acf1402
commit d6831e5295
21 changed files with 112 additions and 69 deletions


@@ -25,19 +25,19 @@ path = "src/lib.rs"
 async-stream = { version = "^0.3" }
 async-trait = { version = "^0.1" }
 chrono = { version = "^0", optional = true }
-time = { version = "^0.2", optional = true }
+time = { version = "^0.3", optional = true }
 futures = { version = "^0.3" }
 futures-util = { version = "^0.3" }
 log = { version = "^0" }
 tracing = { version = "0.1", features = ["log"] }
 rust_decimal = { version = "^1", optional = true }
 sea-orm-macros = { version = "^0.8.0", path = "sea-orm-macros", optional = true }
-sea-query = { version = "^0.24.5", features = ["thread-safe"] }
+sea-query = { version = "^0.26", features = ["thread-safe"] }
 sea-strum = { version = "^0.23", features = ["derive", "sea-orm"] }
 serde = { version = "^1.0", features = ["derive"] }
 serde_json = { version = "^1", optional = true }
-sqlx = { version = "^0.5", optional = true }
+sqlx = { version = "^0.6", optional = true }
-uuid = { version = "0.8", features = ["serde", "v4"], optional = true }
+uuid = { version = "^1", features = ["serde", "v4"], optional = true }
 ouroboros = "0.15"
 url = "^2.2"
 once_cell = "1.8"
@@ -53,6 +53,7 @@ rust_decimal_macros = { version = "^1" }
 tracing-subscriber = { version = "0.3", features = ["env-filter"] }
 sea-orm = { path = ".", features = ["mock", "debug-print"] }
 pretty_assertions = { version = "^0.7" }
+time = { version = "^0.3", features = ["macros"] }

 [features]
 debug-print = []
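For context (not part of the diff): the `uuid` dependency moves from the 0.8 line to 1.x while keeping the same `serde` and `v4` feature names, so entity code that generates UUID keys is unaffected. A minimal sketch, assuming a crate that depends on uuid 1.x with the `v4` feature:

use uuid::Uuid;

fn main() {
    // `Uuid::new_v4()` has the same signature in uuid 0.8 and 1.x;
    // only the dependency declaration above needed to change.
    let id = Uuid::new_v4();
    println!("{}", id);
}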


@@ -33,8 +33,8 @@ clap = { version = "^3.2", features = ["env", "derive"] }
 dotenv = { version = "^0.15" }
 async-std = { version = "^1.9", features = [ "attributes", "tokio1" ] }
 sea-orm-codegen = { version = "^0.8.0", path = "../sea-orm-codegen", optional = true }
-sea-schema = { version = "^0.8.1" }
+sea-schema = { version = "^0.9.2" }
-sqlx = { version = "^0.5", default-features = false, features = [ "mysql", "postgres" ], optional = true }
+sqlx = { version = "^0.6", default-features = false, features = [ "mysql", "postgres" ], optional = true }
 tracing-subscriber = { version = "0.3", features = ["env-filter"] }
 tracing = { version = "0.1" }
 url = "^2.2"


@@ -15,7 +15,7 @@ name = "sea_orm_codegen"
 path = "src/lib.rs"

 [dependencies]
-sea-query = { version = "^0.24.0" }
+sea-query = { version = "^0.26.0" }
 syn = { version = "^1", default-features = false, features = [
     "derive",
     "parsing",


@@ -213,7 +213,7 @@ mod tests {
     use crate::Column;
     use proc_macro2::TokenStream;
     use quote::quote;
-    use sea_query::{Alias, ColumnDef, ColumnType, SeaRc};
+    use sea_query::{Alias, BlobSize, ColumnDef, ColumnType, SeaRc};

     fn setup() -> Vec<Column> {
         macro_rules! make_col {
@@ -243,7 +243,7 @@ mod tests {
             make_col!("CakeFillingId", ColumnType::BigUnsigned(Some(12))),
             make_col!("cake-filling-id", ColumnType::Float(None)),
             make_col!("CAKE_FILLING_ID", ColumnType::Double(None)),
-            make_col!("CAKE-FILLING-ID", ColumnType::Binary(None)),
+            make_col!("CAKE-FILLING-ID", ColumnType::Binary(BlobSize::Blob(None))),
             make_col!("CAKE", ColumnType::Boolean),
             make_col!("date", ColumnType::Date),
             make_col!("time", ColumnType::Time(None)),


@@ -1,8 +1,7 @@
 use proc_macro2::{Ident, TokenStream};
-use quote::{format_ident, quote, quote_spanned};
-use syn::{ext::IdentExt, Data, DataStruct, Field, Fields};
+use quote::quote;

-pub fn expand_derive_from_json_query_result(ident: Ident, data: Data) -> syn::Result<TokenStream> {
+pub fn expand_derive_from_json_query_result(ident: Ident) -> syn::Result<TokenStream> {
     Ok(quote!(
         #[automatically_derived]
         impl sea_orm::TryGetableFromJson for #ident {}


@@ -611,9 +611,9 @@ pub fn derive_migration_name(input: TokenStream) -> TokenStream {
 #[proc_macro_derive(FromJsonQueryResult)]
 pub fn derive_from_json_query_result(input: TokenStream) -> TokenStream {
-    let DeriveInput { ident, data, .. } = parse_macro_input!(input);
-    match derives::expand_derive_from_json_query_result(ident, data) {
+    let DeriveInput { ident, .. } = parse_macro_input!(input);
+    match derives::expand_derive_from_json_query_result(ident) {
         Ok(ts) => ts.into(),
         Err(e) => e.to_compile_error().into(),
     }


@@ -23,7 +23,7 @@ clap = { version = "^3.2", features = ["env", "derive"] }
 dotenv = { version = "^0.15" }
 sea-orm = { version = "^0.8.0", path = "../", default-features = false, features = ["macros"] }
 sea-orm-cli = { version = "^0.8.1", path = "../sea-orm-cli", default-features = false }
-sea-schema = { version = "^0.8.1" }
+sea-schema = { version = "^0.9.2" }
 tracing = { version = "0.1", features = ["log"] }
 tracing-subscriber = { version = "0.3", features = ["env-filter"] }


@@ -132,7 +132,7 @@ impl ConnectOptions {
             opt = opt.min_connections(min_connections);
         }
         if let Some(connect_timeout) = self.connect_timeout {
-            opt = opt.connect_timeout(connect_timeout);
+            opt = opt.acquire_timeout(connect_timeout);
         }
         if let Some(idle_timeout) = self.idle_timeout {
             opt = opt.idle_timeout(Some(idle_timeout));
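For context (not part of the diff): sqlx 0.6 renamed `PoolOptions::connect_timeout` to `acquire_timeout`, which is why the connect timeout above is now forwarded to `acquire_timeout`. A minimal sketch of the same setting when building a pool with sqlx 0.6 directly; the URL and limits are placeholder values, not taken from this commit.

use std::time::Duration;

use sqlx::postgres::{PgPool, PgPoolOptions};

// Placeholder connection routine, for illustration only.
async fn connect(url: &str) -> Result<PgPool, sqlx::Error> {
    PgPoolOptions::new()
        .max_connections(5)
        // sqlx 0.6: formerly `connect_timeout`
        .acquire_timeout(Duration::from_secs(8))
        .connect(url)
        .await
}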


@@ -54,7 +54,7 @@ impl<'a> Drop for MetricStream<'a> {
     fn drop(&mut self) {
         if let (Some(callback), Some(elapsed)) = (self.metric_callback.as_deref(), self.elapsed) {
             let info = crate::metric::Info {
-                elapsed: elapsed,
+                elapsed,
                 statement: self.stmt,
                 failed: false,
             };


@@ -244,7 +244,7 @@ pub trait ColumnTrait: IdenStatic + Iterable + FromStr {
     /// ```
     fn starts_with(&self, s: &str) -> SimpleExpr {
         let pattern = format!("{}%", s);
-        Expr::tbl(self.entity_name(), *self).like(&pattern)
+        Expr::tbl(self.entity_name(), *self).like(pattern)
     }

     /// ```
@@ -260,7 +260,7 @@ pub trait ColumnTrait: IdenStatic + Iterable + FromStr {
     /// ```
     fn ends_with(&self, s: &str) -> SimpleExpr {
         let pattern = format!("%{}", s);
-        Expr::tbl(self.entity_name(), *self).like(&pattern)
+        Expr::tbl(self.entity_name(), *self).like(pattern)
     }

     /// ```
@@ -276,7 +276,7 @@ pub trait ColumnTrait: IdenStatic + Iterable + FromStr {
     /// ```
     fn contains(&self, s: &str) -> SimpleExpr {
         let pattern = format!("%{}%", s);
-        Expr::tbl(self.entity_name(), *self).like(&pattern)
+        Expr::tbl(self.entity_name(), *self).like(pattern)
     }

     bind_func_no_params!(max);
@@ -382,7 +382,7 @@ impl From<ColumnType> for sea_query::ColumnType {
             ColumnType::TimestampWithTimeZone => sea_query::ColumnType::TimestampWithTimeZone(None),
             ColumnType::Time => sea_query::ColumnType::Time(None),
             ColumnType::Date => sea_query::ColumnType::Date,
-            ColumnType::Binary => sea_query::ColumnType::Binary(None),
+            ColumnType::Binary => sea_query::ColumnType::Binary(sea_query::BlobSize::Blob(None)),
             ColumnType::Boolean => sea_query::ColumnType::Boolean,
             ColumnType::Money(s) => sea_query::ColumnType::Money(s),
             ColumnType::Json => sea_query::ColumnType::Json,


@@ -42,8 +42,7 @@ where
         // so that self is dropped before entering await
         let mut query = self.query;
         if db.support_returning() && <A::Entity as EntityTrait>::PrimaryKey::iter().count() > 0 {
-            let mut returning = Query::select();
-            returning.columns(
+            let returning = Query::returning().columns(
                 <A::Entity as EntityTrait>::PrimaryKey::iter().map(|c| c.into_column_ref()),
             );
             query.returning(returning);
@@ -149,8 +148,8 @@ where
         let db_backend = db.get_database_backend();
         let found = match db.support_returning() {
             true => {
-                let mut returning = Query::select();
-                returning.exprs(<A::Entity as EntityTrait>::Column::iter().map(|c| {
+                let returning =
+                    Query::returning().exprs(<A::Entity as EntityTrait>::Column::iter().map(|c| {
                         let col = Expr::col(c);
                         let col_def = ColumnTrait::def(&c);
                         let col_type = col_def.get_column_type();


@@ -98,7 +98,10 @@ where
         let number_of_items = self.num_items().await?;
         let number_of_pages = self.compute_pages_number(number_of_items);
-        Ok(ItemsAndPagesNumber { number_of_items, number_of_pages })
+        Ok(ItemsAndPagesNumber {
+            number_of_items,
+            number_of_pages,
+        })
     }

     /// Compute the number of pages for the current page


@@ -298,7 +298,6 @@ try_getable_all!(i32);
 try_getable_all!(i64);
 try_getable_unsigned!(u8);
 try_getable_unsigned!(u16);
-try_getable_all!(u32);
 try_getable_mysql!(u64);
 try_getable_all!(f32);
 try_getable_all!(f64);
@@ -391,6 +390,47 @@ impl TryGetable for Decimal {
 #[cfg(feature = "with-uuid")]
 try_getable_all!(uuid::Uuid);

+impl TryGetable for u32 {
+    fn try_get(res: &QueryResult, pre: &str, col: &str) -> Result<Self, TryGetError> {
+        let _column = format!("{}{}", pre, col);
+        match &res.row {
+            #[cfg(feature = "sqlx-mysql")]
+            QueryResultRow::SqlxMySql(row) => {
+                use sqlx::Row;
+                row.try_get::<Option<u32>, _>(_column.as_str())
+                    .map_err(|e| TryGetError::DbErr(crate::sqlx_error_to_query_err(e)))
+                    .and_then(|opt| opt.ok_or(TryGetError::Null))
+            }
+            #[cfg(feature = "sqlx-postgres")]
+            QueryResultRow::SqlxPostgres(row) => {
+                use sqlx::postgres::types::Oid;
+                // Since 0.6.0, SQLx has dropped direct mapping from PostgreSQL's OID to Rust's `u32`;
+                // instead, `u32` is wrapped by a `sqlx::Oid`.
+                use sqlx::Row;
+                row.try_get::<Option<Oid>, _>(_column.as_str())
+                    .map_err(|e| TryGetError::DbErr(crate::sqlx_error_to_query_err(e)))
+                    .and_then(|opt| opt.ok_or(TryGetError::Null))
+                    .map(|oid| oid.0)
+            }
+            #[cfg(feature = "sqlx-sqlite")]
+            QueryResultRow::SqlxSqlite(row) => {
+                use sqlx::Row;
+                row.try_get::<Option<u32>, _>(_column.as_str())
+                    .map_err(|e| TryGetError::DbErr(crate::sqlx_error_to_query_err(e)))
+                    .and_then(|opt| opt.ok_or(TryGetError::Null))
+            }
+            #[cfg(feature = "mock")]
+            #[allow(unused_variables)]
+            QueryResultRow::Mock(row) => row.try_get(_column.as_str()).map_err(|e| {
+                debug_print!("{:#?}", e.to_string());
+                TryGetError::Null
+            }),
+            #[allow(unreachable_patterns)]
+            _ => unreachable!(),
+        }
+    }
+}
+
 // TryGetableMany //

 /// Perform a query on multiple columns
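The new `TryGetable` impl above works around launchbadge/sqlx#1602: since sqlx 0.6, PostgreSQL OID values decode as `sqlx::postgres::types::Oid` (a newtype over `u32`) rather than as a bare `u32`. A minimal sketch, not part of this commit, of reading an OID column with sqlx 0.6 directly; the query and column name are placeholders.

use sqlx::postgres::types::Oid;
use sqlx::{PgPool, Row};

// Placeholder lookup, for illustration only.
async fn table_oid(pool: &PgPool, table: &str) -> Result<u32, sqlx::Error> {
    let row = sqlx::query("SELECT oid FROM pg_class WHERE relname = $1")
        .bind(table)
        .fetch_one(pool)
        .await?;
    // The column decodes as `Oid`; the raw integer is its public `.0` field,
    // exactly as the impl above does with `.map(|oid| oid.0)`.
    let oid: Oid = row.try_get("oid")?;
    Ok(oid.0)
}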


@@ -91,8 +91,8 @@ where
     {
         match db.support_returning() {
             true => {
-                let mut returning = Query::select();
-                returning.exprs(<A::Entity as EntityTrait>::Column::iter().map(|c| {
+                let returning =
+                    Query::returning().exprs(<A::Entity as EntityTrait>::Column::iter().map(|c| {
                         let col = Expr::col(c);
                         let col_def = c.def();
                         let col_type = col_def.get_column_type();


@@ -482,6 +482,7 @@ pub(crate) fn unpack_table_ref(table_ref: &TableRef) -> DynIden {
         | TableRef::TableAlias(tbl, _)
         | TableRef::SchemaTableAlias(_, tbl, _)
         | TableRef::DatabaseSchemaTableAlias(_, _, tbl, _)
-        | TableRef::SubQuery(_, tbl) => SeaRc::clone(tbl),
+        | TableRef::SubQuery(_, tbl)
+        | TableRef::ValuesList(_, tbl) => SeaRc::clone(tbl),
     }
 }


@@ -67,7 +67,7 @@ impl FromQueryResult for JsonValue {
             #[cfg(feature = "sqlx-postgres")]
             QueryResultRow::SqlxPostgres(row) => {
                 use serde_json::json;
-                use sqlx::{Column, Postgres, Row, Type};
+                use sqlx::{postgres::types::Oid, Column, Postgres, Row, Type};
                 for column in row.columns() {
                     let col = if !column.name().starts_with(pre) {
                         continue;
@@ -89,7 +89,11 @@ impl FromQueryResult for JsonValue {
                         match_postgres_type!(i64);
                         // match_postgres_type!(u8); // unsupported by SQLx Postgres
                         // match_postgres_type!(u16); // unsupported by SQLx Postgres
-                        match_postgres_type!(u32);
+                        // Since 0.6.0, SQLx has dropped direct mapping from PostgreSQL's OID to Rust's `u32`;
+                        // instead, `u32` is wrapped by a `sqlx::Oid`.
+                        if <Oid as Type<Postgres>>::type_info().eq(col_type) {
+                            try_get_type!(u32, col)
+                        }
                         // match_postgres_type!(u64); // unsupported by SQLx Postgres
                         match_postgres_type!(f32);
                         match_postgres_type!(f64);


@@ -20,7 +20,7 @@ pub struct StringVec(pub Vec<String>);
 impl From<StringVec> for Value {
     fn from(source: StringVec) -> Self {
-        Value::String(serde_json::to_string(&source).ok().map(|s| Box::new(s)))
+        Value::String(serde_json::to_string(&source).ok().map(Box::new))
     }
 }


@@ -37,15 +37,13 @@ pub async fn insert_json_struct_1(db: &DatabaseConnection) -> Result<(), DbErr>
             name: "apple".into(),
             price: 12.01,
             notes: Some("hand picked, organic".into()),
-        }
-        .into(),
+        },
         json_value_opt: Some(KeyValue {
             id: 1,
             name: "apple".into(),
             price: 12.01,
             notes: Some("hand picked, organic".into()),
-        })
-        .into(),
+        }),
     };

     let result = model.clone().into_active_model().insert(db).await?;
@@ -79,9 +77,8 @@ pub async fn insert_json_struct_2(db: &DatabaseConnection) -> Result<(), DbErr>
             name: "orange".into(),
             price: 10.93,
             notes: None,
-        }
-        .into(),
-        json_value_opt: None.into(),
+        },
+        json_value_opt: None,
     };

     let result = model.clone().into_active_model().insert(db).await?;


@@ -32,8 +32,8 @@ async fn main() -> Result<(), DbErr> {
         ])
         .and_where(Column::Id.eq(1));

-    let mut returning = Query::select();
-    returning.columns(vec![Column::Id, Column::Name, Column::ProfitMargin]);
+    let returning =
+        Query::returning().columns(vec![Column::Id, Column::Name, Column::ProfitMargin]);

     create_tables(db).await?;
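For context (not part of the diff): sea-query 0.26 replaces the old trick of building a `SelectStatement` for the RETURNING clause with a dedicated builder, `Query::returning()`, which is what this example and the insert/update executors now use. A minimal sketch of the pattern with made-up table and column names, assuming plain sea-query 0.26:

use sea_query::{Alias, Expr, PostgresQueryBuilder, Query};

fn main() {
    // Build the RETURNING clause with the dedicated builder (sea-query 0.26).
    let returning = Query::returning().columns(vec![Alias::new("id"), Alias::new("name")]);

    // Attach it to an UPDATE statement; identifiers here are illustrative only.
    let mut stmt = Query::update();
    stmt.table(Alias::new("cake"))
        .values(vec![(Alias::new("name"), "Cheesecake".into())])
        .and_where(Expr::col(Alias::new("id")).eq(1))
        .returning(returning);

    // Renders roughly as: UPDATE "cake" SET "name" = 'Cheesecake' WHERE "id" = 1 RETURNING "id", "name"
    println!("{}", stmt.to_string(PostgresQueryBuilder));
}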


@@ -1,7 +1,7 @@
 pub mod common;
 pub use common::{features::*, setup::*, TestContext};
 use sea_orm::{entity::prelude::*, DatabaseConnection, IntoActiveModel};
-use time::{date, time};
+use time::macros::{date, time};

 #[sea_orm_macros::test]
 #[cfg(any(
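For context (not part of the diff): in time 0.3 the `date!` and `time!` macros moved to the `time::macros` module and are gated behind the crate's `macros` feature (added to the dev-dependencies above). A minimal sketch, with made-up values, of constructing the compile-time-checked values these tests rely on:

use time::macros::{date, time};
use time::{Date, PrimitiveDateTime, Time};

fn main() {
    // Both macros validate their arguments at compile time (time 0.3, `macros` feature).
    let d: Date = date!(2022 - 01 - 07);
    let t: Time = time!(12:11:23);
    let dt = PrimitiveDateTime::new(d, t);
    println!("{}", dt);
}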


@@ -2,7 +2,6 @@ pub mod common;
 pub use common::{features::*, setup::*, TestContext};
 use pretty_assertions::assert_eq;
 use sea_orm::{entity::prelude::*, DatabaseConnection, IntoActiveModel};
-use serde_json::json;

 #[sea_orm_macros::test]
 #[cfg(any(
@@ -39,17 +38,17 @@ pub async fn create_applog(db: &DatabaseConnection) -> Result<(), DbErr> {
     #[cfg(feature = "sqlx-sqlite")]
     assert_eq!(
         Applog::find().into_json().one(db).await?,
-        Some(json!({
+        Some(serde_json::json!({
             "id": 1,
             "action": "Testing",
             "json": r#""HI""#,
-            "created_at": "2021-09-17 09:50:20",
+            "created_at": "2021-09-17T17:50:20+08:00",
         }))
     );

     #[cfg(feature = "sqlx-mysql")]
     assert_eq!(
         Applog::find().into_json().one(db).await?,
-        Some(json!({
+        Some(serde_json::json!({
             "id": 1,
             "action": "Testing",
             "json": "HI",
@@ -59,7 +58,7 @@ pub async fn create_applog(db: &DatabaseConnection) -> Result<(), DbErr> {
     #[cfg(feature = "sqlx-postgres")]
     assert_eq!(
         Applog::find().into_json().one(db).await?,
-        Some(json!({
+        Some(serde_json::json!({
             "id": 1,
             "action": "Testing",
             "json": "HI",
@@ -88,17 +87,17 @@ pub async fn create_satellites_log(db: &DatabaseConnection) -> Result<(), DbErr>
     #[cfg(feature = "sqlx-sqlite")]
     assert_eq!(
         Satellite::find().into_json().one(db).await?,
-        Some(json!({
+        Some(serde_json::json!({
             "id": 1,
             "satellite_name": "Sea-00001-2022",
-            "launch_date": "2022-01-07 12:11:23",
-            "deployment_date": "2022-01-07 12:11:23",
+            "launch_date": "2022-01-07T12:11:23+00:00",
+            "deployment_date": "2022-01-07T12:11:23Z".parse::<DateTimeLocal>().unwrap(),
         }))
     );

     #[cfg(feature = "sqlx-mysql")]
     assert_eq!(
         Satellite::find().into_json().one(db).await?,
-        Some(json!({
+        Some(serde_json::json!({
             "id": 1,
             "satellite_name": "Sea-00001-2022",
             "launch_date": "2022-01-07T12:11:23Z",
@@ -108,7 +107,7 @@ pub async fn create_satellites_log(db: &DatabaseConnection) -> Result<(), DbErr>
     #[cfg(feature = "sqlx-postgres")]
     assert_eq!(
         Satellite::find().into_json().one(db).await?,
-        Some(json!({
+        Some(serde_json::json!({
             "id": 1,
             "satellite_name": "Sea-00001-2022",
             "launch_date": "2022-01-07T12:11:23+00:00",