Upstream Changes - 1 (#2145)

* upstream changes

* universal `#[sea_orm_macros::test]`

* fix

* fix

* `ColumnTrait::into_returning_expr`

* fix

* fix

* Do not pub sqlx_common

---------

Co-authored-by: Chris Tsang <chris.2y3@outlook.com>
This commit is contained in:
Billy Chan 2024-03-11 02:14:46 +08:00 committed by GitHub
parent b775027fee
commit f3967fdaca
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
49 changed files with 1120 additions and 1297 deletions

View File

@ -810,6 +810,11 @@ pub fn test(_: TokenStream, input: TokenStream) -> TokenStream {
quote::quote! ( quote::quote! (
#[test] #[test]
#[cfg(any(
feature = "sqlx-mysql",
feature = "sqlx-sqlite",
feature = "sqlx-postgres",
))]
#(#attrs)* #(#attrs)*
fn #name() #ret { fn #name() #ret {
let _ = ::tracing_subscriber::fmt() let _ = ::tracing_subscriber::fmt()

View File

@ -579,14 +579,10 @@ impl DbBackend {
/// Check if the database supports `RETURNING` syntax on insert and update /// Check if the database supports `RETURNING` syntax on insert and update
pub fn support_returning(&self) -> bool { pub fn support_returning(&self) -> bool {
#[cfg(not(feature = "sqlite-use-returning-for-3_35"))] match self {
{ Self::Postgres => true,
matches!(self, Self::Postgres) Self::Sqlite if cfg!(feature = "sqlite-use-returning-for-3_35") => true,
} _ => false,
#[cfg(feature = "sqlite-use-returning-for-3_35")]
{
matches!(self, Self::Postgres | Self::Sqlite)
} }
} }
} }

View File

@ -160,35 +160,6 @@ impl ConnectOptions {
} }
} }
#[cfg(feature = "sqlx-dep")]
/// Convert [ConnectOptions] into [sqlx::pool::PoolOptions]
pub fn pool_options<DB>(self) -> sqlx::pool::PoolOptions<DB>
where
DB: sqlx::Database,
{
let mut opt = sqlx::pool::PoolOptions::new();
if let Some(max_connections) = self.max_connections {
opt = opt.max_connections(max_connections);
}
if let Some(min_connections) = self.min_connections {
opt = opt.min_connections(min_connections);
}
if let Some(connect_timeout) = self.connect_timeout {
opt = opt.acquire_timeout(connect_timeout);
}
if let Some(idle_timeout) = self.idle_timeout {
opt = opt.idle_timeout(Some(idle_timeout));
}
if let Some(acquire_timeout) = self.acquire_timeout {
opt = opt.acquire_timeout(acquire_timeout);
}
if let Some(max_lifetime) = self.max_lifetime {
opt = opt.max_lifetime(Some(max_lifetime));
}
opt = opt.test_before_acquire(self.test_before_acquire);
opt
}
/// Get the database URL of the pool /// Get the database URL of the pool
pub fn get_url(&self) -> &str { pub fn get_url(&self) -> &str {
&self.url &self.url

View File

@ -150,11 +150,11 @@ impl Into<serde_json::Value> for ProxyRow {
pub fn from_query_result_to_proxy_row(result: &QueryResult) -> ProxyRow { pub fn from_query_result_to_proxy_row(result: &QueryResult) -> ProxyRow {
match &result.row { match &result.row {
#[cfg(feature = "sqlx-mysql")] #[cfg(feature = "sqlx-mysql")]
QueryResultRow::SqlxMySql(row) => from_sqlx_mysql_row_to_proxy_row(&row), QueryResultRow::SqlxMySql(row) => crate::from_sqlx_mysql_row_to_proxy_row(&row),
#[cfg(feature = "sqlx-postgres")] #[cfg(feature = "sqlx-postgres")]
QueryResultRow::SqlxPostgres(row) => from_sqlx_postgres_row_to_proxy_row(&row), QueryResultRow::SqlxPostgres(row) => crate::from_sqlx_postgres_row_to_proxy_row(&row),
#[cfg(feature = "sqlx-sqlite")] #[cfg(feature = "sqlx-sqlite")]
QueryResultRow::SqlxSqlite(row) => from_sqlx_sqlite_row_to_proxy_row(&row), QueryResultRow::SqlxSqlite(row) => crate::from_sqlx_sqlite_row_to_proxy_row(&row),
#[cfg(feature = "mock")] #[cfg(feature = "mock")]
QueryResultRow::Mock(row) => ProxyRow { QueryResultRow::Mock(row) => ProxyRow {
values: row.values.clone(), values: row.values.clone(),
@ -163,675 +163,6 @@ pub fn from_query_result_to_proxy_row(result: &QueryResult) -> ProxyRow {
} }
} }
#[cfg(feature = "sqlx-mysql")]
pub(crate) fn from_sqlx_mysql_row_to_proxy_row(row: &sqlx::mysql::MySqlRow) -> ProxyRow {
// https://docs.rs/sqlx-mysql/0.7.2/src/sqlx_mysql/protocol/text/column.rs.html
// https://docs.rs/sqlx-mysql/0.7.2/sqlx_mysql/types/index.html
use sqlx::{Column, Row, TypeInfo};
ProxyRow {
values: row
.columns()
.iter()
.map(|c| {
(
c.name().to_string(),
match c.type_info().name() {
"TINYINT(1)" | "BOOLEAN" => Value::Bool(Some(
row.try_get(c.ordinal()).expect("Failed to get boolean"),
)),
"TINYINT UNSIGNED" => Value::TinyUnsigned(Some(
row.try_get(c.ordinal())
.expect("Failed to get unsigned tiny integer"),
)),
"SMALLINT UNSIGNED" => Value::SmallUnsigned(Some(
row.try_get(c.ordinal())
.expect("Failed to get unsigned small integer"),
)),
"INT UNSIGNED" => Value::Unsigned(Some(
row.try_get(c.ordinal())
.expect("Failed to get unsigned integer"),
)),
"MEDIUMINT UNSIGNED" | "BIGINT UNSIGNED" => Value::BigUnsigned(Some(
row.try_get(c.ordinal())
.expect("Failed to get unsigned big integer"),
)),
"TINYINT" => Value::TinyInt(Some(
row.try_get(c.ordinal())
.expect("Failed to get tiny integer"),
)),
"SMALLINT" => Value::SmallInt(Some(
row.try_get(c.ordinal())
.expect("Failed to get small integer"),
)),
"INT" => Value::Int(Some(
row.try_get(c.ordinal()).expect("Failed to get integer"),
)),
"MEDIUMINT" | "BIGINT" => Value::BigInt(Some(
row.try_get(c.ordinal()).expect("Failed to get big integer"),
)),
"FLOAT" => Value::Float(Some(
row.try_get(c.ordinal()).expect("Failed to get float"),
)),
"DOUBLE" => Value::Double(Some(
row.try_get(c.ordinal()).expect("Failed to get double"),
)),
"BIT" | "BINARY" | "VARBINARY" | "TINYBLOB" | "BLOB" | "MEDIUMBLOB"
| "LONGBLOB" => Value::Bytes(Some(Box::new(
row.try_get(c.ordinal()).expect("Failed to get bytes"),
))),
"CHAR" | "VARCHAR" | "TINYTEXT" | "TEXT" | "MEDIUMTEXT" | "LONGTEXT" => {
Value::String(Some(Box::new(
row.try_get(c.ordinal()).expect("Failed to get string"),
)))
}
#[cfg(feature = "with-chrono")]
"TIMESTAMP" => Value::ChronoDateTimeUtc(Some(Box::new(
row.try_get(c.ordinal()).expect("Failed to get timestamp"),
))),
#[cfg(all(feature = "with-time", not(feature = "with-chrono")))]
"TIMESTAMP" => Value::TimeDateTime(Some(Box::new(
row.try_get(c.ordinal()).expect("Failed to get timestamp"),
))),
#[cfg(feature = "with-chrono")]
"DATE" => Value::ChronoDate(Some(Box::new(
row.try_get(c.ordinal()).expect("Failed to get date"),
))),
#[cfg(all(feature = "with-time", not(feature = "with-chrono")))]
"DATE" => Value::TimeDate(Some(Box::new(
row.try_get(c.ordinal()).expect("Failed to get date"),
))),
#[cfg(feature = "with-chrono")]
"TIME" => Value::ChronoTime(Some(Box::new(
row.try_get(c.ordinal()).expect("Failed to get time"),
))),
#[cfg(all(feature = "with-time", not(feature = "with-chrono")))]
"TIME" => Value::TimeTime(Some(Box::new(
row.try_get(c.ordinal()).expect("Failed to get time"),
))),
#[cfg(feature = "with-chrono")]
"DATETIME" => Value::ChronoDateTime(Some(Box::new(
row.try_get(c.ordinal()).expect("Failed to get datetime"),
))),
#[cfg(all(feature = "with-time", not(feature = "with-chrono")))]
"DATETIME" => Value::TimeDateTime(Some(Box::new(
row.try_get(c.ordinal()).expect("Failed to get datetime"),
))),
#[cfg(feature = "with-chrono")]
"YEAR" => Value::ChronoDate(Some(Box::new(
row.try_get(c.ordinal()).expect("Failed to get year"),
))),
#[cfg(all(feature = "with-time", not(feature = "with-chrono")))]
"YEAR" => Value::TimeDate(Some(Box::new(
row.try_get(c.ordinal()).expect("Failed to get year"),
))),
"ENUM" | "SET" | "GEOMETRY" => Value::String(Some(Box::new(
row.try_get(c.ordinal())
.expect("Failed to get serialized string"),
))),
#[cfg(feature = "with-bigdecimal")]
"DECIMAL" => Value::BigDecimal(Some(Box::new(
row.try_get(c.ordinal()).expect("Failed to get decimal"),
))),
#[cfg(all(
feature = "with-rust_decimal",
not(feature = "with-bigdecimal")
))]
"DECIMAL" => Value::Decimal(Some(Box::new(
row.try_get(c.ordinal()).expect("Failed to get decimal"),
))),
#[cfg(feature = "with-json")]
"JSON" => Value::Json(Some(Box::new(
row.try_get(c.ordinal()).expect("Failed to get json"),
))),
_ => unreachable!("Unknown column type: {}", c.type_info().name()),
},
)
})
.collect(),
}
}
#[cfg(feature = "sqlx-postgres")]
pub(crate) fn from_sqlx_postgres_row_to_proxy_row(row: &sqlx::postgres::PgRow) -> ProxyRow {
// https://docs.rs/sqlx-postgres/0.7.2/src/sqlx_postgres/type_info.rs.html
// https://docs.rs/sqlx-postgres/0.7.2/sqlx_postgres/types/index.html
use sqlx::{Column, Row, TypeInfo};
ProxyRow {
values: row
.columns()
.iter()
.map(|c| {
(
c.name().to_string(),
match c.type_info().name() {
"BOOL" => Value::Bool(Some(
row.try_get(c.ordinal()).expect("Failed to get boolean"),
)),
#[cfg(feature = "postgres-array")]
"BOOL[]" => Value::Array(
sea_query::ArrayType::Bool,
Some(Box::new(
row.try_get::<Vec<bool>, _>(c.ordinal())
.expect("Failed to get boolean array")
.iter()
.map(|val| Value::Bool(Some(*val)))
.collect(),
)),
),
"\"CHAR\"" => Value::TinyInt(Some(
row.try_get(c.ordinal())
.expect("Failed to get small integer"),
)),
#[cfg(feature = "postgres-array")]
"\"CHAR\"[]" => Value::Array(
sea_query::ArrayType::TinyInt,
Some(Box::new(
row.try_get::<Vec<i8>, _>(c.ordinal())
.expect("Failed to get small integer array")
.iter()
.map(|val| Value::TinyInt(Some(*val)))
.collect(),
)),
),
"SMALLINT" | "SMALLSERIAL" | "INT2" => Value::SmallInt(Some(
row.try_get(c.ordinal())
.expect("Failed to get small integer"),
)),
#[cfg(feature = "postgres-array")]
"SMALLINT[]" | "SMALLSERIAL[]" | "INT2[]" => Value::Array(
sea_query::ArrayType::SmallInt,
Some(Box::new(
row.try_get::<Vec<i16>, _>(c.ordinal())
.expect("Failed to get small integer array")
.iter()
.map(|val| Value::SmallInt(Some(*val)))
.collect(),
)),
),
"INT" | "SERIAL" | "INT4" => Value::Int(Some(
row.try_get(c.ordinal()).expect("Failed to get integer"),
)),
#[cfg(feature = "postgres-array")]
"INT[]" | "SERIAL[]" | "INT4[]" => Value::Array(
sea_query::ArrayType::Int,
Some(Box::new(
row.try_get::<Vec<i32>, _>(c.ordinal())
.expect("Failed to get integer array")
.iter()
.map(|val| Value::Int(Some(*val)))
.collect(),
)),
),
"BIGINT" | "BIGSERIAL" | "INT8" => Value::BigInt(Some(
row.try_get(c.ordinal()).expect("Failed to get big integer"),
)),
#[cfg(feature = "postgres-array")]
"BIGINT[]" | "BIGSERIAL[]" | "INT8[]" => Value::Array(
sea_query::ArrayType::BigInt,
Some(Box::new(
row.try_get::<Vec<i64>, _>(c.ordinal())
.expect("Failed to get big integer array")
.iter()
.map(|val| Value::BigInt(Some(*val)))
.collect(),
)),
),
"FLOAT4" | "REAL" => Value::Float(Some(
row.try_get(c.ordinal()).expect("Failed to get float"),
)),
#[cfg(feature = "postgres-array")]
"FLOAT4[]" | "REAL[]" => Value::Array(
sea_query::ArrayType::Float,
Some(Box::new(
row.try_get::<Vec<f32>, _>(c.ordinal())
.expect("Failed to get float array")
.iter()
.map(|val| Value::Float(Some(*val)))
.collect(),
)),
),
"FLOAT8" | "DOUBLE PRECISION" => Value::Double(Some(
row.try_get(c.ordinal()).expect("Failed to get double"),
)),
#[cfg(feature = "postgres-array")]
"FLOAT8[]" | "DOUBLE PRECISION[]" => Value::Array(
sea_query::ArrayType::Double,
Some(Box::new(
row.try_get::<Vec<f64>, _>(c.ordinal())
.expect("Failed to get double array")
.iter()
.map(|val| Value::Double(Some(*val)))
.collect(),
)),
),
"VARCHAR" | "CHAR" | "TEXT" | "NAME" => Value::String(Some(Box::new(
row.try_get(c.ordinal()).expect("Failed to get string"),
))),
#[cfg(feature = "postgres-array")]
"VARCHAR[]" | "CHAR[]" | "TEXT[]" | "NAME[]" => Value::Array(
sea_query::ArrayType::String,
Some(Box::new(
row.try_get::<Vec<String>, _>(c.ordinal())
.expect("Failed to get string array")
.iter()
.map(|val| Value::String(Some(Box::new(val.clone()))))
.collect(),
)),
),
"BYTEA" => Value::Bytes(Some(Box::new(
row.try_get(c.ordinal()).expect("Failed to get bytes"),
))),
#[cfg(feature = "postgres-array")]
"BYTEA[]" => Value::Array(
sea_query::ArrayType::Bytes,
Some(Box::new(
row.try_get::<Vec<Vec<u8>>, _>(c.ordinal())
.expect("Failed to get bytes array")
.iter()
.map(|val| Value::Bytes(Some(Box::new(val.clone()))))
.collect(),
)),
),
#[cfg(feature = "with-bigdecimal")]
"NUMERIC" => Value::BigDecimal(Some(Box::new(
row.try_get(c.ordinal()).expect("Failed to get numeric"),
))),
#[cfg(all(
feature = "with-rust_decimal",
not(feature = "with-bigdecimal")
))]
"NUMERIC" => Value::Decimal(Some(Box::new(
row.try_get(c.ordinal()).expect("Failed to get numeric"),
))),
#[cfg(all(feature = "with-bigdecimal", feature = "postgres-array"))]
"NUMERIC[]" => Value::Array(
sea_query::ArrayType::BigDecimal,
Some(Box::new(
row.try_get::<Vec<bigdecimal::BigDecimal>, _>(c.ordinal())
.expect("Failed to get numeric array")
.iter()
.map(|val| Value::BigDecimal(Some(Box::new(val.clone()))))
.collect(),
)),
),
#[cfg(all(
feature = "with-rust_decimal",
not(feature = "with-bigdecimal"),
feature = "postgres-array"
))]
"NUMERIC[]" => Value::Array(
sea_query::ArrayType::Decimal,
Some(Box::new(
row.try_get::<Vec<rust_decimal::Decimal>, _>(c.ordinal())
.expect("Failed to get numeric array")
.iter()
.map(|val| Value::Decimal(Some(Box::new(val.clone()))))
.collect(),
)),
),
"OID" => Value::BigInt(Some(
row.try_get(c.ordinal()).expect("Failed to get oid"),
)),
#[cfg(feature = "postgres-array")]
"OID[]" => Value::Array(
sea_query::ArrayType::BigInt,
Some(Box::new(
row.try_get::<Vec<i64>, _>(c.ordinal())
.expect("Failed to get oid array")
.iter()
.map(|val| Value::BigInt(Some(*val)))
.collect(),
)),
),
"JSON" | "JSONB" => Value::Json(Some(Box::new(
row.try_get(c.ordinal()).expect("Failed to get json"),
))),
#[cfg(any(feature = "json-array", feature = "postgres-array"))]
"JSON[]" | "JSONB[]" => Value::Array(
sea_query::ArrayType::Json,
Some(Box::new(
row.try_get::<Vec<serde_json::Value>, _>(c.ordinal())
.expect("Failed to get json array")
.iter()
.map(|val| Value::Json(Some(Box::new(val.clone()))))
.collect(),
)),
),
#[cfg(feature = "with-ipnetwork")]
"INET" | "CIDR" => Value::IpNetwork(Some(Box::new(
row.try_get(c.ordinal()).expect("Failed to get ip address"),
))),
#[cfg(feature = "with-ipnetwork")]
"INET[]" | "CIDR[]" => Value::Array(
sea_query::ArrayType::IpNetwork,
Some(Box::new(
row.try_get::<Vec<ipnetwork::IpNetwork>, _>(c.ordinal())
.expect("Failed to get ip address array")
.iter()
.map(|val| Value::IpNetwork(Some(Box::new(val.clone()))))
.collect(),
)),
),
#[cfg(feature = "with-mac_address")]
"MACADDR" | "MACADDR8" => Value::MacAddress(Some(Box::new(
row.try_get(c.ordinal()).expect("Failed to get mac address"),
))),
#[cfg(all(feature = "with-mac_address", feature = "postgres-array"))]
"MACADDR[]" | "MACADDR8[]" => Value::Array(
sea_query::ArrayType::MacAddress,
Some(Box::new(
row.try_get::<Vec<mac_address::MacAddress>, _>(c.ordinal())
.expect("Failed to get mac address array")
.iter()
.map(|val| Value::MacAddress(Some(Box::new(val.clone()))))
.collect(),
)),
),
#[cfg(feature = "with-chrono")]
"TIMESTAMP" => Value::ChronoDateTime(Some(Box::new(
row.try_get(c.ordinal()).expect("Failed to get timestamp"),
))),
#[cfg(all(feature = "with-time", not(feature = "with-chrono")))]
"TIMESTAMP" => Value::TimeDateTime(Some(Box::new(
row.try_get(c.ordinal()).expect("Failed to get timestamp"),
))),
#[cfg(all(feature = "with-chrono", feature = "postgres-array"))]
"TIMESTAMP[]" => Value::Array(
sea_query::ArrayType::ChronoDateTime,
Some(Box::new(
row.try_get::<Vec<chrono::NaiveDateTime>, _>(c.ordinal())
.expect("Failed to get timestamp array")
.iter()
.map(|val| Value::ChronoDateTime(Some(Box::new(val.clone()))))
.collect(),
)),
),
#[cfg(all(
feature = "with-time",
not(feature = "with-chrono"),
feature = "postgres-array"
))]
"TIMESTAMP[]" => Value::Array(
sea_query::ArrayType::TimeDateTime,
Some(Box::new(
row.try_get::<Vec<time::OffsetDateTime>, _>(c.ordinal())
.expect("Failed to get timestamp array")
.iter()
.map(|val| Value::TimeDateTime(Some(Box::new(val.clone()))))
.collect(),
)),
),
#[cfg(feature = "with-chrono")]
"DATE" => Value::ChronoDate(Some(Box::new(
row.try_get(c.ordinal()).expect("Failed to get date"),
))),
#[cfg(all(feature = "with-time", not(feature = "with-chrono")))]
"DATE" => Value::TimeDate(Some(Box::new(
row.try_get(c.ordinal()).expect("Failed to get date"),
))),
#[cfg(all(feature = "with-chrono", feature = "postgres-array"))]
"DATE[]" => Value::Array(
sea_query::ArrayType::ChronoDate,
Some(Box::new(
row.try_get::<Vec<chrono::NaiveDate>, _>(c.ordinal())
.expect("Failed to get date array")
.iter()
.map(|val| Value::ChronoDate(Some(Box::new(val.clone()))))
.collect(),
)),
),
#[cfg(all(
feature = "with-time",
not(feature = "with-chrono"),
feature = "postgres-array"
))]
"DATE[]" => Value::Array(
sea_query::ArrayType::TimeDate,
Some(Box::new(
row.try_get::<Vec<time::Date>, _>(c.ordinal())
.expect("Failed to get date array")
.iter()
.map(|val| Value::TimeDate(Some(Box::new(val.clone()))))
.collect(),
)),
),
#[cfg(feature = "with-chrono")]
"TIME" => Value::ChronoTime(Some(Box::new(
row.try_get(c.ordinal()).expect("Failed to get time"),
))),
#[cfg(all(feature = "with-time", not(feature = "with-chrono")))]
"TIME" => Value::TimeTime(Some(Box::new(
row.try_get(c.ordinal()).expect("Failed to get time"),
))),
#[cfg(all(feature = "with-chrono", feature = "postgres-array"))]
"TIME[]" => Value::Array(
sea_query::ArrayType::ChronoTime,
Some(Box::new(
row.try_get::<Vec<chrono::NaiveTime>, _>(c.ordinal())
.expect("Failed to get time array")
.iter()
.map(|val| Value::ChronoTime(Some(Box::new(val.clone()))))
.collect(),
)),
),
#[cfg(all(
feature = "with-time",
not(feature = "with-chrono"),
feature = "postgres-array"
))]
"TIME[]" => Value::Array(
sea_query::ArrayType::TimeTime,
Some(Box::new(
row.try_get::<Vec<time::Time>, _>(c.ordinal())
.expect("Failed to get time array")
.iter()
.map(|val| Value::TimeTime(Some(Box::new(val.clone()))))
.collect(),
)),
),
#[cfg(feature = "with-chrono")]
"TIMESTAMPTZ" => Value::ChronoDateTimeUtc(Some(Box::new(
row.try_get(c.ordinal()).expect("Failed to get timestamptz"),
))),
#[cfg(all(feature = "with-time", not(feature = "with-chrono")))]
"TIMESTAMPTZ" => Value::TimeDateTime(Some(Box::new(
row.try_get(c.ordinal()).expect("Failed to get timestamptz"),
))),
#[cfg(all(feature = "with-chrono", feature = "postgres-array"))]
"TIMESTAMPTZ[]" => Value::Array(
sea_query::ArrayType::ChronoDateTimeUtc,
Some(Box::new(
row.try_get::<Vec<chrono::DateTime<chrono::Utc>>, _>(c.ordinal())
.expect("Failed to get timestamptz array")
.iter()
.map(|val| {
Value::ChronoDateTimeUtc(Some(Box::new(val.clone())))
})
.collect(),
)),
),
#[cfg(all(
feature = "with-time",
not(feature = "with-chrono"),
feature = "postgres-array"
))]
"TIMESTAMPTZ[]" => Value::Array(
sea_query::ArrayType::TimeDateTime,
Some(Box::new(
row.try_get::<Vec<time::OffsetDateTime>, _>(c.ordinal())
.expect("Failed to get timestamptz array")
.iter()
.map(|val| Value::TimeDateTime(Some(Box::new(val.clone()))))
.collect(),
)),
),
#[cfg(feature = "with-chrono")]
"TIMETZ" => Value::ChronoTime(Some(Box::new(
row.try_get(c.ordinal()).expect("Failed to get timetz"),
))),
#[cfg(all(feature = "with-time", not(feature = "with-chrono")))]
"TIMETZ" => Value::TimeTime(Some(Box::new(
row.try_get(c.ordinal()).expect("Failed to get timetz"),
))),
#[cfg(all(feature = "with-chrono", feature = "postgres-array"))]
"TIMETZ[]" => Value::Array(
sea_query::ArrayType::ChronoTime,
Some(Box::new(
row.try_get::<Vec<chrono::NaiveTime>, _>(c.ordinal())
.expect("Failed to get timetz array")
.iter()
.map(|val| Value::ChronoTime(Some(Box::new(val.clone()))))
.collect(),
)),
),
#[cfg(all(
feature = "with-time",
not(feature = "with-chrono"),
feature = "postgres-array"
))]
"TIMETZ[]" => Value::Array(
sea_query::ArrayType::TimeTime,
Some(Box::new(
row.try_get::<Vec<time::Time>, _>(c.ordinal())
.expect("Failed to get timetz array")
.iter()
.map(|val| Value::TimeTime(Some(Box::new(val.clone()))))
.collect(),
)),
),
#[cfg(feature = "with-uuid")]
"UUID" => Value::Uuid(Some(Box::new(
row.try_get(c.ordinal()).expect("Failed to get uuid"),
))),
#[cfg(all(feature = "with-uuid", feature = "postgres-array"))]
"UUID[]" => Value::Array(
sea_query::ArrayType::Uuid,
Some(Box::new(
row.try_get::<Vec<uuid::Uuid>, _>(c.ordinal())
.expect("Failed to get uuid array")
.iter()
.map(|val| Value::Uuid(Some(Box::new(val.clone()))))
.collect(),
)),
),
_ => unreachable!("Unknown column type: {}", c.type_info().name()),
},
)
})
.collect(),
}
}
#[cfg(feature = "sqlx-sqlite")]
pub(crate) fn from_sqlx_sqlite_row_to_proxy_row(row: &sqlx::sqlite::SqliteRow) -> ProxyRow {
// https://docs.rs/sqlx-sqlite/0.7.2/src/sqlx_sqlite/type_info.rs.html
// https://docs.rs/sqlx-sqlite/0.7.2/sqlx_sqlite/types/index.html
use sqlx::{Column, Row, TypeInfo};
ProxyRow {
values: row
.columns()
.iter()
.map(|c| {
(
c.name().to_string(),
match c.type_info().name() {
"BOOLEAN" => Value::Bool(Some(
row.try_get(c.ordinal()).expect("Failed to get boolean"),
)),
"INTEGER" => Value::Int(Some(
row.try_get(c.ordinal()).expect("Failed to get integer"),
)),
"BIGINT" | "INT8" => Value::BigInt(Some(
row.try_get(c.ordinal()).expect("Failed to get big integer"),
)),
"REAL" => Value::Double(Some(
row.try_get(c.ordinal()).expect("Failed to get double"),
)),
"TEXT" => Value::String(Some(Box::new(
row.try_get(c.ordinal()).expect("Failed to get string"),
))),
"BLOB" => Value::Bytes(Some(Box::new(
row.try_get(c.ordinal()).expect("Failed to get bytes"),
))),
#[cfg(feature = "with-chrono")]
"DATETIME" => Value::ChronoDateTimeUtc(Some(Box::new(
row.try_get(c.ordinal()).expect("Failed to get timestamp"),
))),
#[cfg(all(feature = "with-time", not(feature = "with-chrono")))]
"DATETIME" => Value::TimeDateTimeWithTimeZone(Some(Box::new(
row.try_get(c.ordinal()).expect("Failed to get timestamp"),
))),
#[cfg(feature = "with-chrono")]
"DATE" => Value::ChronoDate(Some(Box::new(
row.try_get(c.ordinal()).expect("Failed to get date"),
))),
#[cfg(all(feature = "with-time", not(feature = "with-chrono")))]
"DATE" => Value::TimeDate(Some(Box::new(
row.try_get(c.ordinal()).expect("Failed to get date"),
))),
#[cfg(feature = "with-chrono")]
"TIME" => Value::ChronoTime(Some(Box::new(
row.try_get(c.ordinal()).expect("Failed to get time"),
))),
#[cfg(all(feature = "with-time", not(feature = "with-chrono")))]
"TIME" => Value::TimeTime(Some(Box::new(
row.try_get(c.ordinal()).expect("Failed to get time"),
))),
_ => unreachable!("Unknown column type: {}", c.type_info().name()),
},
)
})
.collect(),
}
}
impl ProxyRow { impl ProxyRow {
/// Get a value from the [ProxyRow] /// Get a value from the [ProxyRow]
pub fn try_get<T, I: crate::ColIdx>(&self, index: I) -> Result<T, DbErr> pub fn try_get<T, I: crate::ColIdx>(&self, index: I) -> Result<T, DbErr>

View File

@ -1,17 +1,14 @@
#![allow(missing_docs, unreachable_code, unused_variables)] #![allow(missing_docs, unreachable_code, unused_variables)]
use futures::Stream;
use std::{pin::Pin, task::Poll};
use tracing::instrument; use tracing::instrument;
#[cfg(any(feature = "mock", feature = "proxy"))]
use std::sync::Arc;
use std::{pin::Pin, task::Poll};
use futures::Stream;
#[cfg(feature = "sqlx-dep")] #[cfg(feature = "sqlx-dep")]
use futures::TryStreamExt; use futures::TryStreamExt;
#[cfg(feature = "sqlx-dep")] #[cfg(feature = "sqlx-dep")]
use sqlx::{pool::PoolConnection, Executor}; use sqlx::Executor;
use super::metric::MetricStream; use super::metric::MetricStream;
#[cfg(feature = "sqlx-dep")] #[cfg(feature = "sqlx-dep")]
@ -29,101 +26,6 @@ pub struct QueryStream {
stream: MetricStream<'this>, stream: MetricStream<'this>,
} }
#[cfg(feature = "sqlx-mysql")]
impl
From<(
PoolConnection<sqlx::MySql>,
Statement,
Option<crate::metric::Callback>,
)> for QueryStream
{
fn from(
(conn, stmt, metric_callback): (
PoolConnection<sqlx::MySql>,
Statement,
Option<crate::metric::Callback>,
),
) -> Self {
QueryStream::build(stmt, InnerConnection::MySql(conn), metric_callback)
}
}
#[cfg(feature = "sqlx-postgres")]
impl
From<(
PoolConnection<sqlx::Postgres>,
Statement,
Option<crate::metric::Callback>,
)> for QueryStream
{
fn from(
(conn, stmt, metric_callback): (
PoolConnection<sqlx::Postgres>,
Statement,
Option<crate::metric::Callback>,
),
) -> Self {
QueryStream::build(stmt, InnerConnection::Postgres(conn), metric_callback)
}
}
#[cfg(feature = "sqlx-sqlite")]
impl
From<(
PoolConnection<sqlx::Sqlite>,
Statement,
Option<crate::metric::Callback>,
)> for QueryStream
{
fn from(
(conn, stmt, metric_callback): (
PoolConnection<sqlx::Sqlite>,
Statement,
Option<crate::metric::Callback>,
),
) -> Self {
QueryStream::build(stmt, InnerConnection::Sqlite(conn), metric_callback)
}
}
#[cfg(feature = "mock")]
impl
From<(
Arc<crate::MockDatabaseConnection>,
Statement,
Option<crate::metric::Callback>,
)> for QueryStream
{
fn from(
(conn, stmt, metric_callback): (
Arc<crate::MockDatabaseConnection>,
Statement,
Option<crate::metric::Callback>,
),
) -> Self {
QueryStream::build(stmt, InnerConnection::Mock(conn), metric_callback)
}
}
#[cfg(feature = "proxy")]
impl
From<(
Arc<crate::ProxyDatabaseConnection>,
Statement,
Option<crate::metric::Callback>,
)> for QueryStream
{
fn from(
(conn, stmt, metric_callback): (
Arc<crate::ProxyDatabaseConnection>,
Statement,
Option<crate::metric::Callback>,
),
) -> Self {
QueryStream::build(stmt, InnerConnection::Proxy(conn), metric_callback)
}
}
impl std::fmt::Debug for QueryStream { impl std::fmt::Debug for QueryStream {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "QueryStream") write!(f, "QueryStream")
@ -132,7 +34,7 @@ impl std::fmt::Debug for QueryStream {
impl QueryStream { impl QueryStream {
#[instrument(level = "trace", skip(metric_callback))] #[instrument(level = "trace", skip(metric_callback))]
fn build( pub(crate) fn build(
stmt: Statement, stmt: Statement,
conn: InnerConnection, conn: InnerConnection,
metric_callback: Option<crate::metric::Callback>, metric_callback: Option<crate::metric::Callback>,

View File

@ -7,7 +7,7 @@ use crate::{
use crate::{sqlx_error_to_exec_err, sqlx_error_to_query_err}; use crate::{sqlx_error_to_exec_err, sqlx_error_to_query_err};
use futures::lock::Mutex; use futures::lock::Mutex;
#[cfg(feature = "sqlx-dep")] #[cfg(feature = "sqlx-dep")]
use sqlx::{pool::PoolConnection, TransactionManager}; use sqlx::TransactionManager;
use std::{future::Future, pin::Pin, sync::Arc}; use std::{future::Future, pin::Pin, sync::Arc};
use tracing::instrument; use tracing::instrument;
@ -28,91 +28,8 @@ impl std::fmt::Debug for DatabaseTransaction {
} }
impl DatabaseTransaction { impl DatabaseTransaction {
#[cfg(feature = "sqlx-mysql")]
pub(crate) async fn new_mysql(
inner: PoolConnection<sqlx::MySql>,
metric_callback: Option<crate::metric::Callback>,
isolation_level: Option<IsolationLevel>,
access_mode: Option<AccessMode>,
) -> Result<DatabaseTransaction, DbErr> {
Self::begin(
Arc::new(Mutex::new(InnerConnection::MySql(inner))),
DbBackend::MySql,
metric_callback,
isolation_level,
access_mode,
)
.await
}
#[cfg(feature = "sqlx-postgres")]
pub(crate) async fn new_postgres(
inner: PoolConnection<sqlx::Postgres>,
metric_callback: Option<crate::metric::Callback>,
isolation_level: Option<IsolationLevel>,
access_mode: Option<AccessMode>,
) -> Result<DatabaseTransaction, DbErr> {
Self::begin(
Arc::new(Mutex::new(InnerConnection::Postgres(inner))),
DbBackend::Postgres,
metric_callback,
isolation_level,
access_mode,
)
.await
}
#[cfg(feature = "sqlx-sqlite")]
pub(crate) async fn new_sqlite(
inner: PoolConnection<sqlx::Sqlite>,
metric_callback: Option<crate::metric::Callback>,
isolation_level: Option<IsolationLevel>,
access_mode: Option<AccessMode>,
) -> Result<DatabaseTransaction, DbErr> {
Self::begin(
Arc::new(Mutex::new(InnerConnection::Sqlite(inner))),
DbBackend::Sqlite,
metric_callback,
isolation_level,
access_mode,
)
.await
}
#[cfg(feature = "mock")]
pub(crate) async fn new_mock(
inner: Arc<crate::MockDatabaseConnection>,
metric_callback: Option<crate::metric::Callback>,
) -> Result<DatabaseTransaction, DbErr> {
let backend = inner.get_database_backend();
Self::begin(
Arc::new(Mutex::new(InnerConnection::Mock(inner))),
backend,
metric_callback,
None,
None,
)
.await
}
#[cfg(feature = "proxy")]
pub(crate) async fn new_proxy(
inner: Arc<crate::ProxyDatabaseConnection>,
metric_callback: Option<crate::metric::Callback>,
) -> Result<DatabaseTransaction, DbErr> {
let backend = inner.get_database_backend();
Self::begin(
Arc::new(Mutex::new(InnerConnection::Proxy(inner))),
backend,
metric_callback,
None,
None,
)
.await
}
#[instrument(level = "trace", skip(metric_callback))] #[instrument(level = "trace", skip(metric_callback))]
async fn begin( pub(crate) async fn begin(
conn: Arc<Mutex<InnerConnection>>, conn: Arc<Mutex<InnerConnection>>,
backend: DbBackend, backend: DbBackend,
metric_callback: Option<crate::metric::Callback>, metric_callback: Option<crate::metric::Callback>,
@ -293,17 +210,6 @@ impl DatabaseTransaction {
} }
Ok(()) Ok(())
} }
#[cfg(feature = "sqlx-dep")]
fn map_err_ignore_not_found<T: std::fmt::Debug>(
err: Result<Option<T>, sqlx::Error>,
) -> Result<Option<T>, DbErr> {
if let Err(sqlx::Error::RowNotFound) = err {
Ok(None)
} else {
err.map_err(sqlx_error_to_query_err)
}
}
} }
impl Drop for DatabaseTransaction { impl Drop for DatabaseTransaction {
@ -411,7 +317,7 @@ impl ConnectionTrait for DatabaseTransaction {
let query = crate::driver::sqlx_mysql::sqlx_query(&stmt); let query = crate::driver::sqlx_mysql::sqlx_query(&stmt);
let conn: &mut sqlx::MySqlConnection = &mut *conn; let conn: &mut sqlx::MySqlConnection = &mut *conn;
crate::metric::metric!(self.metric_callback, &stmt, { crate::metric::metric!(self.metric_callback, &stmt, {
Self::map_err_ignore_not_found( crate::sqlx_map_err_ignore_not_found(
query.fetch_one(conn).await.map(|row| Some(row.into())), query.fetch_one(conn).await.map(|row| Some(row.into())),
) )
}) })
@ -421,7 +327,7 @@ impl ConnectionTrait for DatabaseTransaction {
let query = crate::driver::sqlx_postgres::sqlx_query(&stmt); let query = crate::driver::sqlx_postgres::sqlx_query(&stmt);
let conn: &mut sqlx::PgConnection = &mut *conn; let conn: &mut sqlx::PgConnection = &mut *conn;
crate::metric::metric!(self.metric_callback, &stmt, { crate::metric::metric!(self.metric_callback, &stmt, {
Self::map_err_ignore_not_found( crate::sqlx_map_err_ignore_not_found(
query.fetch_one(conn).await.map(|row| Some(row.into())), query.fetch_one(conn).await.map(|row| Some(row.into())),
) )
}) })
@ -431,7 +337,7 @@ impl ConnectionTrait for DatabaseTransaction {
let query = crate::driver::sqlx_sqlite::sqlx_query(&stmt); let query = crate::driver::sqlx_sqlite::sqlx_query(&stmt);
let conn: &mut sqlx::SqliteConnection = &mut *conn; let conn: &mut sqlx::SqliteConnection = &mut *conn;
crate::metric::metric!(self.metric_callback, &stmt, { crate::metric::metric!(self.metric_callback, &stmt, {
Self::map_err_ignore_not_found( crate::sqlx_map_err_ignore_not_found(
query.fetch_one(conn).await.map(|row| Some(row.into())), query.fetch_one(conn).await.map(|row| Some(row.into())),
) )
}) })

View File

@ -207,3 +207,39 @@ impl MockDatabaseConnection {
self.mocker.lock().map_err(query_err)?.ping() self.mocker.lock().map_err(query_err)?.ping()
} }
} }
impl
From<(
Arc<crate::MockDatabaseConnection>,
Statement,
Option<crate::metric::Callback>,
)> for crate::QueryStream
{
fn from(
(conn, stmt, metric_callback): (
Arc<crate::MockDatabaseConnection>,
Statement,
Option<crate::metric::Callback>,
),
) -> Self {
crate::QueryStream::build(stmt, crate::InnerConnection::Mock(conn), metric_callback)
}
}
impl crate::DatabaseTransaction {
pub(crate) async fn new_mock(
inner: Arc<crate::MockDatabaseConnection>,
metric_callback: Option<crate::metric::Callback>,
) -> Result<crate::DatabaseTransaction, DbErr> {
use futures::lock::Mutex;
let backend = inner.get_database_backend();
Self::begin(
Arc::new(Mutex::new(crate::InnerConnection::Mock(inner))),
backend,
metric_callback,
None,
None,
)
.await
}
}

View File

@ -16,7 +16,7 @@ pub use mock::*;
#[cfg(feature = "proxy")] #[cfg(feature = "proxy")]
pub use proxy::*; pub use proxy::*;
#[cfg(feature = "sqlx-dep")] #[cfg(feature = "sqlx-dep")]
pub use sqlx_common::*; pub(crate) use sqlx_common::*;
#[cfg(feature = "sqlx-mysql")] #[cfg(feature = "sqlx-mysql")]
pub use sqlx_mysql::*; pub use sqlx_mysql::*;
#[cfg(feature = "sqlx-postgres")] #[cfg(feature = "sqlx-postgres")]

View File

@ -138,3 +138,39 @@ impl ProxyDatabaseConnection {
self.proxy.lock().map_err(query_err)?.ping() self.proxy.lock().map_err(query_err)?.ping()
} }
} }
// Allows building a `QueryStream` directly from a proxy connection plus the
// statement to run, mirroring the equivalent `From` impls for the real drivers.
impl
    From<(
        Arc<crate::ProxyDatabaseConnection>,
        Statement,
        Option<crate::metric::Callback>,
    )> for crate::QueryStream
{
    fn from(
        (conn, stmt, metric_callback): (
            Arc<crate::ProxyDatabaseConnection>,
            Statement,
            Option<crate::metric::Callback>,
        ),
    ) -> Self {
        // Wrap the proxy connection in the driver-agnostic `InnerConnection` enum.
        crate::QueryStream::build(stmt, crate::InnerConnection::Proxy(conn), metric_callback)
    }
}
impl crate::DatabaseTransaction {
    /// Begin a transaction on a proxy connection.
    ///
    /// Isolation level and access mode are passed as `None`: the proxy backend
    /// forwards statements and exposes no transaction configuration here.
    pub(crate) async fn new_proxy(
        inner: Arc<crate::ProxyDatabaseConnection>,
        metric_callback: Option<crate::metric::Callback>,
    ) -> Result<crate::DatabaseTransaction, DbErr> {
        use futures::lock::Mutex;
        // The backend is taken from the proxied target connection.
        let backend = inner.get_database_backend();
        Self::begin(
            Arc::new(Mutex::new(crate::InnerConnection::Proxy(inner))),
            backend,
            metric_callback,
            None,
            None,
        )
        .await
    }
}

View File

@ -1,4 +1,4 @@
use crate::{DbErr, RuntimeErr}; use crate::{ConnAcquireErr, ConnectOptions, DbErr, RuntimeErr};
/// Converts an [sqlx::error] execution error to a [DbErr] /// Converts an [sqlx::error] execution error to a [DbErr]
pub fn sqlx_error_to_exec_err(err: sqlx::Error) -> DbErr { pub fn sqlx_error_to_exec_err(err: sqlx::Error) -> DbErr {
@ -14,3 +14,53 @@ pub fn sqlx_error_to_query_err(err: sqlx::Error) -> DbErr {
pub fn sqlx_error_to_conn_err(err: sqlx::Error) -> DbErr { pub fn sqlx_error_to_conn_err(err: sqlx::Error) -> DbErr {
DbErr::Conn(RuntimeErr::SqlxError(err)) DbErr::Conn(RuntimeErr::SqlxError(err))
} }
/// Converts an [sqlx::error] error to a [DbErr]
pub fn sqlx_map_err_ignore_not_found<T: std::fmt::Debug>(
err: Result<Option<T>, sqlx::Error>,
) -> Result<Option<T>, DbErr> {
if let Err(sqlx::Error::RowNotFound) = err {
Ok(None)
} else {
err.map_err(sqlx_error_to_query_err)
}
}
/// Converts a [sqlx::Error] raised while acquiring a connection from the pool
/// into a [DbErr], distinguishing pool-specific failures from other errors.
pub fn sqlx_conn_acquire_err(sqlx_err: sqlx::Error) -> DbErr {
    match sqlx_err {
        // Pool exhausted / acquire timeout elapsed.
        sqlx::Error::PoolTimedOut => DbErr::ConnectionAcquire(ConnAcquireErr::Timeout),
        // Pool has been shut down.
        sqlx::Error::PoolClosed => DbErr::ConnectionAcquire(ConnAcquireErr::ConnectionClosed),
        // Anything else is surfaced as a generic connection error.
        _ => DbErr::Conn(RuntimeErr::SqlxError(sqlx_err)),
    }
}
impl ConnectOptions {
    /// Convert [ConnectOptions] into [sqlx::pool::PoolOptions]
    ///
    /// Each option is only applied when explicitly set, so unset fields keep
    /// sqlx's own defaults.
    pub fn sqlx_pool_options<DB>(self) -> sqlx::pool::PoolOptions<DB>
    where
        DB: sqlx::Database,
    {
        let mut opt = sqlx::pool::PoolOptions::new();
        if let Some(max_connections) = self.max_connections {
            opt = opt.max_connections(max_connections);
        }
        if let Some(min_connections) = self.min_connections {
            opt = opt.min_connections(min_connections);
        }
        // sqlx's PoolOptions has no separate connect timeout, so
        // `connect_timeout` is mapped onto `acquire_timeout`.
        if let Some(connect_timeout) = self.connect_timeout {
            opt = opt.acquire_timeout(connect_timeout);
        }
        if let Some(idle_timeout) = self.idle_timeout {
            opt = opt.idle_timeout(Some(idle_timeout));
        }
        // NOTE(review): applied after `connect_timeout` above, so when both are
        // set, `acquire_timeout` appears to win — confirm this precedence is intended.
        if let Some(acquire_timeout) = self.acquire_timeout {
            opt = opt.acquire_timeout(acquire_timeout);
        }
        if let Some(max_lifetime) = self.max_lifetime {
            opt = opt.max_lifetime(Some(max_lifetime));
        }
        opt = opt.test_before_acquire(self.test_before_acquire);
        opt
    }
}

View File

@ -1,3 +1,4 @@
use futures::lock::Mutex;
use log::LevelFilter; use log::LevelFilter;
use sea_query::Values; use sea_query::Values;
use std::{future::Future, pin::Pin, sync::Arc}; use std::{future::Future, pin::Pin, sync::Arc};
@ -60,7 +61,7 @@ impl SqlxMySqlConnector {
); );
} }
} }
match options.pool_options().connect_with(opt).await { match options.sqlx_pool_options().connect_with(opt).await {
Ok(pool) => Ok(DatabaseConnection::SqlxMySqlPoolConnection( Ok(pool) => Ok(DatabaseConnection::SqlxMySqlPoolConnection(
SqlxMySqlPoolConnection { SqlxMySqlPoolConnection {
pool, pool,
@ -89,7 +90,7 @@ impl SqlxMySqlPoolConnection {
debug_print!("{}", stmt); debug_print!("{}", stmt);
let query = sqlx_query(&stmt); let query = sqlx_query(&stmt);
let mut conn = self.pool.acquire().await.map_err(conn_acquire_err)?; let mut conn = self.pool.acquire().await.map_err(sqlx_conn_acquire_err)?;
crate::metric::metric!(self.metric_callback, &stmt, { crate::metric::metric!(self.metric_callback, &stmt, {
match query.execute(&mut *conn).await { match query.execute(&mut *conn).await {
Ok(res) => Ok(res.into()), Ok(res) => Ok(res.into()),
@ -103,7 +104,7 @@ impl SqlxMySqlPoolConnection {
pub async fn execute_unprepared(&self, sql: &str) -> Result<ExecResult, DbErr> { pub async fn execute_unprepared(&self, sql: &str) -> Result<ExecResult, DbErr> {
debug_print!("{}", sql); debug_print!("{}", sql);
let conn = &mut self.pool.acquire().await.map_err(conn_acquire_err)?; let conn = &mut self.pool.acquire().await.map_err(sqlx_conn_acquire_err)?;
match conn.execute(sql).await { match conn.execute(sql).await {
Ok(res) => Ok(res.into()), Ok(res) => Ok(res.into()),
Err(err) => Err(sqlx_error_to_exec_err(err)), Err(err) => Err(sqlx_error_to_exec_err(err)),
@ -116,7 +117,7 @@ impl SqlxMySqlPoolConnection {
debug_print!("{}", stmt); debug_print!("{}", stmt);
let query = sqlx_query(&stmt); let query = sqlx_query(&stmt);
let mut conn = self.pool.acquire().await.map_err(conn_acquire_err)?; let mut conn = self.pool.acquire().await.map_err(sqlx_conn_acquire_err)?;
crate::metric::metric!(self.metric_callback, &stmt, { crate::metric::metric!(self.metric_callback, &stmt, {
match query.fetch_one(&mut *conn).await { match query.fetch_one(&mut *conn).await {
Ok(row) => Ok(Some(row.into())), Ok(row) => Ok(Some(row.into())),
@ -134,7 +135,7 @@ impl SqlxMySqlPoolConnection {
debug_print!("{}", stmt); debug_print!("{}", stmt);
let query = sqlx_query(&stmt); let query = sqlx_query(&stmt);
let mut conn = self.pool.acquire().await.map_err(conn_acquire_err)?; let mut conn = self.pool.acquire().await.map_err(sqlx_conn_acquire_err)?;
crate::metric::metric!(self.metric_callback, &stmt, { crate::metric::metric!(self.metric_callback, &stmt, {
match query.fetch_all(&mut *conn).await { match query.fetch_all(&mut *conn).await {
Ok(rows) => Ok(rows.into_iter().map(|r| r.into()).collect()), Ok(rows) => Ok(rows.into_iter().map(|r| r.into()).collect()),
@ -148,7 +149,7 @@ impl SqlxMySqlPoolConnection {
pub async fn stream(&self, stmt: Statement) -> Result<QueryStream, DbErr> { pub async fn stream(&self, stmt: Statement) -> Result<QueryStream, DbErr> {
debug_print!("{}", stmt); debug_print!("{}", stmt);
let conn = self.pool.acquire().await.map_err(conn_acquire_err)?; let conn = self.pool.acquire().await.map_err(sqlx_conn_acquire_err)?;
Ok(QueryStream::from(( Ok(QueryStream::from((
conn, conn,
stmt, stmt,
@ -163,7 +164,7 @@ impl SqlxMySqlPoolConnection {
isolation_level: Option<IsolationLevel>, isolation_level: Option<IsolationLevel>,
access_mode: Option<AccessMode>, access_mode: Option<AccessMode>,
) -> Result<DatabaseTransaction, DbErr> { ) -> Result<DatabaseTransaction, DbErr> {
let conn = self.pool.acquire().await.map_err(conn_acquire_err)?; let conn = self.pool.acquire().await.map_err(sqlx_conn_acquire_err)?;
DatabaseTransaction::new_mysql( DatabaseTransaction::new_mysql(
conn, conn,
self.metric_callback.clone(), self.metric_callback.clone(),
@ -189,7 +190,7 @@ impl SqlxMySqlPoolConnection {
T: Send, T: Send,
E: std::error::Error + Send, E: std::error::Error + Send,
{ {
let conn = self.pool.acquire().await.map_err(conn_acquire_err)?; let conn = self.pool.acquire().await.map_err(sqlx_conn_acquire_err)?;
let transaction = DatabaseTransaction::new_mysql( let transaction = DatabaseTransaction::new_mysql(
conn, conn,
self.metric_callback.clone(), self.metric_callback.clone(),
@ -210,7 +211,7 @@ impl SqlxMySqlPoolConnection {
/// Checks if a connection to the database is still valid. /// Checks if a connection to the database is still valid.
pub async fn ping(&self) -> Result<(), DbErr> { pub async fn ping(&self) -> Result<(), DbErr> {
let conn = &mut self.pool.acquire().await.map_err(conn_acquire_err)?; let conn = &mut self.pool.acquire().await.map_err(sqlx_conn_acquire_err)?;
match conn.ping().await { match conn.ping().await {
Ok(_) => Ok(()), Ok(_) => Ok(()),
Err(err) => Err(sqlx_error_to_conn_err(err)), Err(err) => Err(sqlx_error_to_conn_err(err)),
@ -273,3 +274,179 @@ pub(crate) async fn set_transaction_config(
} }
Ok(()) Ok(())
} }
// Allows building a `QueryStream` from a pooled MySQL connection plus the
// statement to run; the stream owns the connection for its lifetime.
impl
    From<(
        PoolConnection<sqlx::MySql>,
        Statement,
        Option<crate::metric::Callback>,
    )> for crate::QueryStream
{
    fn from(
        (conn, stmt, metric_callback): (
            PoolConnection<sqlx::MySql>,
            Statement,
            Option<crate::metric::Callback>,
        ),
    ) -> Self {
        // Wrap the MySQL connection in the driver-agnostic `InnerConnection` enum.
        crate::QueryStream::build(stmt, crate::InnerConnection::MySql(conn), metric_callback)
    }
}
impl crate::DatabaseTransaction {
    /// Begin a MySQL transaction on a pooled connection, forwarding the
    /// requested isolation level and access mode to `Self::begin`.
    pub(crate) async fn new_mysql(
        inner: PoolConnection<sqlx::MySql>,
        metric_callback: Option<crate::metric::Callback>,
        isolation_level: Option<IsolationLevel>,
        access_mode: Option<AccessMode>,
    ) -> Result<crate::DatabaseTransaction, DbErr> {
        Self::begin(
            Arc::new(Mutex::new(crate::InnerConnection::MySql(inner))),
            crate::DbBackend::MySql,
            metric_callback,
            isolation_level,
            access_mode,
        )
        .await
    }
}
/// Converts a raw sqlx MySQL row into a [crate::ProxyRow] by mapping every
/// column to a [sea_query::Value] based on the column's MySQL type name.
///
/// Temporal and decimal types are feature-gated: `with-chrono` takes
/// precedence over `with-time`, and `with-bigdecimal` over `with-rust_decimal`.
/// Panics (via `expect`/`unreachable!`) on decode failure or an unknown type.
#[cfg(feature = "proxy")]
pub(crate) fn from_sqlx_mysql_row_to_proxy_row(row: &sqlx::mysql::MySqlRow) -> crate::ProxyRow {
    // https://docs.rs/sqlx-mysql/0.7.2/src/sqlx_mysql/protocol/text/column.rs.html
    // https://docs.rs/sqlx-mysql/0.7.2/sqlx_mysql/types/index.html
    use sea_query::Value;
    use sqlx::{Column, Row, TypeInfo};
    crate::ProxyRow {
        values: row
            .columns()
            .iter()
            .map(|c| {
                (
                    c.name().to_string(),
                    // Dispatch on the textual MySQL type name reported by sqlx.
                    match c.type_info().name() {
                        // --- boolean & integer types ---
                        "TINYINT(1)" | "BOOLEAN" => Value::Bool(Some(
                            row.try_get(c.ordinal()).expect("Failed to get boolean"),
                        )),
                        "TINYINT UNSIGNED" => Value::TinyUnsigned(Some(
                            row.try_get(c.ordinal())
                                .expect("Failed to get unsigned tiny integer"),
                        )),
                        "SMALLINT UNSIGNED" => Value::SmallUnsigned(Some(
                            row.try_get(c.ordinal())
                                .expect("Failed to get unsigned small integer"),
                        )),
                        "INT UNSIGNED" => Value::Unsigned(Some(
                            row.try_get(c.ordinal())
                                .expect("Failed to get unsigned integer"),
                        )),
                        // MEDIUMINT UNSIGNED is widened to a big unsigned value.
                        "MEDIUMINT UNSIGNED" | "BIGINT UNSIGNED" => Value::BigUnsigned(Some(
                            row.try_get(c.ordinal())
                                .expect("Failed to get unsigned big integer"),
                        )),
                        "TINYINT" => Value::TinyInt(Some(
                            row.try_get(c.ordinal())
                                .expect("Failed to get tiny integer"),
                        )),
                        "SMALLINT" => Value::SmallInt(Some(
                            row.try_get(c.ordinal())
                                .expect("Failed to get small integer"),
                        )),
                        "INT" => Value::Int(Some(
                            row.try_get(c.ordinal()).expect("Failed to get integer"),
                        )),
                        // MEDIUMINT is widened to a big integer.
                        "MEDIUMINT" | "BIGINT" => Value::BigInt(Some(
                            row.try_get(c.ordinal()).expect("Failed to get big integer"),
                        )),
                        // --- floating point ---
                        "FLOAT" => Value::Float(Some(
                            row.try_get(c.ordinal()).expect("Failed to get float"),
                        )),
                        "DOUBLE" => Value::Double(Some(
                            row.try_get(c.ordinal()).expect("Failed to get double"),
                        )),
                        // --- binary & string types ---
                        "BIT" | "BINARY" | "VARBINARY" | "TINYBLOB" | "BLOB" | "MEDIUMBLOB"
                        | "LONGBLOB" => Value::Bytes(Some(Box::new(
                            row.try_get(c.ordinal()).expect("Failed to get bytes"),
                        ))),
                        "CHAR" | "VARCHAR" | "TINYTEXT" | "TEXT" | "MEDIUMTEXT" | "LONGTEXT" => {
                            Value::String(Some(Box::new(
                                row.try_get(c.ordinal()).expect("Failed to get string"),
                            )))
                        }
                        // --- temporal types (chrono preferred over time) ---
                        #[cfg(feature = "with-chrono")]
                        "TIMESTAMP" => Value::ChronoDateTimeUtc(Some(Box::new(
                            row.try_get(c.ordinal()).expect("Failed to get timestamp"),
                        ))),
                        #[cfg(all(feature = "with-time", not(feature = "with-chrono")))]
                        "TIMESTAMP" => Value::TimeDateTime(Some(Box::new(
                            row.try_get(c.ordinal()).expect("Failed to get timestamp"),
                        ))),
                        #[cfg(feature = "with-chrono")]
                        "DATE" => Value::ChronoDate(Some(Box::new(
                            row.try_get(c.ordinal()).expect("Failed to get date"),
                        ))),
                        #[cfg(all(feature = "with-time", not(feature = "with-chrono")))]
                        "DATE" => Value::TimeDate(Some(Box::new(
                            row.try_get(c.ordinal()).expect("Failed to get date"),
                        ))),
                        #[cfg(feature = "with-chrono")]
                        "TIME" => Value::ChronoTime(Some(Box::new(
                            row.try_get(c.ordinal()).expect("Failed to get time"),
                        ))),
                        #[cfg(all(feature = "with-time", not(feature = "with-chrono")))]
                        "TIME" => Value::TimeTime(Some(Box::new(
                            row.try_get(c.ordinal()).expect("Failed to get time"),
                        ))),
                        #[cfg(feature = "with-chrono")]
                        "DATETIME" => Value::ChronoDateTime(Some(Box::new(
                            row.try_get(c.ordinal()).expect("Failed to get datetime"),
                        ))),
                        #[cfg(all(feature = "with-time", not(feature = "with-chrono")))]
                        "DATETIME" => Value::TimeDateTime(Some(Box::new(
                            row.try_get(c.ordinal()).expect("Failed to get datetime"),
                        ))),
                        // YEAR is decoded as a full date value.
                        #[cfg(feature = "with-chrono")]
                        "YEAR" => Value::ChronoDate(Some(Box::new(
                            row.try_get(c.ordinal()).expect("Failed to get year"),
                        ))),
                        #[cfg(all(feature = "with-time", not(feature = "with-chrono")))]
                        "YEAR" => Value::TimeDate(Some(Box::new(
                            row.try_get(c.ordinal()).expect("Failed to get year"),
                        ))),
                        // ENUM/SET/GEOMETRY come back as their string serialization.
                        "ENUM" | "SET" | "GEOMETRY" => Value::String(Some(Box::new(
                            row.try_get(c.ordinal())
                                .expect("Failed to get serialized string"),
                        ))),
                        // --- decimal & json (feature-gated) ---
                        #[cfg(feature = "with-bigdecimal")]
                        "DECIMAL" => Value::BigDecimal(Some(Box::new(
                            row.try_get(c.ordinal()).expect("Failed to get decimal"),
                        ))),
                        #[cfg(all(
                            feature = "with-rust_decimal",
                            not(feature = "with-bigdecimal")
                        ))]
                        "DECIMAL" => Value::Decimal(Some(Box::new(
                            row.try_get(c.ordinal()).expect("Failed to get decimal"),
                        ))),
                        #[cfg(feature = "with-json")]
                        "JSON" => Value::Json(Some(Box::new(
                            row.try_get(c.ordinal()).expect("Failed to get json"),
                        ))),
                        _ => unreachable!("Unknown column type: {}", c.type_info().name()),
                    },
                )
            })
            .collect(),
    }
}

View File

@ -1,3 +1,4 @@
use futures::lock::Mutex;
use log::LevelFilter; use log::LevelFilter;
use sea_query::Values; use sea_query::Values;
use std::{future::Future, pin::Pin, sync::Arc}; use std::{future::Future, pin::Pin, sync::Arc};
@ -64,7 +65,7 @@ impl SqlxPostgresConnector {
.schema_search_path .schema_search_path
.as_ref() .as_ref()
.map(|schema| format!("SET search_path = '{schema}'")); .map(|schema| format!("SET search_path = '{schema}'"));
let mut pool_options = options.pool_options(); let mut pool_options = options.sqlx_pool_options();
if let Some(sql) = set_search_path_sql { if let Some(sql) = set_search_path_sql {
pool_options = pool_options.after_connect(move |conn, _| { pool_options = pool_options.after_connect(move |conn, _| {
let sql = sql.clone(); let sql = sql.clone();
@ -104,7 +105,7 @@ impl SqlxPostgresPoolConnection {
debug_print!("{}", stmt); debug_print!("{}", stmt);
let query = sqlx_query(&stmt); let query = sqlx_query(&stmt);
let mut conn = self.pool.acquire().await.map_err(conn_acquire_err)?; let mut conn = self.pool.acquire().await.map_err(sqlx_conn_acquire_err)?;
crate::metric::metric!(self.metric_callback, &stmt, { crate::metric::metric!(self.metric_callback, &stmt, {
match query.execute(&mut *conn).await { match query.execute(&mut *conn).await {
Ok(res) => Ok(res.into()), Ok(res) => Ok(res.into()),
@ -118,7 +119,7 @@ impl SqlxPostgresPoolConnection {
pub async fn execute_unprepared(&self, sql: &str) -> Result<ExecResult, DbErr> { pub async fn execute_unprepared(&self, sql: &str) -> Result<ExecResult, DbErr> {
debug_print!("{}", sql); debug_print!("{}", sql);
let conn = &mut self.pool.acquire().await.map_err(conn_acquire_err)?; let conn = &mut self.pool.acquire().await.map_err(sqlx_conn_acquire_err)?;
match conn.execute(sql).await { match conn.execute(sql).await {
Ok(res) => Ok(res.into()), Ok(res) => Ok(res.into()),
Err(err) => Err(sqlx_error_to_exec_err(err)), Err(err) => Err(sqlx_error_to_exec_err(err)),
@ -131,7 +132,7 @@ impl SqlxPostgresPoolConnection {
debug_print!("{}", stmt); debug_print!("{}", stmt);
let query = sqlx_query(&stmt); let query = sqlx_query(&stmt);
let mut conn = self.pool.acquire().await.map_err(conn_acquire_err)?; let mut conn = self.pool.acquire().await.map_err(sqlx_conn_acquire_err)?;
crate::metric::metric!(self.metric_callback, &stmt, { crate::metric::metric!(self.metric_callback, &stmt, {
match query.fetch_one(&mut *conn).await { match query.fetch_one(&mut *conn).await {
Ok(row) => Ok(Some(row.into())), Ok(row) => Ok(Some(row.into())),
@ -149,7 +150,7 @@ impl SqlxPostgresPoolConnection {
debug_print!("{}", stmt); debug_print!("{}", stmt);
let query = sqlx_query(&stmt); let query = sqlx_query(&stmt);
let mut conn = self.pool.acquire().await.map_err(conn_acquire_err)?; let mut conn = self.pool.acquire().await.map_err(sqlx_conn_acquire_err)?;
crate::metric::metric!(self.metric_callback, &stmt, { crate::metric::metric!(self.metric_callback, &stmt, {
match query.fetch_all(&mut *conn).await { match query.fetch_all(&mut *conn).await {
Ok(rows) => Ok(rows.into_iter().map(|r| r.into()).collect()), Ok(rows) => Ok(rows.into_iter().map(|r| r.into()).collect()),
@ -163,7 +164,7 @@ impl SqlxPostgresPoolConnection {
pub async fn stream(&self, stmt: Statement) -> Result<QueryStream, DbErr> { pub async fn stream(&self, stmt: Statement) -> Result<QueryStream, DbErr> {
debug_print!("{}", stmt); debug_print!("{}", stmt);
let conn = self.pool.acquire().await.map_err(conn_acquire_err)?; let conn = self.pool.acquire().await.map_err(sqlx_conn_acquire_err)?;
Ok(QueryStream::from(( Ok(QueryStream::from((
conn, conn,
stmt, stmt,
@ -178,7 +179,7 @@ impl SqlxPostgresPoolConnection {
isolation_level: Option<IsolationLevel>, isolation_level: Option<IsolationLevel>,
access_mode: Option<AccessMode>, access_mode: Option<AccessMode>,
) -> Result<DatabaseTransaction, DbErr> { ) -> Result<DatabaseTransaction, DbErr> {
let conn = self.pool.acquire().await.map_err(conn_acquire_err)?; let conn = self.pool.acquire().await.map_err(sqlx_conn_acquire_err)?;
DatabaseTransaction::new_postgres( DatabaseTransaction::new_postgres(
conn, conn,
self.metric_callback.clone(), self.metric_callback.clone(),
@ -204,7 +205,7 @@ impl SqlxPostgresPoolConnection {
T: Send, T: Send,
E: std::error::Error + Send, E: std::error::Error + Send,
{ {
let conn = self.pool.acquire().await.map_err(conn_acquire_err)?; let conn = self.pool.acquire().await.map_err(sqlx_conn_acquire_err)?;
let transaction = DatabaseTransaction::new_postgres( let transaction = DatabaseTransaction::new_postgres(
conn, conn,
self.metric_callback.clone(), self.metric_callback.clone(),
@ -225,7 +226,7 @@ impl SqlxPostgresPoolConnection {
/// Checks if a connection to the database is still valid. /// Checks if a connection to the database is still valid.
pub async fn ping(&self) -> Result<(), DbErr> { pub async fn ping(&self) -> Result<(), DbErr> {
let conn = &mut self.pool.acquire().await.map_err(conn_acquire_err)?; let conn = &mut self.pool.acquire().await.map_err(sqlx_conn_acquire_err)?;
match conn.ping().await { match conn.ping().await {
Ok(_) => Ok(()), Ok(_) => Ok(()),
Err(err) => Err(sqlx_error_to_conn_err(err)), Err(err) => Err(sqlx_error_to_conn_err(err)),
@ -288,3 +289,502 @@ pub(crate) async fn set_transaction_config(
} }
Ok(()) Ok(())
} }
// Allows building a `QueryStream` from a pooled Postgres connection plus the
// statement to run; the stream owns the connection for its lifetime.
impl
    From<(
        PoolConnection<sqlx::Postgres>,
        Statement,
        Option<crate::metric::Callback>,
    )> for crate::QueryStream
{
    fn from(
        (conn, stmt, metric_callback): (
            PoolConnection<sqlx::Postgres>,
            Statement,
            Option<crate::metric::Callback>,
        ),
    ) -> Self {
        // Wrap the Postgres connection in the driver-agnostic `InnerConnection` enum.
        crate::QueryStream::build(
            stmt,
            crate::InnerConnection::Postgres(conn),
            metric_callback,
        )
    }
}
impl crate::DatabaseTransaction {
    /// Begin a Postgres transaction on a pooled connection, forwarding the
    /// requested isolation level and access mode to `Self::begin`.
    pub(crate) async fn new_postgres(
        inner: PoolConnection<sqlx::Postgres>,
        metric_callback: Option<crate::metric::Callback>,
        isolation_level: Option<IsolationLevel>,
        access_mode: Option<AccessMode>,
    ) -> Result<crate::DatabaseTransaction, DbErr> {
        Self::begin(
            Arc::new(Mutex::new(crate::InnerConnection::Postgres(inner))),
            crate::DbBackend::Postgres,
            metric_callback,
            isolation_level,
            access_mode,
        )
        .await
    }
}
/// Converts a raw sqlx Postgres row into a [crate::ProxyRow] by mapping every
/// column to a [sea_query::Value] based on the column's Postgres type name.
///
/// Scalar arms are paired with `postgres-array`-gated arms for the `T[]`
/// variants. Temporal types prefer `with-chrono` over `with-time`, and
/// decimals prefer `with-bigdecimal` over `with-rust_decimal`. Panics (via
/// `expect`/`unreachable!`) on decode failure or an unknown type.
#[cfg(feature = "proxy")]
pub(crate) fn from_sqlx_postgres_row_to_proxy_row(row: &sqlx::postgres::PgRow) -> crate::ProxyRow {
    // https://docs.rs/sqlx-postgres/0.7.2/src/sqlx_postgres/type_info.rs.html
    // https://docs.rs/sqlx-postgres/0.7.2/sqlx_postgres/types/index.html
    use sea_query::Value;
    use sqlx::{Column, Row, TypeInfo};
    crate::ProxyRow {
        values: row
            .columns()
            .iter()
            .map(|c| {
                (
                    c.name().to_string(),
                    // Dispatch on the textual Postgres type name reported by sqlx.
                    match c.type_info().name() {
                        // --- boolean ---
                        "BOOL" => Value::Bool(Some(
                            row.try_get(c.ordinal()).expect("Failed to get boolean"),
                        )),
                        #[cfg(feature = "postgres-array")]
                        "BOOL[]" => Value::Array(
                            sea_query::ArrayType::Bool,
                            Some(Box::new(
                                row.try_get::<Vec<bool>, _>(c.ordinal())
                                    .expect("Failed to get boolean array")
                                    .iter()
                                    .map(|val| Value::Bool(Some(*val)))
                                    .collect(),
                            )),
                        ),
                        // --- integer types (Postgres "CHAR" is a 1-byte int) ---
                        "\"CHAR\"" => Value::TinyInt(Some(
                            row.try_get(c.ordinal())
                                .expect("Failed to get small integer"),
                        )),
                        #[cfg(feature = "postgres-array")]
                        "\"CHAR\"[]" => Value::Array(
                            sea_query::ArrayType::TinyInt,
                            Some(Box::new(
                                row.try_get::<Vec<i8>, _>(c.ordinal())
                                    .expect("Failed to get small integer array")
                                    .iter()
                                    .map(|val| Value::TinyInt(Some(*val)))
                                    .collect(),
                            )),
                        ),
                        "SMALLINT" | "SMALLSERIAL" | "INT2" => Value::SmallInt(Some(
                            row.try_get(c.ordinal())
                                .expect("Failed to get small integer"),
                        )),
                        #[cfg(feature = "postgres-array")]
                        "SMALLINT[]" | "SMALLSERIAL[]" | "INT2[]" => Value::Array(
                            sea_query::ArrayType::SmallInt,
                            Some(Box::new(
                                row.try_get::<Vec<i16>, _>(c.ordinal())
                                    .expect("Failed to get small integer array")
                                    .iter()
                                    .map(|val| Value::SmallInt(Some(*val)))
                                    .collect(),
                            )),
                        ),
                        "INT" | "SERIAL" | "INT4" => Value::Int(Some(
                            row.try_get(c.ordinal()).expect("Failed to get integer"),
                        )),
                        #[cfg(feature = "postgres-array")]
                        "INT[]" | "SERIAL[]" | "INT4[]" => Value::Array(
                            sea_query::ArrayType::Int,
                            Some(Box::new(
                                row.try_get::<Vec<i32>, _>(c.ordinal())
                                    .expect("Failed to get integer array")
                                    .iter()
                                    .map(|val| Value::Int(Some(*val)))
                                    .collect(),
                            )),
                        ),
                        "BIGINT" | "BIGSERIAL" | "INT8" => Value::BigInt(Some(
                            row.try_get(c.ordinal()).expect("Failed to get big integer"),
                        )),
                        #[cfg(feature = "postgres-array")]
                        "BIGINT[]" | "BIGSERIAL[]" | "INT8[]" => Value::Array(
                            sea_query::ArrayType::BigInt,
                            Some(Box::new(
                                row.try_get::<Vec<i64>, _>(c.ordinal())
                                    .expect("Failed to get big integer array")
                                    .iter()
                                    .map(|val| Value::BigInt(Some(*val)))
                                    .collect(),
                            )),
                        ),
                        // --- floating point ---
                        "FLOAT4" | "REAL" => Value::Float(Some(
                            row.try_get(c.ordinal()).expect("Failed to get float"),
                        )),
                        #[cfg(feature = "postgres-array")]
                        "FLOAT4[]" | "REAL[]" => Value::Array(
                            sea_query::ArrayType::Float,
                            Some(Box::new(
                                row.try_get::<Vec<f32>, _>(c.ordinal())
                                    .expect("Failed to get float array")
                                    .iter()
                                    .map(|val| Value::Float(Some(*val)))
                                    .collect(),
                            )),
                        ),
                        "FLOAT8" | "DOUBLE PRECISION" => Value::Double(Some(
                            row.try_get(c.ordinal()).expect("Failed to get double"),
                        )),
                        #[cfg(feature = "postgres-array")]
                        "FLOAT8[]" | "DOUBLE PRECISION[]" => Value::Array(
                            sea_query::ArrayType::Double,
                            Some(Box::new(
                                row.try_get::<Vec<f64>, _>(c.ordinal())
                                    .expect("Failed to get double array")
                                    .iter()
                                    .map(|val| Value::Double(Some(*val)))
                                    .collect(),
                            )),
                        ),
                        // --- text & binary ---
                        "VARCHAR" | "CHAR" | "TEXT" | "NAME" => Value::String(Some(Box::new(
                            row.try_get(c.ordinal()).expect("Failed to get string"),
                        ))),
                        #[cfg(feature = "postgres-array")]
                        "VARCHAR[]" | "CHAR[]" | "TEXT[]" | "NAME[]" => Value::Array(
                            sea_query::ArrayType::String,
                            Some(Box::new(
                                row.try_get::<Vec<String>, _>(c.ordinal())
                                    .expect("Failed to get string array")
                                    .iter()
                                    .map(|val| Value::String(Some(Box::new(val.clone()))))
                                    .collect(),
                            )),
                        ),
                        "BYTEA" => Value::Bytes(Some(Box::new(
                            row.try_get(c.ordinal()).expect("Failed to get bytes"),
                        ))),
                        #[cfg(feature = "postgres-array")]
                        "BYTEA[]" => Value::Array(
                            sea_query::ArrayType::Bytes,
                            Some(Box::new(
                                row.try_get::<Vec<Vec<u8>>, _>(c.ordinal())
                                    .expect("Failed to get bytes array")
                                    .iter()
                                    .map(|val| Value::Bytes(Some(Box::new(val.clone()))))
                                    .collect(),
                            )),
                        ),
                        // --- numeric (bigdecimal preferred over rust_decimal) ---
                        #[cfg(feature = "with-bigdecimal")]
                        "NUMERIC" => Value::BigDecimal(Some(Box::new(
                            row.try_get(c.ordinal()).expect("Failed to get numeric"),
                        ))),
                        #[cfg(all(
                            feature = "with-rust_decimal",
                            not(feature = "with-bigdecimal")
                        ))]
                        "NUMERIC" => Value::Decimal(Some(Box::new(
                            row.try_get(c.ordinal()).expect("Failed to get numeric"),
                        ))),
                        #[cfg(all(feature = "with-bigdecimal", feature = "postgres-array"))]
                        "NUMERIC[]" => Value::Array(
                            sea_query::ArrayType::BigDecimal,
                            Some(Box::new(
                                row.try_get::<Vec<bigdecimal::BigDecimal>, _>(c.ordinal())
                                    .expect("Failed to get numeric array")
                                    .iter()
                                    .map(|val| Value::BigDecimal(Some(Box::new(val.clone()))))
                                    .collect(),
                            )),
                        ),
                        #[cfg(all(
                            feature = "with-rust_decimal",
                            not(feature = "with-bigdecimal"),
                            feature = "postgres-array"
                        ))]
                        "NUMERIC[]" => Value::Array(
                            sea_query::ArrayType::Decimal,
                            Some(Box::new(
                                row.try_get::<Vec<rust_decimal::Decimal>, _>(c.ordinal())
                                    .expect("Failed to get numeric array")
                                    .iter()
                                    .map(|val| Value::Decimal(Some(Box::new(val.clone()))))
                                    .collect(),
                            )),
                        ),
                        // --- OID (decoded as a signed big integer) ---
                        "OID" => Value::BigInt(Some(
                            row.try_get(c.ordinal()).expect("Failed to get oid"),
                        )),
                        #[cfg(feature = "postgres-array")]
                        "OID[]" => Value::Array(
                            sea_query::ArrayType::BigInt,
                            Some(Box::new(
                                row.try_get::<Vec<i64>, _>(c.ordinal())
                                    .expect("Failed to get oid array")
                                    .iter()
                                    .map(|val| Value::BigInt(Some(*val)))
                                    .collect(),
                            )),
                        ),
                        // --- json ---
                        "JSON" | "JSONB" => Value::Json(Some(Box::new(
                            row.try_get(c.ordinal()).expect("Failed to get json"),
                        ))),
                        #[cfg(any(feature = "json-array", feature = "postgres-array"))]
                        "JSON[]" | "JSONB[]" => Value::Array(
                            sea_query::ArrayType::Json,
                            Some(Box::new(
                                row.try_get::<Vec<serde_json::Value>, _>(c.ordinal())
                                    .expect("Failed to get json array")
                                    .iter()
                                    .map(|val| Value::Json(Some(Box::new(val.clone()))))
                                    .collect(),
                            )),
                        ),
                        // --- network & mac address types ---
                        #[cfg(feature = "with-ipnetwork")]
                        "INET" | "CIDR" => Value::IpNetwork(Some(Box::new(
                            row.try_get(c.ordinal()).expect("Failed to get ip address"),
                        ))),
                        // NOTE(review): unlike the other array arms, this one is not
                        // additionally gated on `postgres-array` — confirm intended.
                        #[cfg(feature = "with-ipnetwork")]
                        "INET[]" | "CIDR[]" => Value::Array(
                            sea_query::ArrayType::IpNetwork,
                            Some(Box::new(
                                row.try_get::<Vec<ipnetwork::IpNetwork>, _>(c.ordinal())
                                    .expect("Failed to get ip address array")
                                    .iter()
                                    .map(|val| Value::IpNetwork(Some(Box::new(val.clone()))))
                                    .collect(),
                            )),
                        ),
                        #[cfg(feature = "with-mac_address")]
                        "MACADDR" | "MACADDR8" => Value::MacAddress(Some(Box::new(
                            row.try_get(c.ordinal()).expect("Failed to get mac address"),
                        ))),
                        #[cfg(all(feature = "with-mac_address", feature = "postgres-array"))]
                        "MACADDR[]" | "MACADDR8[]" => Value::Array(
                            sea_query::ArrayType::MacAddress,
                            Some(Box::new(
                                row.try_get::<Vec<mac_address::MacAddress>, _>(c.ordinal())
                                    .expect("Failed to get mac address array")
                                    .iter()
                                    .map(|val| Value::MacAddress(Some(Box::new(val.clone()))))
                                    .collect(),
                            )),
                        ),
                        // --- temporal types (chrono preferred over time) ---
                        #[cfg(feature = "with-chrono")]
                        "TIMESTAMP" => Value::ChronoDateTime(Some(Box::new(
                            row.try_get(c.ordinal()).expect("Failed to get timestamp"),
                        ))),
                        #[cfg(all(feature = "with-time", not(feature = "with-chrono")))]
                        "TIMESTAMP" => Value::TimeDateTime(Some(Box::new(
                            row.try_get(c.ordinal()).expect("Failed to get timestamp"),
                        ))),
                        #[cfg(all(feature = "with-chrono", feature = "postgres-array"))]
                        "TIMESTAMP[]" => Value::Array(
                            sea_query::ArrayType::ChronoDateTime,
                            Some(Box::new(
                                row.try_get::<Vec<chrono::NaiveDateTime>, _>(c.ordinal())
                                    .expect("Failed to get timestamp array")
                                    .iter()
                                    .map(|val| Value::ChronoDateTime(Some(Box::new(val.clone()))))
                                    .collect(),
                            )),
                        ),
                        #[cfg(all(
                            feature = "with-time",
                            not(feature = "with-chrono"),
                            feature = "postgres-array"
                        ))]
                        "TIMESTAMP[]" => Value::Array(
                            sea_query::ArrayType::TimeDateTime,
                            Some(Box::new(
                                row.try_get::<Vec<time::OffsetDateTime>, _>(c.ordinal())
                                    .expect("Failed to get timestamp array")
                                    .iter()
                                    .map(|val| Value::TimeDateTime(Some(Box::new(val.clone()))))
                                    .collect(),
                            )),
                        ),
                        #[cfg(feature = "with-chrono")]
                        "DATE" => Value::ChronoDate(Some(Box::new(
                            row.try_get(c.ordinal()).expect("Failed to get date"),
                        ))),
                        #[cfg(all(feature = "with-time", not(feature = "with-chrono")))]
                        "DATE" => Value::TimeDate(Some(Box::new(
                            row.try_get(c.ordinal()).expect("Failed to get date"),
                        ))),
                        #[cfg(all(feature = "with-chrono", feature = "postgres-array"))]
                        "DATE[]" => Value::Array(
                            sea_query::ArrayType::ChronoDate,
                            Some(Box::new(
                                row.try_get::<Vec<chrono::NaiveDate>, _>(c.ordinal())
                                    .expect("Failed to get date array")
                                    .iter()
                                    .map(|val| Value::ChronoDate(Some(Box::new(val.clone()))))
                                    .collect(),
                            )),
                        ),
                        #[cfg(all(
                            feature = "with-time",
                            not(feature = "with-chrono"),
                            feature = "postgres-array"
                        ))]
                        "DATE[]" => Value::Array(
                            sea_query::ArrayType::TimeDate,
                            Some(Box::new(
                                row.try_get::<Vec<time::Date>, _>(c.ordinal())
                                    .expect("Failed to get date array")
                                    .iter()
                                    .map(|val| Value::TimeDate(Some(Box::new(val.clone()))))
                                    .collect(),
                            )),
                        ),
                        #[cfg(feature = "with-chrono")]
                        "TIME" => Value::ChronoTime(Some(Box::new(
                            row.try_get(c.ordinal()).expect("Failed to get time"),
                        ))),
                        #[cfg(all(feature = "with-time", not(feature = "with-chrono")))]
                        "TIME" => Value::TimeTime(Some(Box::new(
                            row.try_get(c.ordinal()).expect("Failed to get time"),
                        ))),
                        #[cfg(all(feature = "with-chrono", feature = "postgres-array"))]
                        "TIME[]" => Value::Array(
                            sea_query::ArrayType::ChronoTime,
                            Some(Box::new(
                                row.try_get::<Vec<chrono::NaiveTime>, _>(c.ordinal())
                                    .expect("Failed to get time array")
                                    .iter()
                                    .map(|val| Value::ChronoTime(Some(Box::new(val.clone()))))
                                    .collect(),
                            )),
                        ),
                        #[cfg(all(
                            feature = "with-time",
                            not(feature = "with-chrono"),
                            feature = "postgres-array"
                        ))]
                        "TIME[]" => Value::Array(
                            sea_query::ArrayType::TimeTime,
                            Some(Box::new(
                                row.try_get::<Vec<time::Time>, _>(c.ordinal())
                                    .expect("Failed to get time array")
                                    .iter()
                                    .map(|val| Value::TimeTime(Some(Box::new(val.clone()))))
                                    .collect(),
                            )),
                        ),
                        #[cfg(feature = "with-chrono")]
                        "TIMESTAMPTZ" => Value::ChronoDateTimeUtc(Some(Box::new(
                            row.try_get(c.ordinal()).expect("Failed to get timestamptz"),
                        ))),
                        #[cfg(all(feature = "with-time", not(feature = "with-chrono")))]
                        "TIMESTAMPTZ" => Value::TimeDateTime(Some(Box::new(
                            row.try_get(c.ordinal()).expect("Failed to get timestamptz"),
                        ))),
                        #[cfg(all(feature = "with-chrono", feature = "postgres-array"))]
                        "TIMESTAMPTZ[]" => Value::Array(
                            sea_query::ArrayType::ChronoDateTimeUtc,
                            Some(Box::new(
                                row.try_get::<Vec<chrono::DateTime<chrono::Utc>>, _>(c.ordinal())
                                    .expect("Failed to get timestamptz array")
                                    .iter()
                                    .map(|val| {
                                        Value::ChronoDateTimeUtc(Some(Box::new(val.clone())))
                                    })
                                    .collect(),
                            )),
                        ),
                        #[cfg(all(
                            feature = "with-time",
                            not(feature = "with-chrono"),
                            feature = "postgres-array"
                        ))]
                        "TIMESTAMPTZ[]" => Value::Array(
                            sea_query::ArrayType::TimeDateTime,
                            Some(Box::new(
                                row.try_get::<Vec<time::OffsetDateTime>, _>(c.ordinal())
                                    .expect("Failed to get timestamptz array")
                                    .iter()
                                    .map(|val| Value::TimeDateTime(Some(Box::new(val.clone()))))
                                    .collect(),
                            )),
                        ),
                        #[cfg(feature = "with-chrono")]
                        "TIMETZ" => Value::ChronoTime(Some(Box::new(
                            row.try_get(c.ordinal()).expect("Failed to get timetz"),
                        ))),
                        #[cfg(all(feature = "with-time", not(feature = "with-chrono")))]
                        "TIMETZ" => Value::TimeTime(Some(Box::new(
                            row.try_get(c.ordinal()).expect("Failed to get timetz"),
                        ))),
                        #[cfg(all(feature = "with-chrono", feature = "postgres-array"))]
                        "TIMETZ[]" => Value::Array(
                            sea_query::ArrayType::ChronoTime,
                            Some(Box::new(
                                row.try_get::<Vec<chrono::NaiveTime>, _>(c.ordinal())
                                    .expect("Failed to get timetz array")
                                    .iter()
                                    .map(|val| Value::ChronoTime(Some(Box::new(val.clone()))))
                                    .collect(),
                            )),
                        ),
                        #[cfg(all(
                            feature = "with-time",
                            not(feature = "with-chrono"),
                            feature = "postgres-array"
                        ))]
                        "TIMETZ[]" => Value::Array(
                            sea_query::ArrayType::TimeTime,
                            Some(Box::new(
                                row.try_get::<Vec<time::Time>, _>(c.ordinal())
                                    .expect("Failed to get timetz array")
                                    .iter()
                                    .map(|val| Value::TimeTime(Some(Box::new(val.clone()))))
                                    .collect(),
                            )),
                        ),
                        // --- uuid ---
                        #[cfg(feature = "with-uuid")]
                        "UUID" => Value::Uuid(Some(Box::new(
                            row.try_get(c.ordinal()).expect("Failed to get uuid"),
                        ))),
                        #[cfg(all(feature = "with-uuid", feature = "postgres-array"))]
                        "UUID[]" => Value::Array(
                            sea_query::ArrayType::Uuid,
                            Some(Box::new(
                                row.try_get::<Vec<uuid::Uuid>, _>(c.ordinal())
                                    .expect("Failed to get uuid array")
                                    .iter()
                                    .map(|val| Value::Uuid(Some(Box::new(val.clone()))))
                                    .collect(),
                            )),
                        ),
                        _ => unreachable!("Unknown column type: {}", c.type_info().name()),
                    },
                )
            })
            .collect(),
    }
}

View File

@ -1,3 +1,4 @@
use futures::lock::Mutex;
use log::LevelFilter; use log::LevelFilter;
use sea_query::Values; use sea_query::Values;
use std::{future::Future, pin::Pin, sync::Arc}; use std::{future::Future, pin::Pin, sync::Arc};
@ -68,7 +69,7 @@ impl SqlxSqliteConnector {
if options.get_max_connections().is_none() { if options.get_max_connections().is_none() {
options.max_connections(1); options.max_connections(1);
} }
match options.pool_options().connect_with(opt).await { match options.sqlx_pool_options().connect_with(opt).await {
Ok(pool) => { Ok(pool) => {
let pool = SqlxSqlitePoolConnection { let pool = SqlxSqlitePoolConnection {
pool, pool,
@ -105,7 +106,7 @@ impl SqlxSqlitePoolConnection {
debug_print!("{}", stmt); debug_print!("{}", stmt);
let query = sqlx_query(&stmt); let query = sqlx_query(&stmt);
let mut conn = self.pool.acquire().await.map_err(conn_acquire_err)?; let mut conn = self.pool.acquire().await.map_err(sqlx_conn_acquire_err)?;
crate::metric::metric!(self.metric_callback, &stmt, { crate::metric::metric!(self.metric_callback, &stmt, {
match query.execute(&mut *conn).await { match query.execute(&mut *conn).await {
Ok(res) => Ok(res.into()), Ok(res) => Ok(res.into()),
@ -119,7 +120,7 @@ impl SqlxSqlitePoolConnection {
pub async fn execute_unprepared(&self, sql: &str) -> Result<ExecResult, DbErr> { pub async fn execute_unprepared(&self, sql: &str) -> Result<ExecResult, DbErr> {
debug_print!("{}", sql); debug_print!("{}", sql);
let conn = &mut self.pool.acquire().await.map_err(conn_acquire_err)?; let conn = &mut self.pool.acquire().await.map_err(sqlx_conn_acquire_err)?;
match conn.execute(sql).await { match conn.execute(sql).await {
Ok(res) => Ok(res.into()), Ok(res) => Ok(res.into()),
Err(err) => Err(sqlx_error_to_exec_err(err)), Err(err) => Err(sqlx_error_to_exec_err(err)),
@ -132,7 +133,7 @@ impl SqlxSqlitePoolConnection {
debug_print!("{}", stmt); debug_print!("{}", stmt);
let query = sqlx_query(&stmt); let query = sqlx_query(&stmt);
let mut conn = self.pool.acquire().await.map_err(conn_acquire_err)?; let mut conn = self.pool.acquire().await.map_err(sqlx_conn_acquire_err)?;
crate::metric::metric!(self.metric_callback, &stmt, { crate::metric::metric!(self.metric_callback, &stmt, {
match query.fetch_one(&mut *conn).await { match query.fetch_one(&mut *conn).await {
Ok(row) => Ok(Some(row.into())), Ok(row) => Ok(Some(row.into())),
@ -150,7 +151,7 @@ impl SqlxSqlitePoolConnection {
debug_print!("{}", stmt); debug_print!("{}", stmt);
let query = sqlx_query(&stmt); let query = sqlx_query(&stmt);
let mut conn = self.pool.acquire().await.map_err(conn_acquire_err)?; let mut conn = self.pool.acquire().await.map_err(sqlx_conn_acquire_err)?;
crate::metric::metric!(self.metric_callback, &stmt, { crate::metric::metric!(self.metric_callback, &stmt, {
match query.fetch_all(&mut *conn).await { match query.fetch_all(&mut *conn).await {
Ok(rows) => Ok(rows.into_iter().map(|r| r.into()).collect()), Ok(rows) => Ok(rows.into_iter().map(|r| r.into()).collect()),
@ -164,7 +165,7 @@ impl SqlxSqlitePoolConnection {
pub async fn stream(&self, stmt: Statement) -> Result<QueryStream, DbErr> { pub async fn stream(&self, stmt: Statement) -> Result<QueryStream, DbErr> {
debug_print!("{}", stmt); debug_print!("{}", stmt);
let conn = self.pool.acquire().await.map_err(conn_acquire_err)?; let conn = self.pool.acquire().await.map_err(sqlx_conn_acquire_err)?;
Ok(QueryStream::from(( Ok(QueryStream::from((
conn, conn,
stmt, stmt,
@ -179,7 +180,7 @@ impl SqlxSqlitePoolConnection {
isolation_level: Option<IsolationLevel>, isolation_level: Option<IsolationLevel>,
access_mode: Option<AccessMode>, access_mode: Option<AccessMode>,
) -> Result<DatabaseTransaction, DbErr> { ) -> Result<DatabaseTransaction, DbErr> {
let conn = self.pool.acquire().await.map_err(conn_acquire_err)?; let conn = self.pool.acquire().await.map_err(sqlx_conn_acquire_err)?;
DatabaseTransaction::new_sqlite( DatabaseTransaction::new_sqlite(
conn, conn,
self.metric_callback.clone(), self.metric_callback.clone(),
@ -205,7 +206,7 @@ impl SqlxSqlitePoolConnection {
T: Send, T: Send,
E: std::error::Error + Send, E: std::error::Error + Send,
{ {
let conn = self.pool.acquire().await.map_err(conn_acquire_err)?; let conn = self.pool.acquire().await.map_err(sqlx_conn_acquire_err)?;
let transaction = DatabaseTransaction::new_sqlite( let transaction = DatabaseTransaction::new_sqlite(
conn, conn,
self.metric_callback.clone(), self.metric_callback.clone(),
@ -226,7 +227,7 @@ impl SqlxSqlitePoolConnection {
/// Checks if a connection to the database is still valid. /// Checks if a connection to the database is still valid.
pub async fn ping(&self) -> Result<(), DbErr> { pub async fn ping(&self) -> Result<(), DbErr> {
let conn = &mut self.pool.acquire().await.map_err(conn_acquire_err)?; let conn = &mut self.pool.acquire().await.map_err(sqlx_conn_acquire_err)?;
match conn.ping().await { match conn.ping().await {
Ok(_) => Ok(()), Ok(_) => Ok(()),
Err(err) => Err(sqlx_error_to_conn_err(err)), Err(err) => Err(sqlx_error_to_conn_err(err)),
@ -325,6 +326,115 @@ fn ensure_returning_version(version: &str) -> Result<(), DbErr> {
} }
} }
/// Build a [`crate::QueryStream`] from a pooled SQLite connection, the
/// statement to run, and an optional metrics callback.
impl
    From<(
        PoolConnection<sqlx::Sqlite>,
        Statement,
        Option<crate::metric::Callback>,
    )> for crate::QueryStream
{
    fn from(
        parts: (
            PoolConnection<sqlx::Sqlite>,
            Statement,
            Option<crate::metric::Callback>,
        ),
    ) -> Self {
        // Unpack the tuple, wrap the connection in the SQLite variant of the
        // backend-agnostic inner connection, and delegate to the builder.
        let (connection, statement, callback) = parts;
        Self::build(
            statement,
            crate::InnerConnection::Sqlite(connection),
            callback,
        )
    }
}
impl crate::DatabaseTransaction {
    /// Open a transaction over a pooled SQLite connection.
    ///
    /// The connection is wrapped in an `Arc<Mutex<_>>` so the transaction can
    /// share it across awaits, then handed to [`Self::begin`] together with
    /// the SQLite backend marker, the metrics callback, and the requested
    /// isolation level / access mode.
    pub(crate) async fn new_sqlite(
        inner: PoolConnection<sqlx::Sqlite>,
        metric_callback: Option<crate::metric::Callback>,
        isolation_level: Option<IsolationLevel>,
        access_mode: Option<AccessMode>,
    ) -> Result<crate::DatabaseTransaction, DbErr> {
        let shared_conn = Arc::new(Mutex::new(crate::InnerConnection::Sqlite(inner)));
        Self::begin(
            shared_conn,
            crate::DbBackend::Sqlite,
            metric_callback,
            isolation_level,
            access_mode,
        )
        .await
    }
}
/// Convert a raw `sqlx` SQLite row into a [`crate::ProxyRow`], mapping each
/// column to a `sea_query` [`Value`] chosen by the column's declared type
/// name (as reported by `sqlx`'s SQLite `TypeInfo`).
///
/// Date/time columns decode into either `chrono` or `time` values depending
/// on which feature is enabled; when both are on, `chrono` wins (the `time`
/// arms are gated on `not(feature = "with-chrono")`).
///
/// # Panics
///
/// Panics (via `expect`) if a column fails to decode into the expected Rust
/// type, and hits `unreachable!` on any type name not listed below.
#[cfg(feature = "proxy")]
pub(crate) fn from_sqlx_sqlite_row_to_proxy_row(row: &sqlx::sqlite::SqliteRow) -> crate::ProxyRow {
    // Type-name reference:
    // https://docs.rs/sqlx-sqlite/0.7.2/src/sqlx_sqlite/type_info.rs.html
    // https://docs.rs/sqlx-sqlite/0.7.2/sqlx_sqlite/types/index.html
    use sea_query::Value;
    use sqlx::{Column, Row, TypeInfo};

    crate::ProxyRow {
        // One (column name, decoded value) pair per column, in row order.
        values: row
            .columns()
            .iter()
            .map(|c| {
                (
                    c.name().to_string(),
                    match c.type_info().name() {
                        "BOOLEAN" => Value::Bool(Some(
                            row.try_get(c.ordinal()).expect("Failed to get boolean"),
                        )),
                        // NOTE(review): SQLite integers are 64-bit; decoding
                        // "INTEGER" into an i32 here may fail for values
                        // outside i32 range — confirm this is intended.
                        "INTEGER" => Value::Int(Some(
                            row.try_get(c.ordinal()).expect("Failed to get integer"),
                        )),
                        "BIGINT" | "INT8" => Value::BigInt(Some(
                            row.try_get(c.ordinal()).expect("Failed to get big integer"),
                        )),
                        "REAL" => Value::Double(Some(
                            row.try_get(c.ordinal()).expect("Failed to get double"),
                        )),
                        "TEXT" => Value::String(Some(Box::new(
                            row.try_get(c.ordinal()).expect("Failed to get string"),
                        ))),
                        "BLOB" => Value::Bytes(Some(Box::new(
                            row.try_get(c.ordinal()).expect("Failed to get bytes"),
                        ))),
                        // NOTE(review): decoding DATETIME as a UTC timestamp
                        // assumes values were stored in UTC — confirm.
                        #[cfg(feature = "with-chrono")]
                        "DATETIME" => Value::ChronoDateTimeUtc(Some(Box::new(
                            row.try_get(c.ordinal()).expect("Failed to get timestamp"),
                        ))),
                        #[cfg(all(feature = "with-time", not(feature = "with-chrono")))]
                        "DATETIME" => Value::TimeDateTimeWithTimeZone(Some(Box::new(
                            row.try_get(c.ordinal()).expect("Failed to get timestamp"),
                        ))),
                        #[cfg(feature = "with-chrono")]
                        "DATE" => Value::ChronoDate(Some(Box::new(
                            row.try_get(c.ordinal()).expect("Failed to get date"),
                        ))),
                        #[cfg(all(feature = "with-time", not(feature = "with-chrono")))]
                        "DATE" => Value::TimeDate(Some(Box::new(
                            row.try_get(c.ordinal()).expect("Failed to get date"),
                        ))),
                        #[cfg(feature = "with-chrono")]
                        "TIME" => Value::ChronoTime(Some(Box::new(
                            row.try_get(c.ordinal()).expect("Failed to get time"),
                        ))),
                        #[cfg(all(feature = "with-time", not(feature = "with-chrono")))]
                        "TIME" => Value::TimeTime(Some(Box::new(
                            row.try_get(c.ordinal()).expect("Failed to get time"),
                        ))),
                        // Any other declared type is a bug in the mapping above.
                        _ => unreachable!("Unknown column type: {}", c.type_info().name()),
                    },
                )
            })
            .collect(),
    }
}
#[cfg(all(test, feature = "sqlite-use-returning-for-3_35"))] #[cfg(all(test, feature = "sqlite-use-returning-for-3_35"))]
mod tests { mod tests {
use super::*; use super::*;

View File

@ -1,4 +1,4 @@
use crate::{EntityName, Iden, IdenStatic, IntoSimpleExpr, Iterable}; use crate::{DbBackend, EntityName, Iden, IdenStatic, IntoSimpleExpr, Iterable};
use sea_query::{ use sea_query::{
Alias, BinOper, DynIden, Expr, IntoIden, SeaRc, SelectStatement, SimpleExpr, Value, Alias, BinOper, DynIden, Expr, IntoIden, SeaRc, SelectStatement, SimpleExpr, Value,
}; };
@ -247,6 +247,14 @@ pub trait ColumnTrait: IdenStatic + Iterable + FromStr {
Expr::expr(self.into_simple_expr()) Expr::expr(self.into_simple_expr())
} }
/// Construct a returning [`Expr`].
///
/// Every backend currently produces a plain column reference; the match on
/// `db_backend` (and the clippy allow for its single binding) is presumably
/// kept as a seam for backends that need a different RETURNING expression —
/// confirm before collapsing it.
#[allow(clippy::match_single_binding)]
fn into_returning_expr(self, db_backend: DbBackend) -> Expr {
    match db_backend {
        // All backends today: bare column reference.
        _ => Expr::col(self),
    }
}
/// Cast column expression used in select statement. /// Cast column expression used in select statement.
/// It only cast database enum as text if it's an enum column. /// It only cast database enum as text if it's an enum column.
fn select_as(&self, expr: Expr) -> SimpleExpr { fn select_as(&self, expr: Expr) -> SimpleExpr {

View File

@ -151,16 +151,6 @@ where
DbErr::Json(s.to_string()) DbErr::Json(s.to_string())
} }
#[allow(dead_code)]
#[cfg(feature = "sqlx-dep")]
pub(crate) fn conn_acquire_err(sqlx_err: sqlx::Error) -> DbErr {
match sqlx_err {
sqlx::Error::PoolTimedOut => DbErr::ConnectionAcquire(ConnAcquireErr::Timeout),
sqlx::Error::PoolClosed => DbErr::ConnectionAcquire(ConnAcquireErr::ConnectionClosed),
_ => DbErr::Conn(RuntimeErr::SqlxError(sqlx_err)),
}
}
/// An error from unsuccessful SQL query /// An error from unsuccessful SQL query
#[derive(Error, Debug, Clone, PartialEq, Eq)] #[derive(Error, Debug, Clone, PartialEq, Eq)]
#[non_exhaustive] #[non_exhaustive]

View File

@ -3,7 +3,7 @@ use crate::{
Iterable, PrimaryKeyToColumn, PrimaryKeyTrait, SelectModel, SelectorRaw, Statement, TryFromU64, Iterable, PrimaryKeyToColumn, PrimaryKeyTrait, SelectModel, SelectorRaw, Statement, TryFromU64,
TryInsert, TryInsert,
}; };
use sea_query::{Expr, FromValueTuple, Iden, InsertStatement, IntoColumnRef, Query, ValueTuple}; use sea_query::{FromValueTuple, Iden, InsertStatement, Query, ValueTuple};
use std::{future::Future, marker::PhantomData}; use std::{future::Future, marker::PhantomData};
/// Defines a structure to perform INSERT operations in an ActiveModel /// Defines a structure to perform INSERT operations in an ActiveModel
@ -118,10 +118,12 @@ where
// so that self is dropped before entering await // so that self is dropped before entering await
let mut query = self.query; let mut query = self.query;
if db.support_returning() && <A::Entity as EntityTrait>::PrimaryKey::iter().count() > 0 { if db.support_returning() && <A::Entity as EntityTrait>::PrimaryKey::iter().count() > 0 {
let returning = Query::returning().exprs( let db_backend = db.get_database_backend();
<A::Entity as EntityTrait>::PrimaryKey::iter() let returning =
.map(|c| c.into_column().select_as(Expr::col(c.into_column_ref()))), Query::returning().exprs(<A::Entity as EntityTrait>::PrimaryKey::iter().map(|c| {
); c.into_column()
.select_as(c.into_column().into_returning_expr(db_backend))
}));
query.returning(returning); query.returning(returning);
} }
Inserter::<A>::new(self.primary_key, query).exec(db) Inserter::<A>::new(self.primary_key, query).exec(db)
@ -275,7 +277,8 @@ where
let found = match db.support_returning() { let found = match db.support_returning() {
true => { true => {
let returning = Query::returning().exprs( let returning = Query::returning().exprs(
<A::Entity as EntityTrait>::Column::iter().map(|c| c.select_as(Expr::col(c))), <A::Entity as EntityTrait>::Column::iter()
.map(|c| c.select_as(c.into_returning_expr(db_backend))),
); );
insert_statement.returning(returning); insert_statement.returning(returning);
SelectorRaw::<SelectModel<<A::Entity as EntityTrait>::Model>>::from_statement( SelectorRaw::<SelectModel<<A::Entity as EntityTrait>::Model>>::from_statement(

View File

@ -2,7 +2,7 @@ use crate::{
error::*, ActiveModelTrait, ColumnTrait, ConnectionTrait, EntityTrait, IntoActiveModel, error::*, ActiveModelTrait, ColumnTrait, ConnectionTrait, EntityTrait, IntoActiveModel,
Iterable, PrimaryKeyTrait, SelectModel, SelectorRaw, UpdateMany, UpdateOne, Iterable, PrimaryKeyTrait, SelectModel, SelectorRaw, UpdateMany, UpdateOne,
}; };
use sea_query::{Expr, FromValueTuple, Query, UpdateStatement}; use sea_query::{FromValueTuple, Query, UpdateStatement};
/// Defines an update operation /// Defines an update operation
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
@ -114,10 +114,11 @@ impl Updater {
match db.support_returning() { match db.support_returning() {
true => { true => {
let returning = Query::returning()
.exprs(Column::<A>::iter().map(|c| c.select_as(Expr::col(c))));
self.query.returning(returning);
let db_backend = db.get_database_backend(); let db_backend = db.get_database_backend();
let returning = Query::returning().exprs(
Column::<A>::iter().map(|c| c.select_as(c.into_returning_expr(db_backend))),
);
self.query.returning(returning);
let found: Option<Model<A>> = SelectorRaw::<SelectModel<Model<A>>>::from_statement( let found: Option<Model<A>> = SelectorRaw::<SelectModel<Model<A>>>::from_statement(
db_backend.build(&self.query), db_backend.build(&self.query),
) )
@ -148,10 +149,11 @@ impl Updater {
match db.support_returning() { match db.support_returning() {
true => { true => {
let returning =
Query::returning().exprs(E::Column::iter().map(|c| c.select_as(Expr::col(c))));
self.query.returning(returning);
let db_backend = db.get_database_backend(); let db_backend = db.get_database_backend();
let returning = Query::returning().exprs(
E::Column::iter().map(|c| c.select_as(c.into_returning_expr(db_backend))),
);
self.query.returning(returning);
let models: Vec<E::Model> = SelectorRaw::<SelectModel<E::Model>>::from_statement( let models: Vec<E::Model> = SelectorRaw::<SelectModel<E::Model>>::from_statement(
db_backend.build(&self.query), db_backend.build(&self.query),
) )

View File

@ -13,11 +13,6 @@ use sea_orm::{
}; };
#[sea_orm_macros::test] #[sea_orm_macros::test]
#[cfg(any(
feature = "sqlx-mysql",
feature = "sqlx-sqlite",
feature = "sqlx-postgres"
))]
async fn main() -> Result<(), DbErr> { async fn main() -> Result<(), DbErr> {
let ctx = TestContext::new("active_enum_tests").await; let ctx = TestContext::new("active_enum_tests").await;
create_tables(&ctx.db).await?; create_tables(&ctx.db).await?;

View File

@ -2,7 +2,7 @@ pub mod common;
use common::features::*; use common::features::*;
use pretty_assertions::assert_eq; use pretty_assertions::assert_eq;
use sea_orm::{entity::prelude::*, entity::*, DatabaseConnection}; use sea_orm::{entity::prelude::*, entity::*, DatabaseConnection, NotSet};
#[sea_orm_macros::test] #[sea_orm_macros::test]
#[cfg(feature = "sqlx-postgres")] #[cfg(feature = "sqlx-postgres")]
@ -26,7 +26,12 @@ pub async fn create_and_update(db: &DatabaseConnection) -> Result<(), DbErr> {
bit64: 64, bit64: 64,
}; };
let res = bits.clone().into_active_model().insert(db).await?; let res = bits::ActiveModel {
id: NotSet,
..bits.clone().into_active_model()
}
.insert(db)
.await?;
let model = Bits::find().one(db).await?; let model = Bits::find().one(db).await?;
assert_eq!(model, Some(res)); assert_eq!(model, Some(res));

View File

@ -5,11 +5,6 @@ use pretty_assertions::assert_eq;
use sea_orm::{entity::prelude::*, entity::*, DatabaseConnection}; use sea_orm::{entity::prelude::*, entity::*, DatabaseConnection};
#[sea_orm_macros::test] #[sea_orm_macros::test]
#[cfg(any(
feature = "sqlx-mysql",
feature = "sqlx-sqlite",
feature = "sqlx-postgres"
))]
async fn main() -> Result<(), DbErr> { async fn main() -> Result<(), DbErr> {
let ctx = TestContext::new("byte_primary_key_tests").await; let ctx = TestContext::new("byte_primary_key_tests").await;
create_tables(&ctx.db).await?; create_tables(&ctx.db).await?;

View File

@ -5,11 +5,6 @@ use pretty_assertions::assert_eq;
use sea_orm::prelude::*; use sea_orm::prelude::*;
#[sea_orm_macros::test] #[sea_orm_macros::test]
#[cfg(any(
feature = "sqlx-mysql",
feature = "sqlx-sqlite",
feature = "sqlx-postgres"
))]
pub async fn connection_ping() { pub async fn connection_ping() {
let ctx = TestContext::new("connection_ping").await; let ctx = TestContext::new("connection_ping").await;

View File

@ -10,11 +10,6 @@ use sea_orm::DatabaseConnection;
// DATABASE_URL="mysql://root:root@localhost" cargo test --features sqlx-mysql,runtime-async-std-native-tls --test crud_tests // DATABASE_URL="mysql://root:root@localhost" cargo test --features sqlx-mysql,runtime-async-std-native-tls --test crud_tests
// DATABASE_URL="postgres://root:root@localhost" cargo test --features sqlx-postgres,runtime-async-std-native-tls --test crud_tests // DATABASE_URL="postgres://root:root@localhost" cargo test --features sqlx-postgres,runtime-async-std-native-tls --test crud_tests
#[sea_orm_macros::test] #[sea_orm_macros::test]
#[cfg(any(
feature = "sqlx-mysql",
feature = "sqlx-sqlite",
feature = "sqlx-postgres"
))]
async fn main() { async fn main() {
let ctx = TestContext::new("bakery_chain_schema_crud_tests").await; let ctx = TestContext::new("bakery_chain_schema_crud_tests").await;
create_tables(&ctx.db).await.unwrap(); create_tables(&ctx.db).await.unwrap();

View File

@ -6,11 +6,6 @@ use sea_orm::{entity::prelude::*, DerivePartialModel, FromQueryResult, QuerySele
use serde_json::json; use serde_json::json;
#[sea_orm_macros::test] #[sea_orm_macros::test]
#[cfg(any(
feature = "sqlx-mysql",
feature = "sqlx-sqlite",
feature = "sqlx-postgres"
))]
async fn cursor_tests() -> Result<(), DbErr> { async fn cursor_tests() -> Result<(), DbErr> {
let ctx = TestContext::new("cursor_tests").await; let ctx = TestContext::new("cursor_tests").await;
create_tables(&ctx.db).await?; create_tables(&ctx.db).await?;

View File

@ -1,13 +1,8 @@
pub mod common; pub mod common;
pub use common::{features::*, setup::*, TestContext}; pub use common::{features::*, setup::*, TestContext};
use sea_orm::{entity::prelude::*, DatabaseConnection, IntoActiveModel}; use sea_orm::{entity::prelude::*, DatabaseConnection, IntoActiveModel, NotSet};
#[sea_orm_macros::test] #[sea_orm_macros::test]
#[cfg(any(
feature = "sqlx-mysql",
feature = "sqlx-sqlite",
feature = "sqlx-postgres"
))]
async fn main() -> Result<(), DbErr> { async fn main() -> Result<(), DbErr> {
let ctx = TestContext::new("delete_by_id_tests").await; let ctx = TestContext::new("delete_by_id_tests").await;
create_tables(&ctx.db).await?; create_tables(&ctx.db).await?;
@ -26,7 +21,10 @@ pub async fn create_and_delete_applog(db: &DatabaseConnection) -> Result<(), DbE
created_at: "2021-09-17T17:50:20+08:00".parse().unwrap(), created_at: "2021-09-17T17:50:20+08:00".parse().unwrap(),
}; };
Applog::insert(log1.clone().into_active_model()) Applog::insert(applog::ActiveModel {
id: NotSet,
..log1.clone().into_active_model()
})
.exec(db) .exec(db)
.await?; .await?;
@ -37,7 +35,10 @@ pub async fn create_and_delete_applog(db: &DatabaseConnection) -> Result<(), DbE
created_at: "2022-09-17T17:50:20+08:00".parse().unwrap(), created_at: "2022-09-17T17:50:20+08:00".parse().unwrap(),
}; };
Applog::insert(log2.clone().into_active_model()) Applog::insert(applog::ActiveModel {
id: NotSet,
..log2.clone().into_active_model()
})
.exec(db) .exec(db)
.await?; .await?;

View File

@ -3,17 +3,12 @@ pub mod common;
pub use common::{features::*, setup::*, TestContext}; pub use common::{features::*, setup::*, TestContext};
use pretty_assertions::assert_eq; use pretty_assertions::assert_eq;
use sea_orm::{ use sea_orm::{
entity::prelude::*, DatabaseConnection, Delete, IntoActiveModel, Iterable, QueryTrait, Set, entity::prelude::*, DatabaseConnection, Delete, IntoActiveModel, Iterable, NotSet, QueryTrait,
Update, Set, Update,
}; };
use sea_query::{Expr, Query}; use sea_query::{Expr, Query};
#[sea_orm_macros::test] #[sea_orm_macros::test]
#[cfg(any(
feature = "sqlx-mysql",
feature = "sqlx-sqlite",
feature = "sqlx-postgres"
))]
async fn main() -> Result<(), DbErr> { async fn main() -> Result<(), DbErr> {
let ctx = TestContext::new("dyn_table_name_tests").await; let ctx = TestContext::new("dyn_table_name_tests").await;
create_tables(&ctx.db).await?; create_tables(&ctx.db).await?;
@ -36,7 +31,10 @@ pub async fn dyn_table_name_lazy_static(db: &DatabaseConnection) -> Result<(), D
name: "1st Row".into(), name: "1st Row".into(),
}; };
// Prepare insert statement // Prepare insert statement
let mut insert = Entity::insert(model.clone().into_active_model()); let mut insert = Entity::insert(ActiveModel {
id: NotSet,
..model.clone().into_active_model()
});
// Reset the table name of insert statement // Reset the table name of insert statement
insert.query().into_table(entity.table_ref()); insert.query().into_table(entity.table_ref());
// Execute the insert statement // Execute the insert statement

View File

@ -11,11 +11,6 @@ pub use crud::*;
use sea_orm::{DbConn, TryInsertResult}; use sea_orm::{DbConn, TryInsertResult};
#[sea_orm_macros::test] #[sea_orm_macros::test]
#[cfg(any(
feature = "sqlx-mysql",
feature = "sqlx-sqlite",
feature = "sqlx-postgres"
))]
async fn main() { async fn main() {
let ctx = TestContext::new("bakery_chain_empty_insert_tests").await; let ctx = TestContext::new("bakery_chain_empty_insert_tests").await;
create_tables(&ctx.db).await.unwrap(); create_tables(&ctx.db).await.unwrap();

View File

@ -10,11 +10,6 @@ use sea_orm::{
}; };
#[sea_orm_macros::test] #[sea_orm_macros::test]
#[cfg(any(
feature = "sqlx-mysql",
feature = "sqlx-sqlite",
feature = "sqlx-postgres"
))]
async fn main() -> Result<(), DbErr> { async fn main() -> Result<(), DbErr> {
let ctx = TestContext::new("enum_primary_key_tests").await; let ctx = TestContext::new("enum_primary_key_tests").await;
create_tables(&ctx.db).await?; create_tables(&ctx.db).await?;

View File

@ -9,7 +9,7 @@ pub use common::{
TestContext, TestContext,
}; };
use pretty_assertions::assert_eq; use pretty_assertions::assert_eq;
use sea_orm::{entity::prelude::*, entity::*, DatabaseConnection}; use sea_orm::{entity::prelude::*, entity::*, DatabaseConnection, NotSet};
#[sea_orm_macros::test] #[sea_orm_macros::test]
#[cfg(all(feature = "sqlx-postgres", feature = "postgres-array"))] #[cfg(all(feature = "sqlx-postgres", feature = "postgres-array"))]
@ -33,7 +33,12 @@ pub async fn insert_event_trigger(db: &DatabaseConnection) -> Result<(), DbErr>
), ),
}; };
let result = event_trigger.clone().into_active_model().insert(db).await?; let result = event_trigger::ActiveModel {
id: NotSet,
..event_trigger.clone().into_active_model()
}
.insert(db)
.await?;
assert_eq!(result, event_trigger); assert_eq!(result, event_trigger);

View File

@ -5,11 +5,6 @@ use pretty_assertions::assert_eq;
use sea_orm::{entity::prelude::*, ConnectionTrait, DatabaseConnection}; use sea_orm::{entity::prelude::*, ConnectionTrait, DatabaseConnection};
#[sea_orm_macros::test] #[sea_orm_macros::test]
#[cfg(any(
feature = "sqlx-mysql",
feature = "sqlx-sqlite",
feature = "sqlx-postgres"
))]
async fn main() -> Result<(), DbErr> { async fn main() -> Result<(), DbErr> {
let ctx = TestContext::new("execute_unprepared_tests").await; let ctx = TestContext::new("execute_unprepared_tests").await;
create_tables(&ctx.db).await?; create_tables(&ctx.db).await?;
@ -24,7 +19,7 @@ pub async fn execute_unprepared(db: &DatabaseConnection) -> Result<(), DbErr> {
db.execute_unprepared( db.execute_unprepared(
[ [
"INSERT INTO insert_default VALUES (1), (2), (3), (4), (5)", "INSERT INTO insert_default (id) VALUES (1), (2), (3), (4), (5)",
"DELETE FROM insert_default WHERE id % 2 = 0", "DELETE FROM insert_default WHERE id % 2 = 0",
] ]
.join(";") .join(";")

View File

@ -5,11 +5,6 @@ use pretty_assertions::assert_eq;
use sea_orm::entity::prelude::*; use sea_orm::entity::prelude::*;
#[sea_orm_macros::test] #[sea_orm_macros::test]
#[cfg(any(
feature = "sqlx-mysql",
feature = "sqlx-sqlite",
feature = "sqlx-postgres"
))]
async fn main() -> Result<(), DbErr> { async fn main() -> Result<(), DbErr> {
let ctx = TestContext::new("insert_default_tests").await; let ctx = TestContext::new("insert_default_tests").await;
create_tables(&ctx.db).await?; create_tables(&ctx.db).await?;

View File

@ -2,15 +2,10 @@ pub mod common;
pub use common::{features::*, setup::*, TestContext}; pub use common::{features::*, setup::*, TestContext};
use pretty_assertions::assert_eq; use pretty_assertions::assert_eq;
use sea_orm::{entity::prelude::*, entity::*, DatabaseConnection}; use sea_orm::{entity::prelude::*, entity::*, DatabaseConnection, NotSet};
use serde_json::json; use serde_json::json;
#[sea_orm_macros::test] #[sea_orm_macros::test]
#[cfg(any(
feature = "sqlx-mysql",
feature = "sqlx-sqlite",
feature = "sqlx-postgres"
))]
async fn main() -> Result<(), DbErr> { async fn main() -> Result<(), DbErr> {
let ctx = TestContext::new("json_struct_tests").await; let ctx = TestContext::new("json_struct_tests").await;
create_tables(&ctx.db).await?; create_tables(&ctx.db).await?;
@ -46,7 +41,12 @@ pub async fn insert_json_struct_1(db: &DatabaseConnection) -> Result<(), DbErr>
}), }),
}; };
let result = model.clone().into_active_model().insert(db).await?; let result = ActiveModel {
id: NotSet,
..model.clone().into_active_model()
}
.insert(db)
.await?;
assert_eq!(result, model); assert_eq!(result, model);
@ -81,7 +81,12 @@ pub async fn insert_json_struct_2(db: &DatabaseConnection) -> Result<(), DbErr>
json_value_opt: None, json_value_opt: None,
}; };
let result = model.clone().into_active_model().insert(db).await?; let result = ActiveModel {
id: NotSet,
..model.clone().into_active_model()
}
.insert(db)
.await?;
assert_eq!(result, model); assert_eq!(result, model);

View File

@ -5,11 +5,6 @@ use pretty_assertions::assert_eq;
use sea_orm::{entity::prelude::*, entity::*, DatabaseConnection}; use sea_orm::{entity::prelude::*, entity::*, DatabaseConnection};
#[sea_orm_macros::test] #[sea_orm_macros::test]
#[cfg(any(
feature = "sqlx-mysql",
feature = "sqlx-sqlite",
feature = "sqlx-postgres"
))]
async fn main() -> Result<(), DbErr> { async fn main() -> Result<(), DbErr> {
let ctx = TestContext::new("json_vec_tests").await; let ctx = TestContext::new("json_vec_tests").await;
create_tables(&ctx.db).await?; create_tables(&ctx.db).await?;
@ -32,7 +27,12 @@ pub async fn insert_json_vec(db: &DatabaseConnection) -> Result<(), DbErr> {
])), ])),
}; };
let result = json_vec.clone().into_active_model().insert(db).await?; let result = json_vec::ActiveModel {
id: NotSet,
..json_vec.clone().into_active_model()
}
.insert(db)
.await?;
assert_eq!(result, json_vec); assert_eq!(result, json_vec);
@ -48,7 +48,7 @@ pub async fn insert_json_vec(db: &DatabaseConnection) -> Result<(), DbErr> {
pub async fn insert_json_string_vec_derive(db: &DatabaseConnection) -> Result<(), DbErr> { pub async fn insert_json_string_vec_derive(db: &DatabaseConnection) -> Result<(), DbErr> {
let json_vec = json_vec_derive::json_string_vec::Model { let json_vec = json_vec_derive::json_string_vec::Model {
id: 2, id: 1,
str_vec: Some(json_vec_derive::json_string_vec::StringVec(vec![ str_vec: Some(json_vec_derive::json_string_vec::StringVec(vec![
"4".to_string(), "4".to_string(),
"5".to_string(), "5".to_string(),
@ -56,7 +56,12 @@ pub async fn insert_json_string_vec_derive(db: &DatabaseConnection) -> Result<()
])), ])),
}; };
let result = json_vec.clone().into_active_model().insert(db).await?; let result = json_vec_derive::json_string_vec::ActiveModel {
id: NotSet,
..json_vec.clone().into_active_model()
}
.insert(db)
.await?;
assert_eq!(result, json_vec); assert_eq!(result, json_vec);
@ -72,7 +77,7 @@ pub async fn insert_json_string_vec_derive(db: &DatabaseConnection) -> Result<()
pub async fn insert_json_struct_vec_derive(db: &DatabaseConnection) -> Result<(), DbErr> { pub async fn insert_json_struct_vec_derive(db: &DatabaseConnection) -> Result<(), DbErr> {
let json_vec = json_vec_derive::json_struct_vec::Model { let json_vec = json_vec_derive::json_struct_vec::Model {
id: 2, id: 1,
struct_vec: vec![ struct_vec: vec![
json_vec_derive::json_struct_vec::JsonColumn { json_vec_derive::json_struct_vec::JsonColumn {
value: "4".to_string(), value: "4".to_string(),
@ -86,7 +91,12 @@ pub async fn insert_json_struct_vec_derive(db: &DatabaseConnection) -> Result<()
], ],
}; };
let result = json_vec.clone().into_active_model().insert(db).await?; let result = json_vec_derive::json_struct_vec::ActiveModel {
id: NotSet,
..json_vec.clone().into_active_model()
}
.insert(db)
.await?;
assert_eq!(result, json_vec); assert_eq!(result, json_vec);
let model = json_vec_derive::json_struct_vec::Entity::find() let model = json_vec_derive::json_struct_vec::Entity::find()

View File

@ -4,11 +4,6 @@ pub use common::{bakery_chain::*, setup::*, TestContext};
use sea_orm::{entity::*, query::*, DbConn, DbErr, RuntimeErr}; use sea_orm::{entity::*, query::*, DbConn, DbErr, RuntimeErr};
#[sea_orm_macros::test] #[sea_orm_macros::test]
#[cfg(any(
feature = "sqlx-mysql",
feature = "sqlx-sqlite",
feature = "sqlx-postgres"
))]
async fn loader_load_one() -> Result<(), DbErr> { async fn loader_load_one() -> Result<(), DbErr> {
let ctx = TestContext::new("loader_test_load_one").await; let ctx = TestContext::new("loader_test_load_one").await;
create_tables(&ctx.db).await?; create_tables(&ctx.db).await?;
@ -47,11 +42,6 @@ async fn loader_load_one() -> Result<(), DbErr> {
} }
#[sea_orm_macros::test] #[sea_orm_macros::test]
#[cfg(any(
feature = "sqlx-mysql",
feature = "sqlx-sqlite",
feature = "sqlx-postgres"
))]
async fn loader_load_many() -> Result<(), DbErr> { async fn loader_load_many() -> Result<(), DbErr> {
let ctx = TestContext::new("loader_test_load_many").await; let ctx = TestContext::new("loader_test_load_many").await;
create_tables(&ctx.db).await?; create_tables(&ctx.db).await?;
@ -121,11 +111,6 @@ async fn loader_load_many() -> Result<(), DbErr> {
} }
#[sea_orm_macros::test] #[sea_orm_macros::test]
#[cfg(any(
feature = "sqlx-mysql",
feature = "sqlx-sqlite",
feature = "sqlx-postgres"
))]
async fn loader_load_many_multi() -> Result<(), DbErr> { async fn loader_load_many_multi() -> Result<(), DbErr> {
let ctx = TestContext::new("loader_test_load_many_multi").await; let ctx = TestContext::new("loader_test_load_many_multi").await;
create_tables(&ctx.db).await?; create_tables(&ctx.db).await?;
@ -154,11 +139,6 @@ async fn loader_load_many_multi() -> Result<(), DbErr> {
} }
#[sea_orm_macros::test] #[sea_orm_macros::test]
#[cfg(any(
feature = "sqlx-mysql",
feature = "sqlx-sqlite",
feature = "sqlx-postgres"
))]
async fn loader_load_many_to_many() -> Result<(), DbErr> { async fn loader_load_many_to_many() -> Result<(), DbErr> {
let ctx = TestContext::new("loader_test_load_many_to_many").await; let ctx = TestContext::new("loader_test_load_many_to_many").await;
create_tables(&ctx.db).await?; create_tables(&ctx.db).await?;

View File

@ -5,11 +5,6 @@ use pretty_assertions::assert_eq;
use sea_orm::{entity::prelude::*, DatabaseConnection, IntoActiveModel, Set}; use sea_orm::{entity::prelude::*, DatabaseConnection, IntoActiveModel, Set};
#[sea_orm_macros::test] #[sea_orm_macros::test]
#[cfg(any(
feature = "sqlx-mysql",
feature = "sqlx-sqlite",
feature = "sqlx-postgres"
))]
async fn main() -> Result<(), DbErr> { async fn main() -> Result<(), DbErr> {
let ctx = TestContext::new("features_parallel_tests").await; let ctx = TestContext::new("features_parallel_tests").await;
create_tables(&ctx.db).await?; create_tables(&ctx.db).await?;

View File

@ -2,15 +2,10 @@ pub mod common;
use common::{features::*, setup::*, TestContext}; use common::{features::*, setup::*, TestContext};
use pretty_assertions::assert_eq; use pretty_assertions::assert_eq;
use sea_orm::{entity::prelude::*, entity::*, DatabaseConnection}; use sea_orm::{entity::prelude::*, entity::*, DatabaseConnection, NotSet};
use std::str::FromStr; use std::str::FromStr;
#[sea_orm_macros::test] #[sea_orm_macros::test]
#[cfg(any(
feature = "sqlx-mysql",
feature = "sqlx-sqlite",
feature = "sqlx-postgres"
))]
async fn main() -> Result<(), DbErr> { async fn main() -> Result<(), DbErr> {
let ctx = TestContext::new("pi_tests").await; let ctx = TestContext::new("pi_tests").await;
create_tables(&ctx.db).await?; create_tables(&ctx.db).await?;
@ -29,7 +24,12 @@ pub async fn create_and_update_pi(db: &DatabaseConnection) -> Result<(), DbErr>
big_decimal_opt: None, big_decimal_opt: None,
}; };
let res = pi.clone().into_active_model().insert(db).await?; let res = pi::ActiveModel {
id: NotSet,
..pi.clone().into_active_model()
}
.insert(db)
.await?;
let model = Pi::find().one(db).await?; let model = Pi::find().one(db).await?;
assert_eq!(model, Some(res)); assert_eq!(model, Some(res));

View File

@ -7,11 +7,6 @@ pub use sea_orm::{ConnectionTrait, QueryFilter, QuerySelect};
// Run the test locally: // Run the test locally:
// DATABASE_URL="mysql://root:@localhost" cargo test --features sqlx-mysql,runtime-async-std --test query_tests // DATABASE_URL="mysql://root:@localhost" cargo test --features sqlx-mysql,runtime-async-std --test query_tests
#[sea_orm_macros::test] #[sea_orm_macros::test]
#[cfg(any(
feature = "sqlx-mysql",
feature = "sqlx-sqlite",
feature = "sqlx-postgres"
))]
pub async fn find_one_with_no_result() { pub async fn find_one_with_no_result() {
let ctx = TestContext::new("find_one_with_no_result").await; let ctx = TestContext::new("find_one_with_no_result").await;
create_tables(&ctx.db).await.unwrap(); create_tables(&ctx.db).await.unwrap();
@ -23,11 +18,6 @@ pub async fn find_one_with_no_result() {
} }
#[sea_orm_macros::test] #[sea_orm_macros::test]
#[cfg(any(
feature = "sqlx-mysql",
feature = "sqlx-sqlite",
feature = "sqlx-postgres"
))]
pub async fn find_one_with_result() { pub async fn find_one_with_result() {
let ctx = TestContext::new("find_one_with_result").await; let ctx = TestContext::new("find_one_with_result").await;
create_tables(&ctx.db).await.unwrap(); create_tables(&ctx.db).await.unwrap();
@ -49,11 +39,6 @@ pub async fn find_one_with_result() {
} }
#[sea_orm_macros::test] #[sea_orm_macros::test]
#[cfg(any(
feature = "sqlx-mysql",
feature = "sqlx-sqlite",
feature = "sqlx-postgres"
))]
pub async fn find_by_id_with_no_result() { pub async fn find_by_id_with_no_result() {
let ctx = TestContext::new("find_by_id_with_no_result").await; let ctx = TestContext::new("find_by_id_with_no_result").await;
create_tables(&ctx.db).await.unwrap(); create_tables(&ctx.db).await.unwrap();
@ -65,11 +50,6 @@ pub async fn find_by_id_with_no_result() {
} }
#[sea_orm_macros::test] #[sea_orm_macros::test]
#[cfg(any(
feature = "sqlx-mysql",
feature = "sqlx-sqlite",
feature = "sqlx-postgres"
))]
pub async fn find_by_id_with_result() { pub async fn find_by_id_with_result() {
let ctx = TestContext::new("find_by_id_with_result").await; let ctx = TestContext::new("find_by_id_with_result").await;
create_tables(&ctx.db).await.unwrap(); create_tables(&ctx.db).await.unwrap();
@ -95,11 +75,6 @@ pub async fn find_by_id_with_result() {
} }
#[sea_orm_macros::test] #[sea_orm_macros::test]
#[cfg(any(
feature = "sqlx-mysql",
feature = "sqlx-sqlite",
feature = "sqlx-postgres"
))]
pub async fn find_all_with_no_result() { pub async fn find_all_with_no_result() {
let ctx = TestContext::new("find_all_with_no_result").await; let ctx = TestContext::new("find_all_with_no_result").await;
create_tables(&ctx.db).await.unwrap(); create_tables(&ctx.db).await.unwrap();
@ -111,11 +86,6 @@ pub async fn find_all_with_no_result() {
} }
#[sea_orm_macros::test] #[sea_orm_macros::test]
#[cfg(any(
feature = "sqlx-mysql",
feature = "sqlx-sqlite",
feature = "sqlx-postgres"
))]
pub async fn find_all_with_result() { pub async fn find_all_with_result() {
let ctx = TestContext::new("find_all_with_result").await; let ctx = TestContext::new("find_all_with_result").await;
create_tables(&ctx.db).await.unwrap(); create_tables(&ctx.db).await.unwrap();
@ -146,11 +116,6 @@ pub async fn find_all_with_result() {
} }
#[sea_orm_macros::test] #[sea_orm_macros::test]
#[cfg(any(
feature = "sqlx-mysql",
feature = "sqlx-sqlite",
feature = "sqlx-postgres"
))]
pub async fn find_all_filter_no_result() { pub async fn find_all_filter_no_result() {
let ctx = TestContext::new("find_all_filter_no_result").await; let ctx = TestContext::new("find_all_filter_no_result").await;
create_tables(&ctx.db).await.unwrap(); create_tables(&ctx.db).await.unwrap();
@ -185,11 +150,6 @@ pub async fn find_all_filter_no_result() {
} }
#[sea_orm_macros::test] #[sea_orm_macros::test]
#[cfg(any(
feature = "sqlx-mysql",
feature = "sqlx-sqlite",
feature = "sqlx-postgres"
))]
pub async fn find_all_filter_with_results() { pub async fn find_all_filter_with_results() {
let ctx = TestContext::new("find_all_filter_with_results").await; let ctx = TestContext::new("find_all_filter_with_results").await;
create_tables(&ctx.db).await.unwrap(); create_tables(&ctx.db).await.unwrap();
@ -224,11 +184,6 @@ pub async fn find_all_filter_with_results() {
} }
#[sea_orm_macros::test] #[sea_orm_macros::test]
#[cfg(any(
feature = "sqlx-mysql",
feature = "sqlx-sqlite",
feature = "sqlx-postgres"
))]
pub async fn select_only_exclude_option_fields() { pub async fn select_only_exclude_option_fields() {
let ctx = TestContext::new("select_only_exclude_option_fields").await; let ctx = TestContext::new("select_only_exclude_option_fields").await;
create_tables(&ctx.db).await.unwrap(); create_tables(&ctx.db).await.unwrap();

View File

@ -11,11 +11,6 @@ pub use uuid::Uuid;
// Run the test locally: // Run the test locally:
// DATABASE_URL="mysql://root:@localhost" cargo test --features sqlx-mysql,runtime-async-std-native-tls --test relational_tests // DATABASE_URL="mysql://root:@localhost" cargo test --features sqlx-mysql,runtime-async-std-native-tls --test relational_tests
#[sea_orm_macros::test] #[sea_orm_macros::test]
#[cfg(any(
feature = "sqlx-mysql",
feature = "sqlx-sqlite",
feature = "sqlx-postgres"
))]
pub async fn left_join() { pub async fn left_join() {
let ctx = TestContext::new("test_left_join").await; let ctx = TestContext::new("test_left_join").await;
create_tables(&ctx.db).await.unwrap(); create_tables(&ctx.db).await.unwrap();
@ -200,11 +195,6 @@ pub async fn right_join() {
} }
#[sea_orm_macros::test] #[sea_orm_macros::test]
#[cfg(any(
feature = "sqlx-mysql",
feature = "sqlx-sqlite",
feature = "sqlx-postgres"
))]
pub async fn inner_join() { pub async fn inner_join() {
let ctx = TestContext::new("test_inner_join").await; let ctx = TestContext::new("test_inner_join").await;
create_tables(&ctx.db).await.unwrap(); create_tables(&ctx.db).await.unwrap();
@ -290,11 +280,6 @@ pub async fn inner_join() {
} }
#[sea_orm_macros::test] #[sea_orm_macros::test]
#[cfg(any(
feature = "sqlx-mysql",
feature = "sqlx-sqlite",
feature = "sqlx-postgres"
))]
pub async fn group_by() { pub async fn group_by() {
let ctx = TestContext::new("test_group_by").await; let ctx = TestContext::new("test_group_by").await;
create_tables(&ctx.db).await.unwrap(); create_tables(&ctx.db).await.unwrap();
@ -384,11 +369,6 @@ pub async fn group_by() {
} }
#[sea_orm_macros::test] #[sea_orm_macros::test]
#[cfg(any(
feature = "sqlx-mysql",
feature = "sqlx-sqlite",
feature = "sqlx-postgres"
))]
pub async fn having() { pub async fn having() {
// customers with orders with total equal to $90 // customers with orders with total equal to $90
let ctx = TestContext::new("test_having").await; let ctx = TestContext::new("test_having").await;
@ -494,11 +474,6 @@ pub async fn having() {
} }
#[sea_orm_macros::test] #[sea_orm_macros::test]
#[cfg(any(
feature = "sqlx-mysql",
feature = "sqlx-sqlite",
feature = "sqlx-postgres"
))]
pub async fn related() -> Result<(), DbErr> { pub async fn related() -> Result<(), DbErr> {
use sea_orm::{SelectA, SelectB}; use sea_orm::{SelectA, SelectB};
@ -743,11 +718,6 @@ pub async fn related() -> Result<(), DbErr> {
} }
#[sea_orm_macros::test] #[sea_orm_macros::test]
#[cfg(any(
feature = "sqlx-mysql",
feature = "sqlx-sqlite",
feature = "sqlx-postgres"
))]
pub async fn linked() -> Result<(), DbErr> { pub async fn linked() -> Result<(), DbErr> {
use common::bakery_chain::Order; use common::bakery_chain::Order;
use sea_orm::{SelectA, SelectB}; use sea_orm::{SelectA, SelectB};

View File

@ -1,16 +1,11 @@
pub mod common; pub mod common;
pub use common::{bakery_chain::*, setup::*, TestContext}; pub use common::{bakery_chain::*, setup::*, TestContext};
use sea_orm::{entity::prelude::*, IntoActiveModel}; use sea_orm::{entity::prelude::*, IntoActiveModel, Set};
pub use sea_query::{Expr, Query}; pub use sea_query::{Expr, Query};
use serde_json::json; use serde_json::json;
#[sea_orm_macros::test] #[sea_orm_macros::test]
#[cfg(any(
feature = "sqlx-mysql",
feature = "sqlx-sqlite",
feature = "sqlx-postgres"
))]
async fn main() -> Result<(), DbErr> { async fn main() -> Result<(), DbErr> {
use bakery::*; use bakery::*;
@ -33,7 +28,9 @@ async fn main() -> Result<(), DbErr> {
]) ])
.and_where(Column::Id.eq(1)); .and_where(Column::Id.eq(1));
let returning = Query::returning().columns([Column::Id, Column::Name, Column::ProfitMargin]); let columns = [Column::Id, Column::Name, Column::ProfitMargin];
let returning =
Query::returning().exprs(columns.into_iter().map(|c| c.into_returning_expr(builder)));
create_tables(db).await?; create_tables(db).await?;
@ -69,11 +66,6 @@ async fn main() -> Result<(), DbErr> {
} }
#[sea_orm_macros::test] #[sea_orm_macros::test]
#[cfg(any(
feature = "sqlx-mysql",
feature = "sqlx-sqlite",
feature = "sqlx-postgres"
))]
#[cfg_attr( #[cfg_attr(
any( any(
feature = "sqlx-mysql", feature = "sqlx-mysql",
@ -95,10 +87,10 @@ async fn update_many() {
create_tables(db).await?; create_tables(db).await?;
Entity::insert( Entity::insert(
Model { ActiveModel {
id: 1, action: Set("before_save".into()),
action: "before_save".into(), values: Set(json!({ "id": "unique-id-001" })),
values: json!({ "id": "unique-id-001" }), ..Default::default()
} }
.into_active_model(), .into_active_model(),
) )
@ -106,10 +98,10 @@ async fn update_many() {
.await?; .await?;
Entity::insert( Entity::insert(
Model { ActiveModel {
id: 2, action: Set("before_save".into()),
action: "before_save".into(), values: Set(json!({ "id": "unique-id-002" })),
values: json!({ "id": "unique-id-002" }), ..Default::default()
} }
.into_active_model(), .into_active_model(),
) )
@ -117,10 +109,10 @@ async fn update_many() {
.await?; .await?;
Entity::insert( Entity::insert(
Model { ActiveModel {
id: 3, action: Set("before_save".into()),
action: "before_save".into(), values: Set(json!({ "id": "unique-id-003" })),
values: json!({ "id": "unique-id-003" }), ..Default::default()
} }
.into_active_model(), .into_active_model(),
) )

View File

@ -5,11 +5,6 @@ use pretty_assertions::assert_eq;
use sea_orm::{entity::prelude::*, query::*, DbBackend, IntoActiveModel, QueryOrder}; use sea_orm::{entity::prelude::*, query::*, DbBackend, IntoActiveModel, QueryOrder};
#[sea_orm_macros::test] #[sea_orm_macros::test]
#[cfg(any(
feature = "sqlx-mysql",
feature = "sqlx-sqlite",
feature = "sqlx-postgres"
))]
async fn main() -> Result<(), DbErr> { async fn main() -> Result<(), DbErr> {
let ctx = TestContext::new("self_join_tests").await; let ctx = TestContext::new("self_join_tests").await;
create_tables(&ctx.db).await?; create_tables(&ctx.db).await?;

View File

@ -133,9 +133,9 @@ async fn seed_data(db: &DatabaseConnection) {
#[cfg(any(feature = "sqlx-mysql", feature = "sqlx-postgres"))] #[cfg(any(feature = "sqlx-mysql", feature = "sqlx-postgres"))]
async fn find_baker_least_sales(db: &DatabaseConnection) -> Option<baker::Model> { async fn find_baker_least_sales(db: &DatabaseConnection) -> Option<baker::Model> {
#[cfg(feature = "sqlx-postgres")] #[cfg(any(feature = "sqlx-postgres"))]
type Type = i64; type Type = i64;
#[cfg(not(feature = "sqlx-postgres"))] #[cfg(not(any(feature = "sqlx-postgres")))]
type Type = Decimal; type Type = Decimal;
#[derive(Debug, FromQueryResult)] #[derive(Debug, FromQueryResult)]

View File

@ -1,17 +1,12 @@
pub mod common; pub mod common;
pub use common::{bakery_chain::*, setup::*, TestContext}; pub use common::{bakery_chain::*, setup::*, TestContext};
pub use sea_orm::{ pub use sea_orm::{
entity::*, error::DbErr, error::SqlErr, tests_cfg, DatabaseConnection, DbBackend, EntityName, entity::*, error::DbErr, error::SqlErr, tests_cfg, ConnectionTrait, DatabaseConnection,
ExecResult, DbBackend, EntityName, ExecResult,
}; };
use uuid::Uuid; use uuid::Uuid;
#[sea_orm_macros::test] #[sea_orm_macros::test]
#[cfg(any(
feature = "sqlx-mysql",
feature = "sqlx-sqlite",
feature = "sqlx-postgres"
))]
async fn main() { async fn main() {
let ctx = TestContext::new("bakery_chain_sql_err_tests").await; let ctx = TestContext::new("bakery_chain_sql_err_tests").await;
create_tables(&ctx.db).await.unwrap(); create_tables(&ctx.db).await.unwrap();

View File

@ -5,11 +5,6 @@ pub use sea_orm::entity::*;
pub use sea_orm::{ConnectionTrait, DbErr, QueryFilter}; pub use sea_orm::{ConnectionTrait, DbErr, QueryFilter};
#[sea_orm_macros::test] #[sea_orm_macros::test]
#[cfg(any(
feature = "sqlx-mysql",
feature = "sqlx-sqlite",
feature = "sqlx-postgres"
))]
pub async fn stream() -> Result<(), DbErr> { pub async fn stream() -> Result<(), DbErr> {
use futures::StreamExt; use futures::StreamExt;

View File

@ -6,11 +6,6 @@ use sea_orm::{entity::prelude::*, entity::*, DatabaseConnection};
use serde_json::json; use serde_json::json;
#[sea_orm_macros::test] #[sea_orm_macros::test]
#[cfg(any(
feature = "sqlx-mysql",
feature = "sqlx-sqlite",
feature = "sqlx-postgres"
))]
async fn main() -> Result<(), DbErr> { async fn main() -> Result<(), DbErr> {
let ctx = TestContext::new("features_schema_string_primary_key_tests").await; let ctx = TestContext::new("features_schema_string_primary_key_tests").await;
create_tables(&ctx.db).await?; create_tables(&ctx.db).await?;

View File

@ -1,16 +1,11 @@
pub mod common; pub mod common;
pub use common::{features::*, setup::*, TestContext}; pub use common::{features::*, setup::*, TestContext};
use pretty_assertions::assert_eq; use pretty_assertions::assert_eq;
use sea_orm::{entity::prelude::*, DatabaseConnection, IntoActiveModel}; use sea_orm::{entity::prelude::*, ActiveValue::NotSet, DatabaseConnection, IntoActiveModel};
use serde_json::json; use serde_json::json;
use time::macros::{date, time}; use time::macros::{date, time};
#[sea_orm_macros::test] #[sea_orm_macros::test]
#[cfg(any(
feature = "sqlx-mysql",
feature = "sqlx-sqlite",
feature = "sqlx-postgres"
))]
async fn main() { async fn main() {
let ctx = TestContext::new("time_crate_tests").await; let ctx = TestContext::new("time_crate_tests").await;
create_tables(&ctx.db).await.unwrap(); create_tables(&ctx.db).await.unwrap();
@ -30,7 +25,10 @@ pub async fn create_transaction_log(db: &DatabaseConnection) -> Result<(), DbErr
.assume_utc(), .assume_utc(),
}; };
let res = TransactionLog::insert(transaction_log.clone().into_active_model()) let res = TransactionLog::insert(transaction_log::ActiveModel {
id: NotSet,
..transaction_log.clone().into_active_model()
})
.exec(db) .exec(db)
.await?; .await?;

View File

@ -1,14 +1,9 @@
pub mod common; pub mod common;
pub use common::{features::*, setup::*, TestContext}; pub use common::{features::*, setup::*, TestContext};
use pretty_assertions::assert_eq; use pretty_assertions::assert_eq;
use sea_orm::{entity::prelude::*, DatabaseConnection, IntoActiveModel}; use sea_orm::{entity::prelude::*, DatabaseConnection, IntoActiveModel, NotSet};
#[sea_orm_macros::test] #[sea_orm_macros::test]
#[cfg(any(
feature = "sqlx-mysql",
feature = "sqlx-sqlite",
feature = "sqlx-postgres"
))]
async fn main() -> Result<(), DbErr> { async fn main() -> Result<(), DbErr> {
let ctx = TestContext::new("bakery_chain_schema_timestamp_tests").await; let ctx = TestContext::new("bakery_chain_schema_timestamp_tests").await;
create_tables(&ctx.db).await?; create_tables(&ctx.db).await?;
@ -28,7 +23,10 @@ pub async fn create_applog(db: &DatabaseConnection) -> Result<(), DbErr> {
created_at: "2021-09-17T17:50:20+08:00".parse().unwrap(), created_at: "2021-09-17T17:50:20+08:00".parse().unwrap(),
}; };
let res = Applog::insert(log.clone().into_active_model()) let res = Applog::insert(applog::ActiveModel {
id: NotSet,
..log.clone().into_active_model()
})
.exec(db) .exec(db)
.await?; .await?;
@ -77,7 +75,10 @@ pub async fn create_satellites_log(db: &DatabaseConnection) -> Result<(), DbErr>
deployment_date: "2022-01-07T12:11:23Z".parse().unwrap(), deployment_date: "2022-01-07T12:11:23Z".parse().unwrap(),
}; };
let res = Satellite::insert(archive.clone().into_active_model()) let res = Satellite::insert(satellite::ActiveModel {
id: NotSet,
..archive.clone().into_active_model()
})
.exec(db) .exec(db)
.await?; .await?;

View File

@ -5,11 +5,6 @@ use pretty_assertions::assert_eq;
use sea_orm::{prelude::*, AccessMode, DatabaseTransaction, IsolationLevel, Set, TransactionTrait}; use sea_orm::{prelude::*, AccessMode, DatabaseTransaction, IsolationLevel, Set, TransactionTrait};
#[sea_orm_macros::test] #[sea_orm_macros::test]
#[cfg(any(
feature = "sqlx-mysql",
feature = "sqlx-sqlite",
feature = "sqlx-postgres"
))]
pub async fn transaction() { pub async fn transaction() {
let ctx = TestContext::new("transaction_test").await; let ctx = TestContext::new("transaction_test").await;
create_tables(&ctx.db).await.unwrap(); create_tables(&ctx.db).await.unwrap();
@ -50,11 +45,6 @@ pub async fn transaction() {
} }
#[sea_orm_macros::test] #[sea_orm_macros::test]
#[cfg(any(
feature = "sqlx-mysql",
feature = "sqlx-sqlite",
feature = "sqlx-postgres"
))]
pub async fn transaction_with_reference() { pub async fn transaction_with_reference() {
let ctx = TestContext::new("transaction_with_reference_test").await; let ctx = TestContext::new("transaction_with_reference_test").await;
create_tables(&ctx.db).await.unwrap(); create_tables(&ctx.db).await.unwrap();
@ -105,11 +95,6 @@ fn _transaction_with_reference<'a>(
} }
#[sea_orm_macros::test] #[sea_orm_macros::test]
#[cfg(any(
feature = "sqlx-mysql",
feature = "sqlx-sqlite",
feature = "sqlx-postgres"
))]
pub async fn transaction_begin_out_of_scope() -> Result<(), DbErr> { pub async fn transaction_begin_out_of_scope() -> Result<(), DbErr> {
let ctx = TestContext::new("transaction_begin_out_of_scope_test").await; let ctx = TestContext::new("transaction_begin_out_of_scope_test").await;
create_tables(&ctx.db).await?; create_tables(&ctx.db).await?;
@ -150,11 +135,6 @@ pub async fn transaction_begin_out_of_scope() -> Result<(), DbErr> {
} }
#[sea_orm_macros::test] #[sea_orm_macros::test]
#[cfg(any(
feature = "sqlx-mysql",
feature = "sqlx-sqlite",
feature = "sqlx-postgres"
))]
pub async fn transaction_begin_commit() -> Result<(), DbErr> { pub async fn transaction_begin_commit() -> Result<(), DbErr> {
let ctx = TestContext::new("transaction_begin_commit_test").await; let ctx = TestContext::new("transaction_begin_commit_test").await;
create_tables(&ctx.db).await?; create_tables(&ctx.db).await?;
@ -196,11 +176,6 @@ pub async fn transaction_begin_commit() -> Result<(), DbErr> {
} }
#[sea_orm_macros::test] #[sea_orm_macros::test]
#[cfg(any(
feature = "sqlx-mysql",
feature = "sqlx-sqlite",
feature = "sqlx-postgres"
))]
pub async fn transaction_begin_rollback() -> Result<(), DbErr> { pub async fn transaction_begin_rollback() -> Result<(), DbErr> {
let ctx = TestContext::new("transaction_begin_rollback_test").await; let ctx = TestContext::new("transaction_begin_rollback_test").await;
create_tables(&ctx.db).await?; create_tables(&ctx.db).await?;
@ -242,11 +217,6 @@ pub async fn transaction_begin_rollback() -> Result<(), DbErr> {
} }
#[sea_orm_macros::test] #[sea_orm_macros::test]
#[cfg(any(
feature = "sqlx-mysql",
feature = "sqlx-sqlite",
feature = "sqlx-postgres"
))]
pub async fn transaction_closure_commit() -> Result<(), DbErr> { pub async fn transaction_closure_commit() -> Result<(), DbErr> {
let ctx = TestContext::new("transaction_closure_commit_test").await; let ctx = TestContext::new("transaction_closure_commit_test").await;
create_tables(&ctx.db).await?; create_tables(&ctx.db).await?;
@ -291,11 +261,6 @@ pub async fn transaction_closure_commit() -> Result<(), DbErr> {
} }
#[sea_orm_macros::test] #[sea_orm_macros::test]
#[cfg(any(
feature = "sqlx-mysql",
feature = "sqlx-sqlite",
feature = "sqlx-postgres"
))]
pub async fn transaction_closure_rollback() -> Result<(), DbErr> { pub async fn transaction_closure_rollback() -> Result<(), DbErr> {
let ctx = TestContext::new("transaction_closure_rollback_test").await; let ctx = TestContext::new("transaction_closure_rollback_test").await;
create_tables(&ctx.db).await?; create_tables(&ctx.db).await?;
@ -352,11 +317,6 @@ pub async fn transaction_closure_rollback() -> Result<(), DbErr> {
} }
#[sea_orm_macros::test] #[sea_orm_macros::test]
#[cfg(any(
feature = "sqlx-mysql",
feature = "sqlx-sqlite",
feature = "sqlx-postgres"
))]
pub async fn transaction_with_active_model_behaviour() -> Result<(), DbErr> { pub async fn transaction_with_active_model_behaviour() -> Result<(), DbErr> {
let ctx = TestContext::new("transaction_with_active_model_behaviour_test").await; let ctx = TestContext::new("transaction_with_active_model_behaviour_test").await;
create_tables(&ctx.db).await?; create_tables(&ctx.db).await?;
@ -440,11 +400,6 @@ pub async fn transaction_with_active_model_behaviour() -> Result<(), DbErr> {
} }
#[sea_orm_macros::test] #[sea_orm_macros::test]
#[cfg(any(
feature = "sqlx-mysql",
feature = "sqlx-sqlite",
feature = "sqlx-postgres"
))]
pub async fn transaction_nested() { pub async fn transaction_nested() {
let ctx = TestContext::new("transaction_nested_test").await; let ctx = TestContext::new("transaction_nested_test").await;
create_tables(&ctx.db).await.unwrap(); create_tables(&ctx.db).await.unwrap();
@ -692,11 +647,6 @@ pub async fn transaction_nested() {
} }
#[sea_orm_macros::test] #[sea_orm_macros::test]
#[cfg(any(
feature = "sqlx-mysql",
feature = "sqlx-sqlite",
feature = "sqlx-postgres"
))]
pub async fn transaction_with_config() { pub async fn transaction_with_config() {
let ctx = TestContext::new("transaction_with_config").await; let ctx = TestContext::new("transaction_with_config").await;
create_tables(&ctx.db).await.unwrap(); create_tables(&ctx.db).await.unwrap();

View File

@ -2,14 +2,9 @@ pub mod common;
pub use common::{features::*, setup::*, TestContext}; pub use common::{features::*, setup::*, TestContext};
use pretty_assertions::assert_eq; use pretty_assertions::assert_eq;
use sea_orm::{entity::prelude::*, entity::*, DatabaseConnection}; use sea_orm::{entity::prelude::*, entity::*, DatabaseConnection, NotSet};
#[sea_orm_macros::test] #[sea_orm_macros::test]
#[cfg(any(
feature = "sqlx-mysql",
feature = "sqlx-sqlite",
feature = "sqlx-postgres"
))]
async fn main() -> Result<(), DbErr> { async fn main() -> Result<(), DbErr> {
let ctx = TestContext::new("uuid_fmt_tests").await; let ctx = TestContext::new("uuid_fmt_tests").await;
create_tables(&ctx.db).await?; create_tables(&ctx.db).await?;
@ -31,7 +26,12 @@ pub async fn insert_uuid_fmt(db: &DatabaseConnection) -> Result<(), DbErr> {
uuid_urn: uuid.urn(), uuid_urn: uuid.urn(),
}; };
let result = uuid_fmt.clone().into_active_model().insert(db).await?; let result = uuid_fmt::ActiveModel {
id: NotSet,
..uuid_fmt.clone().into_active_model()
}
.insert(db)
.await?;
assert_eq!(result, uuid_fmt); assert_eq!(result, uuid_fmt);

View File

@ -6,11 +6,6 @@ use sea_orm::{entity::prelude::*, entity::*, DatabaseConnection};
use serde_json::json; use serde_json::json;
#[sea_orm_macros::test] #[sea_orm_macros::test]
#[cfg(any(
feature = "sqlx-mysql",
feature = "sqlx-sqlite",
feature = "sqlx-postgres"
))]
async fn main() -> Result<(), DbErr> { async fn main() -> Result<(), DbErr> {
let ctx = TestContext::new("bakery_chain_schema_uuid_tests").await; let ctx = TestContext::new("bakery_chain_schema_uuid_tests").await;
create_tables(&ctx.db).await?; create_tables(&ctx.db).await?;

View File

@ -12,15 +12,10 @@ pub use common::{
TestContext, TestContext,
}; };
use pretty_assertions::assert_eq; use pretty_assertions::assert_eq;
use sea_orm::{entity::prelude::*, entity::*, DatabaseConnection}; use sea_orm::{entity::prelude::*, entity::*, DatabaseConnection, NotSet};
use sea_query::{ArrayType, ColumnType, Value, ValueType, ValueTypeErr}; use sea_query::{ArrayType, ColumnType, Value, ValueType, ValueTypeErr};
#[sea_orm_macros::test] #[sea_orm_macros::test]
#[cfg(any(
feature = "sqlx-mysql",
feature = "sqlx-sqlite",
feature = "sqlx-postgres"
))]
async fn main() -> Result<(), DbErr> { async fn main() -> Result<(), DbErr> {
let ctx = TestContext::new("value_type_tests").await; let ctx = TestContext::new("value_type_tests").await;
create_tables(&ctx.db).await?; create_tables(&ctx.db).await?;
@ -45,7 +40,12 @@ pub async fn insert_value(db: &DatabaseConnection) -> Result<(), DbErr> {
id: 1, id: 1,
number: 48.into(), number: 48.into(),
}; };
let result = model.clone().into_active_model().insert(db).await?; let result = value_type_general::ActiveModel {
id: NotSet,
..model.clone().into_active_model()
}
.insert(db)
.await?;
assert_eq!(result, model); assert_eq!(result, model);
Ok(()) Ok(())
@ -57,7 +57,12 @@ pub async fn postgres_insert_value(db: &DatabaseConnection) -> Result<(), DbErr>
number: 48.into(), number: 48.into(),
str_vec: StringVec(vec!["ab".to_string(), "cd".to_string()]), str_vec: StringVec(vec!["ab".to_string(), "cd".to_string()]),
}; };
let result = model.clone().into_active_model().insert(db).await?; let result = value_type_pg::ActiveModel {
id: NotSet,
..model.clone().into_active_model()
}
.insert(db)
.await?;
assert_eq!(result, model); assert_eq!(result, model);
Ok(()) Ok(())