Upstream Changes - 1 (#2145)

* upstream changes
* universal `#[sea_orm_macros::test]`
* fix
* fix
* `ColumnTrait::into_returning_expr`
* fix
* fix
* Do not pub sqlx_common

Co-authored-by: Chris Tsang <chris.2y3@outlook.com>

parent b775027fee
commit f3967fdaca
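
For orientation, a minimal usage sketch of the universal test attribute referenced above (not part of the commit; the entity-free body and the in-memory SQLite URL are placeholder assumptions, and the test only compiles when one of the sqlx backend features is enabled):

// Hypothetical test using the attribute whose expansion is shown in the first hunk:
// it gates the test on the sqlx backend features and installs a tracing subscriber.
#[sea_orm_macros::test]
async fn ping_database() -> Result<(), sea_orm::DbErr> {
    let db = sea_orm::Database::connect("sqlite::memory:").await?;
    db.ping().await?;
    Ok(())
}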
@@ -810,6 +810,11 @@ pub fn test(_: TokenStream, input: TokenStream) -> TokenStream {
     quote::quote! (
         #[test]
+        #[cfg(any(
+            feature = "sqlx-mysql",
+            feature = "sqlx-sqlite",
+            feature = "sqlx-postgres",
+        ))]
         #(#attrs)*
         fn #name() #ret {
             let _ = ::tracing_subscriber::fmt()
@@ -579,14 +579,10 @@ impl DbBackend {

     /// Check if the database supports `RETURNING` syntax on insert and update
     pub fn support_returning(&self) -> bool {
-        #[cfg(not(feature = "sqlite-use-returning-for-3_35"))]
-        {
-            matches!(self, Self::Postgres)
-        }
-
-        #[cfg(feature = "sqlite-use-returning-for-3_35")]
-        {
-            matches!(self, Self::Postgres | Self::Sqlite)
+        match self {
+            Self::Postgres => true,
+            Self::Sqlite if cfg!(feature = "sqlite-use-returning-for-3_35") => true,
+            _ => false,
         }
     }
 }
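
As a side note (not part of the diff), a caller-side sketch of what the consolidated check means in practice; `returning_hint` is a made-up helper name:

// Illustrative only: Postgres always reports RETURNING support, SQLite only when
// the sqlite-use-returning-for-3_35 feature is enabled, MySQL never.
fn returning_hint(backend: sea_orm::DbBackend) -> &'static str {
    if backend.support_returning() {
        "use INSERT ... RETURNING"
    } else {
        "fall back to last_insert_id + SELECT"
    }
}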
@@ -160,35 +160,6 @@ impl ConnectOptions {
         }
     }

-    #[cfg(feature = "sqlx-dep")]
-    /// Convert [ConnectOptions] into [sqlx::pool::PoolOptions]
-    pub fn pool_options<DB>(self) -> sqlx::pool::PoolOptions<DB>
-    where
-        DB: sqlx::Database,
-    {
-        let mut opt = sqlx::pool::PoolOptions::new();
-        if let Some(max_connections) = self.max_connections {
-            opt = opt.max_connections(max_connections);
-        }
-        if let Some(min_connections) = self.min_connections {
-            opt = opt.min_connections(min_connections);
-        }
-        if let Some(connect_timeout) = self.connect_timeout {
-            opt = opt.acquire_timeout(connect_timeout);
-        }
-        if let Some(idle_timeout) = self.idle_timeout {
-            opt = opt.idle_timeout(Some(idle_timeout));
-        }
-        if let Some(acquire_timeout) = self.acquire_timeout {
-            opt = opt.acquire_timeout(acquire_timeout);
-        }
-        if let Some(max_lifetime) = self.max_lifetime {
-            opt = opt.max_lifetime(Some(max_lifetime));
-        }
-        opt = opt.test_before_acquire(self.test_before_acquire);
-        opt
-    }
-
     /// Get the database URL of the pool
     pub fn get_url(&self) -> &str {
         &self.url
@@ -150,11 +150,11 @@ impl Into<serde_json::Value> for ProxyRow {
 pub fn from_query_result_to_proxy_row(result: &QueryResult) -> ProxyRow {
     match &result.row {
         #[cfg(feature = "sqlx-mysql")]
-        QueryResultRow::SqlxMySql(row) => from_sqlx_mysql_row_to_proxy_row(&row),
+        QueryResultRow::SqlxMySql(row) => crate::from_sqlx_mysql_row_to_proxy_row(&row),
         #[cfg(feature = "sqlx-postgres")]
-        QueryResultRow::SqlxPostgres(row) => from_sqlx_postgres_row_to_proxy_row(&row),
+        QueryResultRow::SqlxPostgres(row) => crate::from_sqlx_postgres_row_to_proxy_row(&row),
         #[cfg(feature = "sqlx-sqlite")]
-        QueryResultRow::SqlxSqlite(row) => from_sqlx_sqlite_row_to_proxy_row(&row),
+        QueryResultRow::SqlxSqlite(row) => crate::from_sqlx_sqlite_row_to_proxy_row(&row),
         #[cfg(feature = "mock")]
         QueryResultRow::Mock(row) => ProxyRow {
             values: row.values.clone(),
@@ -163,675 +163,6 @@ pub fn from_query_result_to_proxy_row(result: &QueryResult) -> ProxyRow {
         }
     }

 [669 removed lines: the #[cfg(feature = "sqlx-mysql")] from_sqlx_mysql_row_to_proxy_row,
 #[cfg(feature = "sqlx-postgres")] from_sqlx_postgres_row_to_proxy_row and
 #[cfg(feature = "sqlx-sqlite")] from_sqlx_sqlite_row_to_proxy_row helpers, which map each backend's
 column types to sea_query::Value, are deleted from proxy.rs here; near-identical copies are re-added
 behind #[cfg(feature = "proxy")] in the sqlx driver modules later in this diff.]

 impl ProxyRow {
     /// Get a value from the [ProxyRow]
     pub fn try_get<T, I: crate::ColIdx>(&self, index: I) -> Result<T, DbErr>
@@ -1,17 +1,14 @@
 #![allow(missing_docs, unreachable_code, unused_variables)]

+use futures::Stream;
+use std::{pin::Pin, task::Poll};
 use tracing::instrument;

 #[cfg(any(feature = "mock", feature = "proxy"))]
 use std::sync::Arc;
-use std::{pin::Pin, task::Poll};
-
-use futures::Stream;
 #[cfg(feature = "sqlx-dep")]
 use futures::TryStreamExt;

 #[cfg(feature = "sqlx-dep")]
-use sqlx::{pool::PoolConnection, Executor};
+use sqlx::Executor;

 use super::metric::MetricStream;
 #[cfg(feature = "sqlx-dep")]
@@ -29,101 +26,6 @@ pub struct QueryStream {
     stream: MetricStream<'this>,
 }

 [95 removed lines: the From<(PoolConnection<sqlx::MySql>, Statement, Option<crate::metric::Callback>)>,
 From<(PoolConnection<sqlx::Postgres>, ...)>, From<(PoolConnection<sqlx::Sqlite>, ...)>,
 From<(Arc<crate::MockDatabaseConnection>, ...)> and From<(Arc<crate::ProxyDatabaseConnection>, ...)>
 impls for QueryStream are deleted here; they reappear in the corresponding sqlx driver, mock and proxy
 modules later in this diff.]

 impl std::fmt::Debug for QueryStream {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         write!(f, "QueryStream")
@@ -132,7 +34,7 @@ impl std::fmt::Debug for QueryStream {

 impl QueryStream {
     #[instrument(level = "trace", skip(metric_callback))]
-    fn build(
+    pub(crate) fn build(
         stmt: Statement,
         conn: InnerConnection,
         metric_callback: Option<crate::metric::Callback>,
@@ -7,7 +7,7 @@ use crate::{
 use crate::{sqlx_error_to_exec_err, sqlx_error_to_query_err};
 use futures::lock::Mutex;
 #[cfg(feature = "sqlx-dep")]
-use sqlx::{pool::PoolConnection, TransactionManager};
+use sqlx::TransactionManager;
 use std::{future::Future, pin::Pin, sync::Arc};
 use tracing::instrument;

@@ -28,91 +28,8 @@ impl std::fmt::Debug for DatabaseTransaction {
 }

 impl DatabaseTransaction {
 [removed lines: the backend-specific constructors new_mysql, new_postgres, new_sqlite, new_mock and
 new_proxy are deleted from DatabaseTransaction here; they reappear in the corresponding sqlx driver,
 mock and proxy modules later in this diff.]

     #[instrument(level = "trace", skip(metric_callback))]
-    async fn begin(
+    pub(crate) async fn begin(
         conn: Arc<Mutex<InnerConnection>>,
         backend: DbBackend,
         metric_callback: Option<crate::metric::Callback>,
@@ -293,17 +210,6 @@ impl DatabaseTransaction {
         }
         Ok(())
     }

-    #[cfg(feature = "sqlx-dep")]
-    fn map_err_ignore_not_found<T: std::fmt::Debug>(
-        err: Result<Option<T>, sqlx::Error>,
-    ) -> Result<Option<T>, DbErr> {
-        if let Err(sqlx::Error::RowNotFound) = err {
-            Ok(None)
-        } else {
-            err.map_err(sqlx_error_to_query_err)
-        }
-    }
 }

 impl Drop for DatabaseTransaction {
@@ -411,7 +317,7 @@ impl ConnectionTrait for DatabaseTransaction {
                 let query = crate::driver::sqlx_mysql::sqlx_query(&stmt);
                 let conn: &mut sqlx::MySqlConnection = &mut *conn;
                 crate::metric::metric!(self.metric_callback, &stmt, {
-                    Self::map_err_ignore_not_found(
+                    crate::sqlx_map_err_ignore_not_found(
                         query.fetch_one(conn).await.map(|row| Some(row.into())),
                     )
                 })
@@ -421,7 +327,7 @@ impl ConnectionTrait for DatabaseTransaction {
                 let query = crate::driver::sqlx_postgres::sqlx_query(&stmt);
                 let conn: &mut sqlx::PgConnection = &mut *conn;
                 crate::metric::metric!(self.metric_callback, &stmt, {
-                    Self::map_err_ignore_not_found(
+                    crate::sqlx_map_err_ignore_not_found(
                         query.fetch_one(conn).await.map(|row| Some(row.into())),
                     )
                 })
@@ -431,7 +337,7 @@ impl ConnectionTrait for DatabaseTransaction {
                 let query = crate::driver::sqlx_sqlite::sqlx_query(&stmt);
                 let conn: &mut sqlx::SqliteConnection = &mut *conn;
                 crate::metric::metric!(self.metric_callback, &stmt, {
-                    Self::map_err_ignore_not_found(
+                    crate::sqlx_map_err_ignore_not_found(
                         query.fetch_one(conn).await.map(|row| Some(row.into())),
                     )
                 })
@@ -207,3 +207,39 @@ impl MockDatabaseConnection {
         self.mocker.lock().map_err(query_err)?.ping()
     }
 }
+
+impl
+    From<(
+        Arc<crate::MockDatabaseConnection>,
+        Statement,
+        Option<crate::metric::Callback>,
+    )> for crate::QueryStream
+{
+    fn from(
+        (conn, stmt, metric_callback): (
+            Arc<crate::MockDatabaseConnection>,
+            Statement,
+            Option<crate::metric::Callback>,
+        ),
+    ) -> Self {
+        crate::QueryStream::build(stmt, crate::InnerConnection::Mock(conn), metric_callback)
+    }
+}
+
+impl crate::DatabaseTransaction {
+    pub(crate) async fn new_mock(
+        inner: Arc<crate::MockDatabaseConnection>,
+        metric_callback: Option<crate::metric::Callback>,
+    ) -> Result<crate::DatabaseTransaction, DbErr> {
+        use futures::lock::Mutex;
+        let backend = inner.get_database_backend();
+        Self::begin(
+            Arc::new(Mutex::new(crate::InnerConnection::Mock(inner))),
+            backend,
+            metric_callback,
+            None,
+            None,
+        )
+        .await
+    }
+}
@@ -16,7 +16,7 @@ pub use mock::*;
 #[cfg(feature = "proxy")]
 pub use proxy::*;
 #[cfg(feature = "sqlx-dep")]
-pub use sqlx_common::*;
+pub(crate) use sqlx_common::*;
 #[cfg(feature = "sqlx-mysql")]
 pub use sqlx_mysql::*;
 #[cfg(feature = "sqlx-postgres")]
@@ -138,3 +138,39 @@ impl ProxyDatabaseConnection {
         self.proxy.lock().map_err(query_err)?.ping()
     }
 }
+
+impl
+    From<(
+        Arc<crate::ProxyDatabaseConnection>,
+        Statement,
+        Option<crate::metric::Callback>,
+    )> for crate::QueryStream
+{
+    fn from(
+        (conn, stmt, metric_callback): (
+            Arc<crate::ProxyDatabaseConnection>,
+            Statement,
+            Option<crate::metric::Callback>,
+        ),
+    ) -> Self {
+        crate::QueryStream::build(stmt, crate::InnerConnection::Proxy(conn), metric_callback)
+    }
+}
+
+impl crate::DatabaseTransaction {
+    pub(crate) async fn new_proxy(
+        inner: Arc<crate::ProxyDatabaseConnection>,
+        metric_callback: Option<crate::metric::Callback>,
+    ) -> Result<crate::DatabaseTransaction, DbErr> {
+        use futures::lock::Mutex;
+        let backend = inner.get_database_backend();
+        Self::begin(
+            Arc::new(Mutex::new(crate::InnerConnection::Proxy(inner))),
+            backend,
+            metric_callback,
+            None,
+            None,
+        )
+        .await
+    }
+}
@@ -1,4 +1,4 @@
-use crate::{DbErr, RuntimeErr};
+use crate::{ConnAcquireErr, ConnectOptions, DbErr, RuntimeErr};

 /// Converts an [sqlx::error] execution error to a [DbErr]
 pub fn sqlx_error_to_exec_err(err: sqlx::Error) -> DbErr {
@@ -14,3 +14,53 @@ pub fn sqlx_error_to_query_err(err: sqlx::Error) -> DbErr {
 pub fn sqlx_error_to_conn_err(err: sqlx::Error) -> DbErr {
     DbErr::Conn(RuntimeErr::SqlxError(err))
 }
+
+/// Converts an [sqlx::error] error to a [DbErr]
+pub fn sqlx_map_err_ignore_not_found<T: std::fmt::Debug>(
+    err: Result<Option<T>, sqlx::Error>,
+) -> Result<Option<T>, DbErr> {
+    if let Err(sqlx::Error::RowNotFound) = err {
+        Ok(None)
+    } else {
+        err.map_err(sqlx_error_to_query_err)
+    }
+}
+
+/// Converts an [sqlx::error] error to a [DbErr]
+pub fn sqlx_conn_acquire_err(sqlx_err: sqlx::Error) -> DbErr {
+    match sqlx_err {
+        sqlx::Error::PoolTimedOut => DbErr::ConnectionAcquire(ConnAcquireErr::Timeout),
+        sqlx::Error::PoolClosed => DbErr::ConnectionAcquire(ConnAcquireErr::ConnectionClosed),
+        _ => DbErr::Conn(RuntimeErr::SqlxError(sqlx_err)),
+    }
+}
+
+impl ConnectOptions {
+    /// Convert [ConnectOptions] into [sqlx::pool::PoolOptions]
+    pub fn sqlx_pool_options<DB>(self) -> sqlx::pool::PoolOptions<DB>
+    where
+        DB: sqlx::Database,
+    {
+        let mut opt = sqlx::pool::PoolOptions::new();
+        if let Some(max_connections) = self.max_connections {
+            opt = opt.max_connections(max_connections);
+        }
+        if let Some(min_connections) = self.min_connections {
+            opt = opt.min_connections(min_connections);
+        }
+        if let Some(connect_timeout) = self.connect_timeout {
+            opt = opt.acquire_timeout(connect_timeout);
+        }
+        if let Some(idle_timeout) = self.idle_timeout {
+            opt = opt.idle_timeout(Some(idle_timeout));
+        }
+        if let Some(acquire_timeout) = self.acquire_timeout {
+            opt = opt.acquire_timeout(acquire_timeout);
+        }
+        if let Some(max_lifetime) = self.max_lifetime {
+            opt = opt.max_lifetime(Some(max_lifetime));
+        }
+        opt = opt.test_before_acquire(self.test_before_acquire);
+        opt
+    }
+}
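
A hedged sketch of the crate-internal call pattern that the driver modules below adopt for the new helper; `acquire_conn` is a made-up name and this is not public API (sqlx_common is re-exported `pub(crate)` in this diff):

// Sketch only: translate a pool acquire failure into DbErr::ConnectionAcquire via
// the sqlx_conn_acquire_err helper added above.
async fn acquire_conn(
    pool: &sqlx::Pool<sqlx::Postgres>,
) -> Result<sqlx::pool::PoolConnection<sqlx::Postgres>, DbErr> {
    pool.acquire().await.map_err(sqlx_conn_acquire_err)
}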
@@ -1,3 +1,4 @@
+use futures::lock::Mutex;
 use log::LevelFilter;
 use sea_query::Values;
 use std::{future::Future, pin::Pin, sync::Arc};
@@ -60,7 +61,7 @@ impl SqlxMySqlConnector {
                 );
             }
         }
-        match options.pool_options().connect_with(opt).await {
+        match options.sqlx_pool_options().connect_with(opt).await {
             Ok(pool) => Ok(DatabaseConnection::SqlxMySqlPoolConnection(
                 SqlxMySqlPoolConnection {
                     pool,
@@ -89,7 +90,7 @@ impl SqlxMySqlPoolConnection {
         debug_print!("{}", stmt);

         let query = sqlx_query(&stmt);
-        let mut conn = self.pool.acquire().await.map_err(conn_acquire_err)?;
+        let mut conn = self.pool.acquire().await.map_err(sqlx_conn_acquire_err)?;
         crate::metric::metric!(self.metric_callback, &stmt, {
             match query.execute(&mut *conn).await {
                 Ok(res) => Ok(res.into()),
@@ -103,7 +104,7 @@ impl SqlxMySqlPoolConnection {
     pub async fn execute_unprepared(&self, sql: &str) -> Result<ExecResult, DbErr> {
         debug_print!("{}", sql);

-        let conn = &mut self.pool.acquire().await.map_err(conn_acquire_err)?;
+        let conn = &mut self.pool.acquire().await.map_err(sqlx_conn_acquire_err)?;
         match conn.execute(sql).await {
             Ok(res) => Ok(res.into()),
             Err(err) => Err(sqlx_error_to_exec_err(err)),
@@ -116,7 +117,7 @@ impl SqlxMySqlPoolConnection {
         debug_print!("{}", stmt);

         let query = sqlx_query(&stmt);
-        let mut conn = self.pool.acquire().await.map_err(conn_acquire_err)?;
+        let mut conn = self.pool.acquire().await.map_err(sqlx_conn_acquire_err)?;
         crate::metric::metric!(self.metric_callback, &stmt, {
             match query.fetch_one(&mut *conn).await {
                 Ok(row) => Ok(Some(row.into())),
@@ -134,7 +135,7 @@ impl SqlxMySqlPoolConnection {
         debug_print!("{}", stmt);

         let query = sqlx_query(&stmt);
-        let mut conn = self.pool.acquire().await.map_err(conn_acquire_err)?;
+        let mut conn = self.pool.acquire().await.map_err(sqlx_conn_acquire_err)?;
         crate::metric::metric!(self.metric_callback, &stmt, {
             match query.fetch_all(&mut *conn).await {
                 Ok(rows) => Ok(rows.into_iter().map(|r| r.into()).collect()),
@@ -148,7 +149,7 @@ impl SqlxMySqlPoolConnection {
     pub async fn stream(&self, stmt: Statement) -> Result<QueryStream, DbErr> {
         debug_print!("{}", stmt);

-        let conn = self.pool.acquire().await.map_err(conn_acquire_err)?;
+        let conn = self.pool.acquire().await.map_err(sqlx_conn_acquire_err)?;
         Ok(QueryStream::from((
             conn,
             stmt,
@@ -163,7 +164,7 @@ impl SqlxMySqlPoolConnection {
         isolation_level: Option<IsolationLevel>,
         access_mode: Option<AccessMode>,
     ) -> Result<DatabaseTransaction, DbErr> {
-        let conn = self.pool.acquire().await.map_err(conn_acquire_err)?;
+        let conn = self.pool.acquire().await.map_err(sqlx_conn_acquire_err)?;
         DatabaseTransaction::new_mysql(
             conn,
             self.metric_callback.clone(),
@@ -189,7 +190,7 @@ impl SqlxMySqlPoolConnection {
         T: Send,
         E: std::error::Error + Send,
     {
-        let conn = self.pool.acquire().await.map_err(conn_acquire_err)?;
+        let conn = self.pool.acquire().await.map_err(sqlx_conn_acquire_err)?;
         let transaction = DatabaseTransaction::new_mysql(
             conn,
             self.metric_callback.clone(),
@@ -210,7 +211,7 @@ impl SqlxMySqlPoolConnection {

     /// Checks if a connection to the database is still valid.
     pub async fn ping(&self) -> Result<(), DbErr> {
-        let conn = &mut self.pool.acquire().await.map_err(conn_acquire_err)?;
+        let conn = &mut self.pool.acquire().await.map_err(sqlx_conn_acquire_err)?;
         match conn.ping().await {
             Ok(_) => Ok(()),
             Err(err) => Err(sqlx_error_to_conn_err(err)),
@@ -273,3 +274,179 @@ pub(crate) async fn set_transaction_config(
     }
     Ok(())
 }
+
+impl
+    From<(
+        PoolConnection<sqlx::MySql>,
+        Statement,
+        Option<crate::metric::Callback>,
+    )> for crate::QueryStream
+{
+    fn from(
+        (conn, stmt, metric_callback): (
+            PoolConnection<sqlx::MySql>,
+            Statement,
+            Option<crate::metric::Callback>,
+        ),
+    ) -> Self {
+        crate::QueryStream::build(stmt, crate::InnerConnection::MySql(conn), metric_callback)
+    }
+}
+
+impl crate::DatabaseTransaction {
+    pub(crate) async fn new_mysql(
+        inner: PoolConnection<sqlx::MySql>,
+        metric_callback: Option<crate::metric::Callback>,
+        isolation_level: Option<IsolationLevel>,
+        access_mode: Option<AccessMode>,
+    ) -> Result<crate::DatabaseTransaction, DbErr> {
+        Self::begin(
+            Arc::new(Mutex::new(crate::InnerConnection::MySql(inner))),
+            crate::DbBackend::MySql,
+            metric_callback,
+            isolation_level,
+            access_mode,
+        )
+        .await
+    }
+}
+
+#[cfg(feature = "proxy")]
+pub(crate) fn from_sqlx_mysql_row_to_proxy_row(row: &sqlx::mysql::MySqlRow) -> crate::ProxyRow {
+    // https://docs.rs/sqlx-mysql/0.7.2/src/sqlx_mysql/protocol/text/column.rs.html
+    // https://docs.rs/sqlx-mysql/0.7.2/sqlx_mysql/types/index.html
+    use sea_query::Value;
+    use sqlx::{Column, Row, TypeInfo};
+    crate::ProxyRow {
+        values: row
+            .columns()
+            .iter()
+            .map(|c| {
+                (
+                    c.name().to_string(),
+                    match c.type_info().name() {
+                        [column-type match arms mirroring the helper removed from proxy.rs above:
+                        boolean, signed/unsigned integers, FLOAT/DOUBLE, BIT/BINARY/BLOB variants,
+                        CHAR/TEXT variants, TIMESTAMP/DATE/TIME/DATETIME/YEAR (chrono or time),
+                        ENUM/SET/GEOMETRY, DECIMAL (bigdecimal or rust_decimal), JSON, otherwise
+                        unreachable!("Unknown column type: {}", c.type_info().name())]
+                    },
+                )
+            })
+            .collect(),
+    }
+}
@@ -1,3 +1,4 @@
+use futures::lock::Mutex;
 use log::LevelFilter;
 use sea_query::Values;
 use std::{future::Future, pin::Pin, sync::Arc};
@@ -64,7 +65,7 @@ impl SqlxPostgresConnector {
             .schema_search_path
             .as_ref()
             .map(|schema| format!("SET search_path = '{schema}'"));
-        let mut pool_options = options.pool_options();
+        let mut pool_options = options.sqlx_pool_options();
         if let Some(sql) = set_search_path_sql {
             pool_options = pool_options.after_connect(move |conn, _| {
                 let sql = sql.clone();
@@ -104,7 +105,7 @@ impl SqlxPostgresPoolConnection {
         debug_print!("{}", stmt);

         let query = sqlx_query(&stmt);
-        let mut conn = self.pool.acquire().await.map_err(conn_acquire_err)?;
+        let mut conn = self.pool.acquire().await.map_err(sqlx_conn_acquire_err)?;
         crate::metric::metric!(self.metric_callback, &stmt, {
             match query.execute(&mut *conn).await {
                 Ok(res) => Ok(res.into()),
@@ -118,7 +119,7 @@ impl SqlxPostgresPoolConnection {
     pub async fn execute_unprepared(&self, sql: &str) -> Result<ExecResult, DbErr> {
         debug_print!("{}", sql);

-        let conn = &mut self.pool.acquire().await.map_err(conn_acquire_err)?;
+        let conn = &mut self.pool.acquire().await.map_err(sqlx_conn_acquire_err)?;
         match conn.execute(sql).await {
             Ok(res) => Ok(res.into()),
             Err(err) => Err(sqlx_error_to_exec_err(err)),
@@ -131,7 +132,7 @@ impl SqlxPostgresPoolConnection {
         debug_print!("{}", stmt);

         let query = sqlx_query(&stmt);
-        let mut conn = self.pool.acquire().await.map_err(conn_acquire_err)?;
+        let mut conn = self.pool.acquire().await.map_err(sqlx_conn_acquire_err)?;
         crate::metric::metric!(self.metric_callback, &stmt, {
             match query.fetch_one(&mut *conn).await {
                 Ok(row) => Ok(Some(row.into())),
@@ -149,7 +150,7 @@ impl SqlxPostgresPoolConnection {
         debug_print!("{}", stmt);

         let query = sqlx_query(&stmt);
-        let mut conn = self.pool.acquire().await.map_err(conn_acquire_err)?;
+        let mut conn = self.pool.acquire().await.map_err(sqlx_conn_acquire_err)?;
         crate::metric::metric!(self.metric_callback, &stmt, {
             match query.fetch_all(&mut *conn).await {
                 Ok(rows) => Ok(rows.into_iter().map(|r| r.into()).collect()),
@@ -163,7 +164,7 @@ impl SqlxPostgresPoolConnection {
     pub async fn stream(&self, stmt: Statement) -> Result<QueryStream, DbErr> {
         debug_print!("{}", stmt);

-        let conn = self.pool.acquire().await.map_err(conn_acquire_err)?;
+        let conn = self.pool.acquire().await.map_err(sqlx_conn_acquire_err)?;
         Ok(QueryStream::from((
             conn,
             stmt,
@@ -178,7 +179,7 @@ impl SqlxPostgresPoolConnection {
         isolation_level: Option<IsolationLevel>,
         access_mode: Option<AccessMode>,
     ) -> Result<DatabaseTransaction, DbErr> {
-        let conn = self.pool.acquire().await.map_err(conn_acquire_err)?;
+        let conn = self.pool.acquire().await.map_err(sqlx_conn_acquire_err)?;
         DatabaseTransaction::new_postgres(
             conn,
             self.metric_callback.clone(),
@@ -204,7 +205,7 @@ impl SqlxPostgresPoolConnection {
         T: Send,
         E: std::error::Error + Send,
     {
-        let conn = self.pool.acquire().await.map_err(conn_acquire_err)?;
+        let conn = self.pool.acquire().await.map_err(sqlx_conn_acquire_err)?;
         let transaction = DatabaseTransaction::new_postgres(
             conn,
             self.metric_callback.clone(),
@@ -225,7 +226,7 @@ impl SqlxPostgresPoolConnection {

     /// Checks if a connection to the database is still valid.
     pub async fn ping(&self) -> Result<(), DbErr> {
-        let conn = &mut self.pool.acquire().await.map_err(conn_acquire_err)?;
+        let conn = &mut self.pool.acquire().await.map_err(sqlx_conn_acquire_err)?;
         match conn.ping().await {
             Ok(_) => Ok(()),
             Err(err) => Err(sqlx_error_to_conn_err(err)),
@@ -288,3 +289,502 @@ pub(crate) async fn set_transaction_config(
     }
     Ok(())
 }
+
+impl
+    From<(
+        PoolConnection<sqlx::Postgres>,
+        Statement,
+        Option<crate::metric::Callback>,
+    )> for crate::QueryStream
+{
+    fn from(
+        (conn, stmt, metric_callback): (
+            PoolConnection<sqlx::Postgres>,
+            Statement,
+            Option<crate::metric::Callback>,
+        ),
+    ) -> Self {
+        crate::QueryStream::build(
+            stmt,
+            crate::InnerConnection::Postgres(conn),
+            metric_callback,
+        )
+    }
+}
+
+impl crate::DatabaseTransaction {
+    pub(crate) async fn new_postgres(
+        inner: PoolConnection<sqlx::Postgres>,
+        metric_callback: Option<crate::metric::Callback>,
+        isolation_level: Option<IsolationLevel>,
+        access_mode: Option<AccessMode>,
+    ) -> Result<crate::DatabaseTransaction, DbErr> {
+        Self::begin(
+            Arc::new(Mutex::new(crate::InnerConnection::Postgres(inner))),
+            crate::DbBackend::Postgres,
+            metric_callback,
+            isolation_level,
+            access_mode,
+        )
+        .await
+    }
+}
+
+#[cfg(feature = "proxy")]
+pub(crate) fn from_sqlx_postgres_row_to_proxy_row(row: &sqlx::postgres::PgRow) -> crate::ProxyRow {
+    // https://docs.rs/sqlx-postgres/0.7.2/src/sqlx_postgres/type_info.rs.html
+    // https://docs.rs/sqlx-postgres/0.7.2/sqlx_postgres/types/index.html
+    use sea_query::Value;
+    use sqlx::{Column, Row, TypeInfo};
+    crate::ProxyRow {
+        values: row
+            .columns()
+            .iter()
+            .map(|c| {
+                (
+                    c.name().to_string(),
+                    match c.type_info().name() {
+                        [column-type match arms mirroring the helper removed from proxy.rs above:
+                        BOOL, "CHAR", SMALLINT/INT/BIGINT and their SERIAL aliases, FLOAT4/FLOAT8,
+                        VARCHAR/CHAR/TEXT/NAME, BYTEA, and their postgres-array "[]" variants; the
+                        section is cut off partway through these arms]
|
||||
sea_query::ArrayType::Bytes,
|
||||
Some(Box::new(
|
||||
row.try_get::<Vec<Vec<u8>>, _>(c.ordinal())
|
||||
.expect("Failed to get bytes array")
|
||||
.iter()
|
||||
.map(|val| Value::Bytes(Some(Box::new(val.clone()))))
|
||||
.collect(),
|
||||
)),
|
||||
),
|
||||
|
||||
#[cfg(feature = "with-bigdecimal")]
|
||||
"NUMERIC" => Value::BigDecimal(Some(Box::new(
|
||||
row.try_get(c.ordinal()).expect("Failed to get numeric"),
|
||||
))),
|
||||
#[cfg(all(
|
||||
feature = "with-rust_decimal",
|
||||
not(feature = "with-bigdecimal")
|
||||
))]
|
||||
"NUMERIC" => Value::Decimal(Some(Box::new(
|
||||
row.try_get(c.ordinal()).expect("Failed to get numeric"),
|
||||
))),
|
||||
|
||||
#[cfg(all(feature = "with-bigdecimal", feature = "postgres-array"))]
|
||||
"NUMERIC[]" => Value::Array(
|
||||
sea_query::ArrayType::BigDecimal,
|
||||
Some(Box::new(
|
||||
row.try_get::<Vec<bigdecimal::BigDecimal>, _>(c.ordinal())
|
||||
.expect("Failed to get numeric array")
|
||||
.iter()
|
||||
.map(|val| Value::BigDecimal(Some(Box::new(val.clone()))))
|
||||
.collect(),
|
||||
)),
|
||||
),
|
||||
#[cfg(all(
|
||||
feature = "with-rust_decimal",
|
||||
not(feature = "with-bigdecimal"),
|
||||
feature = "postgres-array"
|
||||
))]
|
||||
"NUMERIC[]" => Value::Array(
|
||||
sea_query::ArrayType::Decimal,
|
||||
Some(Box::new(
|
||||
row.try_get::<Vec<rust_decimal::Decimal>, _>(c.ordinal())
|
||||
.expect("Failed to get numeric array")
|
||||
.iter()
|
||||
.map(|val| Value::Decimal(Some(Box::new(val.clone()))))
|
||||
.collect(),
|
||||
)),
|
||||
),
|
||||
|
||||
"OID" => Value::BigInt(Some(
|
||||
row.try_get(c.ordinal()).expect("Failed to get oid"),
|
||||
)),
|
||||
#[cfg(feature = "postgres-array")]
|
||||
"OID[]" => Value::Array(
|
||||
sea_query::ArrayType::BigInt,
|
||||
Some(Box::new(
|
||||
row.try_get::<Vec<i64>, _>(c.ordinal())
|
||||
.expect("Failed to get oid array")
|
||||
.iter()
|
||||
.map(|val| Value::BigInt(Some(*val)))
|
||||
.collect(),
|
||||
)),
|
||||
),
|
||||
|
||||
"JSON" | "JSONB" => Value::Json(Some(Box::new(
|
||||
row.try_get(c.ordinal()).expect("Failed to get json"),
|
||||
))),
|
||||
#[cfg(any(feature = "json-array", feature = "postgres-array"))]
|
||||
"JSON[]" | "JSONB[]" => Value::Array(
|
||||
sea_query::ArrayType::Json,
|
||||
Some(Box::new(
|
||||
row.try_get::<Vec<serde_json::Value>, _>(c.ordinal())
|
||||
.expect("Failed to get json array")
|
||||
.iter()
|
||||
.map(|val| Value::Json(Some(Box::new(val.clone()))))
|
||||
.collect(),
|
||||
)),
|
||||
),
|
||||
|
||||
#[cfg(feature = "with-ipnetwork")]
|
||||
"INET" | "CIDR" => Value::IpNetwork(Some(Box::new(
|
||||
row.try_get(c.ordinal()).expect("Failed to get ip address"),
|
||||
))),
|
||||
#[cfg(feature = "with-ipnetwork")]
|
||||
"INET[]" | "CIDR[]" => Value::Array(
|
||||
sea_query::ArrayType::IpNetwork,
|
||||
Some(Box::new(
|
||||
row.try_get::<Vec<ipnetwork::IpNetwork>, _>(c.ordinal())
|
||||
.expect("Failed to get ip address array")
|
||||
.iter()
|
||||
.map(|val| Value::IpNetwork(Some(Box::new(val.clone()))))
|
||||
.collect(),
|
||||
)),
|
||||
),
|
||||
|
||||
#[cfg(feature = "with-mac_address")]
|
||||
"MACADDR" | "MACADDR8" => Value::MacAddress(Some(Box::new(
|
||||
row.try_get(c.ordinal()).expect("Failed to get mac address"),
|
||||
))),
|
||||
#[cfg(all(feature = "with-mac_address", feature = "postgres-array"))]
|
||||
"MACADDR[]" | "MACADDR8[]" => Value::Array(
|
||||
sea_query::ArrayType::MacAddress,
|
||||
Some(Box::new(
|
||||
row.try_get::<Vec<mac_address::MacAddress>, _>(c.ordinal())
|
||||
.expect("Failed to get mac address array")
|
||||
.iter()
|
||||
.map(|val| Value::MacAddress(Some(Box::new(val.clone()))))
|
||||
.collect(),
|
||||
)),
|
||||
),
|
||||
|
||||
#[cfg(feature = "with-chrono")]
|
||||
"TIMESTAMP" => Value::ChronoDateTime(Some(Box::new(
|
||||
row.try_get(c.ordinal()).expect("Failed to get timestamp"),
|
||||
))),
|
||||
#[cfg(all(feature = "with-time", not(feature = "with-chrono")))]
|
||||
"TIMESTAMP" => Value::TimeDateTime(Some(Box::new(
|
||||
row.try_get(c.ordinal()).expect("Failed to get timestamp"),
|
||||
))),
|
||||
|
||||
#[cfg(all(feature = "with-chrono", feature = "postgres-array"))]
|
||||
"TIMESTAMP[]" => Value::Array(
|
||||
sea_query::ArrayType::ChronoDateTime,
|
||||
Some(Box::new(
|
||||
row.try_get::<Vec<chrono::NaiveDateTime>, _>(c.ordinal())
|
||||
.expect("Failed to get timestamp array")
|
||||
.iter()
|
||||
.map(|val| Value::ChronoDateTime(Some(Box::new(val.clone()))))
|
||||
.collect(),
|
||||
)),
|
||||
),
|
||||
#[cfg(all(
|
||||
feature = "with-time",
|
||||
not(feature = "with-chrono"),
|
||||
feature = "postgres-array"
|
||||
))]
|
||||
"TIMESTAMP[]" => Value::Array(
|
||||
sea_query::ArrayType::TimeDateTime,
|
||||
Some(Box::new(
|
||||
row.try_get::<Vec<time::OffsetDateTime>, _>(c.ordinal())
|
||||
.expect("Failed to get timestamp array")
|
||||
.iter()
|
||||
.map(|val| Value::TimeDateTime(Some(Box::new(val.clone()))))
|
||||
.collect(),
|
||||
)),
|
||||
),
|
||||
|
||||
#[cfg(feature = "with-chrono")]
|
||||
"DATE" => Value::ChronoDate(Some(Box::new(
|
||||
row.try_get(c.ordinal()).expect("Failed to get date"),
|
||||
))),
|
||||
#[cfg(all(feature = "with-time", not(feature = "with-chrono")))]
|
||||
"DATE" => Value::TimeDate(Some(Box::new(
|
||||
row.try_get(c.ordinal()).expect("Failed to get date"),
|
||||
))),
|
||||
|
||||
#[cfg(all(feature = "with-chrono", feature = "postgres-array"))]
|
||||
"DATE[]" => Value::Array(
|
||||
sea_query::ArrayType::ChronoDate,
|
||||
Some(Box::new(
|
||||
row.try_get::<Vec<chrono::NaiveDate>, _>(c.ordinal())
|
||||
.expect("Failed to get date array")
|
||||
.iter()
|
||||
.map(|val| Value::ChronoDate(Some(Box::new(val.clone()))))
|
||||
.collect(),
|
||||
)),
|
||||
),
|
||||
#[cfg(all(
|
||||
feature = "with-time",
|
||||
not(feature = "with-chrono"),
|
||||
feature = "postgres-array"
|
||||
))]
|
||||
"DATE[]" => Value::Array(
|
||||
sea_query::ArrayType::TimeDate,
|
||||
Some(Box::new(
|
||||
row.try_get::<Vec<time::Date>, _>(c.ordinal())
|
||||
.expect("Failed to get date array")
|
||||
.iter()
|
||||
.map(|val| Value::TimeDate(Some(Box::new(val.clone()))))
|
||||
.collect(),
|
||||
)),
|
||||
),
|
||||
|
||||
#[cfg(feature = "with-chrono")]
|
||||
"TIME" => Value::ChronoTime(Some(Box::new(
|
||||
row.try_get(c.ordinal()).expect("Failed to get time"),
|
||||
))),
|
||||
#[cfg(all(feature = "with-time", not(feature = "with-chrono")))]
|
||||
"TIME" => Value::TimeTime(Some(Box::new(
|
||||
row.try_get(c.ordinal()).expect("Failed to get time"),
|
||||
))),
|
||||
|
||||
#[cfg(all(feature = "with-chrono", feature = "postgres-array"))]
|
||||
"TIME[]" => Value::Array(
|
||||
sea_query::ArrayType::ChronoTime,
|
||||
Some(Box::new(
|
||||
row.try_get::<Vec<chrono::NaiveTime>, _>(c.ordinal())
|
||||
.expect("Failed to get time array")
|
||||
.iter()
|
||||
.map(|val| Value::ChronoTime(Some(Box::new(val.clone()))))
|
||||
.collect(),
|
||||
)),
|
||||
),
|
||||
#[cfg(all(
|
||||
feature = "with-time",
|
||||
not(feature = "with-chrono"),
|
||||
feature = "postgres-array"
|
||||
))]
|
||||
"TIME[]" => Value::Array(
|
||||
sea_query::ArrayType::TimeTime,
|
||||
Some(Box::new(
|
||||
row.try_get::<Vec<time::Time>, _>(c.ordinal())
|
||||
.expect("Failed to get time array")
|
||||
.iter()
|
||||
.map(|val| Value::TimeTime(Some(Box::new(val.clone()))))
|
||||
.collect(),
|
||||
)),
|
||||
),
|
||||
|
||||
#[cfg(feature = "with-chrono")]
|
||||
"TIMESTAMPTZ" => Value::ChronoDateTimeUtc(Some(Box::new(
|
||||
row.try_get(c.ordinal()).expect("Failed to get timestamptz"),
|
||||
))),
|
||||
#[cfg(all(feature = "with-time", not(feature = "with-chrono")))]
|
||||
"TIMESTAMPTZ" => Value::TimeDateTime(Some(Box::new(
|
||||
row.try_get(c.ordinal()).expect("Failed to get timestamptz"),
|
||||
))),
|
||||
|
||||
#[cfg(all(feature = "with-chrono", feature = "postgres-array"))]
|
||||
"TIMESTAMPTZ[]" => Value::Array(
|
||||
sea_query::ArrayType::ChronoDateTimeUtc,
|
||||
Some(Box::new(
|
||||
row.try_get::<Vec<chrono::DateTime<chrono::Utc>>, _>(c.ordinal())
|
||||
.expect("Failed to get timestamptz array")
|
||||
.iter()
|
||||
.map(|val| {
|
||||
Value::ChronoDateTimeUtc(Some(Box::new(val.clone())))
|
||||
})
|
||||
.collect(),
|
||||
)),
|
||||
),
|
||||
#[cfg(all(
|
||||
feature = "with-time",
|
||||
not(feature = "with-chrono"),
|
||||
feature = "postgres-array"
|
||||
))]
|
||||
"TIMESTAMPTZ[]" => Value::Array(
|
||||
sea_query::ArrayType::TimeDateTime,
|
||||
Some(Box::new(
|
||||
row.try_get::<Vec<time::OffsetDateTime>, _>(c.ordinal())
|
||||
.expect("Failed to get timestamptz array")
|
||||
.iter()
|
||||
.map(|val| Value::TimeDateTime(Some(Box::new(val.clone()))))
|
||||
.collect(),
|
||||
)),
|
||||
),
|
||||
|
||||
#[cfg(feature = "with-chrono")]
|
||||
"TIMETZ" => Value::ChronoTime(Some(Box::new(
|
||||
row.try_get(c.ordinal()).expect("Failed to get timetz"),
|
||||
))),
|
||||
#[cfg(all(feature = "with-time", not(feature = "with-chrono")))]
|
||||
"TIMETZ" => Value::TimeTime(Some(Box::new(
|
||||
row.try_get(c.ordinal()).expect("Failed to get timetz"),
|
||||
))),
|
||||
|
||||
#[cfg(all(feature = "with-chrono", feature = "postgres-array"))]
|
||||
"TIMETZ[]" => Value::Array(
|
||||
sea_query::ArrayType::ChronoTime,
|
||||
Some(Box::new(
|
||||
row.try_get::<Vec<chrono::NaiveTime>, _>(c.ordinal())
|
||||
.expect("Failed to get timetz array")
|
||||
.iter()
|
||||
.map(|val| Value::ChronoTime(Some(Box::new(val.clone()))))
|
||||
.collect(),
|
||||
)),
|
||||
),
|
||||
#[cfg(all(
|
||||
feature = "with-time",
|
||||
not(feature = "with-chrono"),
|
||||
feature = "postgres-array"
|
||||
))]
|
||||
"TIMETZ[]" => Value::Array(
|
||||
sea_query::ArrayType::TimeTime,
|
||||
Some(Box::new(
|
||||
row.try_get::<Vec<time::Time>, _>(c.ordinal())
|
||||
.expect("Failed to get timetz array")
|
||||
.iter()
|
||||
.map(|val| Value::TimeTime(Some(Box::new(val.clone()))))
|
||||
.collect(),
|
||||
)),
|
||||
),
|
||||
|
||||
#[cfg(feature = "with-uuid")]
|
||||
"UUID" => Value::Uuid(Some(Box::new(
|
||||
row.try_get(c.ordinal()).expect("Failed to get uuid"),
|
||||
))),
|
||||
|
||||
#[cfg(all(feature = "with-uuid", feature = "postgres-array"))]
|
||||
"UUID[]" => Value::Array(
|
||||
sea_query::ArrayType::Uuid,
|
||||
Some(Box::new(
|
||||
row.try_get::<Vec<uuid::Uuid>, _>(c.ordinal())
|
||||
.expect("Failed to get uuid array")
|
||||
.iter()
|
||||
.map(|val| Value::Uuid(Some(Box::new(val.clone()))))
|
||||
.collect(),
|
||||
)),
|
||||
),
|
||||
|
||||
_ => unreachable!("Unknown column type: {}", c.type_info().name()),
|
||||
},
|
||||
)
|
||||
})
|
||||
.collect(),
|
||||
}
|
||||
}
|
||||
|
@ -1,3 +1,4 @@
use futures::lock::Mutex;
use log::LevelFilter;
use sea_query::Values;
use std::{future::Future, pin::Pin, sync::Arc};
@ -68,7 +69,7 @@ impl SqlxSqliteConnector {
if options.get_max_connections().is_none() {
options.max_connections(1);
}
match options.pool_options().connect_with(opt).await {
match options.sqlx_pool_options().connect_with(opt).await {
Ok(pool) => {
let pool = SqlxSqlitePoolConnection {
pool,
@ -105,7 +106,7 @@ impl SqlxSqlitePoolConnection {
debug_print!("{}", stmt);

let query = sqlx_query(&stmt);
let mut conn = self.pool.acquire().await.map_err(conn_acquire_err)?;
let mut conn = self.pool.acquire().await.map_err(sqlx_conn_acquire_err)?;
crate::metric::metric!(self.metric_callback, &stmt, {
match query.execute(&mut *conn).await {
Ok(res) => Ok(res.into()),
@ -119,7 +120,7 @@ impl SqlxSqlitePoolConnection {
pub async fn execute_unprepared(&self, sql: &str) -> Result<ExecResult, DbErr> {
debug_print!("{}", sql);

let conn = &mut self.pool.acquire().await.map_err(conn_acquire_err)?;
let conn = &mut self.pool.acquire().await.map_err(sqlx_conn_acquire_err)?;
match conn.execute(sql).await {
Ok(res) => Ok(res.into()),
Err(err) => Err(sqlx_error_to_exec_err(err)),
@ -132,7 +133,7 @@ impl SqlxSqlitePoolConnection {
debug_print!("{}", stmt);

let query = sqlx_query(&stmt);
let mut conn = self.pool.acquire().await.map_err(conn_acquire_err)?;
let mut conn = self.pool.acquire().await.map_err(sqlx_conn_acquire_err)?;
crate::metric::metric!(self.metric_callback, &stmt, {
match query.fetch_one(&mut *conn).await {
Ok(row) => Ok(Some(row.into())),
@ -150,7 +151,7 @@ impl SqlxSqlitePoolConnection {
debug_print!("{}", stmt);

let query = sqlx_query(&stmt);
let mut conn = self.pool.acquire().await.map_err(conn_acquire_err)?;
let mut conn = self.pool.acquire().await.map_err(sqlx_conn_acquire_err)?;
crate::metric::metric!(self.metric_callback, &stmt, {
match query.fetch_all(&mut *conn).await {
Ok(rows) => Ok(rows.into_iter().map(|r| r.into()).collect()),
@ -164,7 +165,7 @@ impl SqlxSqlitePoolConnection {
pub async fn stream(&self, stmt: Statement) -> Result<QueryStream, DbErr> {
debug_print!("{}", stmt);

let conn = self.pool.acquire().await.map_err(conn_acquire_err)?;
let conn = self.pool.acquire().await.map_err(sqlx_conn_acquire_err)?;
Ok(QueryStream::from((
conn,
stmt,
@ -179,7 +180,7 @@ impl SqlxSqlitePoolConnection {
isolation_level: Option<IsolationLevel>,
access_mode: Option<AccessMode>,
) -> Result<DatabaseTransaction, DbErr> {
let conn = self.pool.acquire().await.map_err(conn_acquire_err)?;
let conn = self.pool.acquire().await.map_err(sqlx_conn_acquire_err)?;
DatabaseTransaction::new_sqlite(
conn,
self.metric_callback.clone(),
@ -205,7 +206,7 @@ impl SqlxSqlitePoolConnection {
T: Send,
E: std::error::Error + Send,
{
let conn = self.pool.acquire().await.map_err(conn_acquire_err)?;
let conn = self.pool.acquire().await.map_err(sqlx_conn_acquire_err)?;
let transaction = DatabaseTransaction::new_sqlite(
conn,
self.metric_callback.clone(),
@ -226,7 +227,7 @@ impl SqlxSqlitePoolConnection {

/// Checks if a connection to the database is still valid.
pub async fn ping(&self) -> Result<(), DbErr> {
let conn = &mut self.pool.acquire().await.map_err(conn_acquire_err)?;
let conn = &mut self.pool.acquire().await.map_err(sqlx_conn_acquire_err)?;
match conn.ping().await {
Ok(_) => Ok(()),
Err(err) => Err(sqlx_error_to_conn_err(err)),
@ -325,6 +326,115 @@ fn ensure_returning_version(version: &str) -> Result<(), DbErr> {
}
}

impl
From<(
PoolConnection<sqlx::Sqlite>,
Statement,
Option<crate::metric::Callback>,
)> for crate::QueryStream
{
fn from(
(conn, stmt, metric_callback): (
PoolConnection<sqlx::Sqlite>,
Statement,
Option<crate::metric::Callback>,
),
) -> Self {
crate::QueryStream::build(stmt, crate::InnerConnection::Sqlite(conn), metric_callback)
}
}

impl crate::DatabaseTransaction {
pub(crate) async fn new_sqlite(
inner: PoolConnection<sqlx::Sqlite>,
metric_callback: Option<crate::metric::Callback>,
isolation_level: Option<IsolationLevel>,
access_mode: Option<AccessMode>,
) -> Result<crate::DatabaseTransaction, DbErr> {
Self::begin(
Arc::new(Mutex::new(crate::InnerConnection::Sqlite(inner))),
crate::DbBackend::Sqlite,
metric_callback,
isolation_level,
access_mode,
)
.await
}
}

#[cfg(feature = "proxy")]
pub(crate) fn from_sqlx_sqlite_row_to_proxy_row(row: &sqlx::sqlite::SqliteRow) -> crate::ProxyRow {
// https://docs.rs/sqlx-sqlite/0.7.2/src/sqlx_sqlite/type_info.rs.html
// https://docs.rs/sqlx-sqlite/0.7.2/sqlx_sqlite/types/index.html
use sea_query::Value;
use sqlx::{Column, Row, TypeInfo};
crate::ProxyRow {
values: row
.columns()
.iter()
.map(|c| {
(
c.name().to_string(),
match c.type_info().name() {
"BOOLEAN" => Value::Bool(Some(
row.try_get(c.ordinal()).expect("Failed to get boolean"),
)),

"INTEGER" => Value::Int(Some(
row.try_get(c.ordinal()).expect("Failed to get integer"),
)),

"BIGINT" | "INT8" => Value::BigInt(Some(
row.try_get(c.ordinal()).expect("Failed to get big integer"),
)),

"REAL" => Value::Double(Some(
row.try_get(c.ordinal()).expect("Failed to get double"),
)),

"TEXT" => Value::String(Some(Box::new(
row.try_get(c.ordinal()).expect("Failed to get string"),
))),

"BLOB" => Value::Bytes(Some(Box::new(
row.try_get(c.ordinal()).expect("Failed to get bytes"),
))),

#[cfg(feature = "with-chrono")]
"DATETIME" => Value::ChronoDateTimeUtc(Some(Box::new(
row.try_get(c.ordinal()).expect("Failed to get timestamp"),
))),
#[cfg(all(feature = "with-time", not(feature = "with-chrono")))]
"DATETIME" => Value::TimeDateTimeWithTimeZone(Some(Box::new(
row.try_get(c.ordinal()).expect("Failed to get timestamp"),
))),

#[cfg(feature = "with-chrono")]
"DATE" => Value::ChronoDate(Some(Box::new(
row.try_get(c.ordinal()).expect("Failed to get date"),
))),
#[cfg(all(feature = "with-time", not(feature = "with-chrono")))]
"DATE" => Value::TimeDate(Some(Box::new(
row.try_get(c.ordinal()).expect("Failed to get date"),
))),

#[cfg(feature = "with-chrono")]
"TIME" => Value::ChronoTime(Some(Box::new(
row.try_get(c.ordinal()).expect("Failed to get time"),
))),
#[cfg(all(feature = "with-time", not(feature = "with-chrono")))]
"TIME" => Value::TimeTime(Some(Box::new(
row.try_get(c.ordinal()).expect("Failed to get time"),
))),

_ => unreachable!("Unknown column type: {}", c.type_info().name()),
},
)
})
.collect(),
}
}

#[cfg(all(test, feature = "sqlite-use-returning-for-3_35"))]
mod tests {
use super::*;

@ -1,4 +1,4 @@
use crate::{EntityName, Iden, IdenStatic, IntoSimpleExpr, Iterable};
use crate::{DbBackend, EntityName, Iden, IdenStatic, IntoSimpleExpr, Iterable};
use sea_query::{
Alias, BinOper, DynIden, Expr, IntoIden, SeaRc, SelectStatement, SimpleExpr, Value,
};
@ -247,6 +247,14 @@ pub trait ColumnTrait: IdenStatic + Iterable + FromStr {
Expr::expr(self.into_simple_expr())
}

/// Construct a returning [`Expr`].
#[allow(clippy::match_single_binding)]
fn into_returning_expr(self, db_backend: DbBackend) -> Expr {
match db_backend {
_ => Expr::col(self),
}
}

/// Cast column expression used in select statement.
/// It only cast database enum as text if it's an enum column.
fn select_as(&self, expr: Expr) -> SimpleExpr {

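As a hedged illustration (not part of the diff) of how this new default is consumed, the insert and update hunks below build their RETURNING clause roughly like this, with `E` standing in for a concrete entity type:

// Sketch only, following the patterns added below in insert.rs and update.rs.
let db_backend = db.get_database_backend();
let returning = Query::returning()
    .exprs(E::Column::iter().map(|c| c.select_as(c.into_returning_expr(db_backend))));
query.returning(returning);
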
src/error.rs
@ -151,16 +151,6 @@ where
DbErr::Json(s.to_string())
}

#[allow(dead_code)]
#[cfg(feature = "sqlx-dep")]
pub(crate) fn conn_acquire_err(sqlx_err: sqlx::Error) -> DbErr {
match sqlx_err {
sqlx::Error::PoolTimedOut => DbErr::ConnectionAcquire(ConnAcquireErr::Timeout),
sqlx::Error::PoolClosed => DbErr::ConnectionAcquire(ConnAcquireErr::ConnectionClosed),
_ => DbErr::Conn(RuntimeErr::SqlxError(sqlx_err)),
}
}

/// An error from unsuccessful SQL query
#[derive(Error, Debug, Clone, PartialEq, Eq)]
#[non_exhaustive]

@ -3,7 +3,7 @@ use crate::{
Iterable, PrimaryKeyToColumn, PrimaryKeyTrait, SelectModel, SelectorRaw, Statement, TryFromU64,
TryInsert,
};
use sea_query::{Expr, FromValueTuple, Iden, InsertStatement, IntoColumnRef, Query, ValueTuple};
use sea_query::{FromValueTuple, Iden, InsertStatement, Query, ValueTuple};
use std::{future::Future, marker::PhantomData};

/// Defines a structure to perform INSERT operations in an ActiveModel
@ -118,10 +118,12 @@ where
// so that self is dropped before entering await
let mut query = self.query;
if db.support_returning() && <A::Entity as EntityTrait>::PrimaryKey::iter().count() > 0 {
let returning = Query::returning().exprs(
<A::Entity as EntityTrait>::PrimaryKey::iter()
.map(|c| c.into_column().select_as(Expr::col(c.into_column_ref()))),
);
let db_backend = db.get_database_backend();
let returning =
Query::returning().exprs(<A::Entity as EntityTrait>::PrimaryKey::iter().map(|c| {
c.into_column()
.select_as(c.into_column().into_returning_expr(db_backend))
}));
query.returning(returning);
}
Inserter::<A>::new(self.primary_key, query).exec(db)
@ -275,7 +277,8 @@ where
let found = match db.support_returning() {
true => {
let returning = Query::returning().exprs(
<A::Entity as EntityTrait>::Column::iter().map(|c| c.select_as(Expr::col(c))),
<A::Entity as EntityTrait>::Column::iter()
.map(|c| c.select_as(c.into_returning_expr(db_backend))),
);
insert_statement.returning(returning);
SelectorRaw::<SelectModel<<A::Entity as EntityTrait>::Model>>::from_statement(

@ -2,7 +2,7 @@ use crate::{
error::*, ActiveModelTrait, ColumnTrait, ConnectionTrait, EntityTrait, IntoActiveModel,
Iterable, PrimaryKeyTrait, SelectModel, SelectorRaw, UpdateMany, UpdateOne,
};
use sea_query::{Expr, FromValueTuple, Query, UpdateStatement};
use sea_query::{FromValueTuple, Query, UpdateStatement};

/// Defines an update operation
#[derive(Clone, Debug)]
@ -114,10 +114,11 @@ impl Updater {

match db.support_returning() {
true => {
let returning = Query::returning()
.exprs(Column::<A>::iter().map(|c| c.select_as(Expr::col(c))));
self.query.returning(returning);
let db_backend = db.get_database_backend();
let returning = Query::returning().exprs(
Column::<A>::iter().map(|c| c.select_as(c.into_returning_expr(db_backend))),
);
self.query.returning(returning);
let found: Option<Model<A>> = SelectorRaw::<SelectModel<Model<A>>>::from_statement(
db_backend.build(&self.query),
)
@ -148,10 +149,11 @@ impl Updater {

match db.support_returning() {
true => {
let returning =
Query::returning().exprs(E::Column::iter().map(|c| c.select_as(Expr::col(c))));
self.query.returning(returning);
let db_backend = db.get_database_backend();
let returning = Query::returning().exprs(
E::Column::iter().map(|c| c.select_as(c.into_returning_expr(db_backend))),
);
self.query.returning(returning);
let models: Vec<E::Model> = SelectorRaw::<SelectModel<E::Model>>::from_statement(
db_backend.build(&self.query),
)

@ -13,11 +13,6 @@ use sea_orm::{
|
||||
};
|
||||
|
||||
#[sea_orm_macros::test]
|
||||
#[cfg(any(
|
||||
feature = "sqlx-mysql",
|
||||
feature = "sqlx-sqlite",
|
||||
feature = "sqlx-postgres"
|
||||
))]
|
||||
async fn main() -> Result<(), DbErr> {
|
||||
let ctx = TestContext::new("active_enum_tests").await;
|
||||
create_tables(&ctx.db).await?;
|
||||
|
@ -2,7 +2,7 @@ pub mod common;
|
||||
|
||||
use common::features::*;
|
||||
use pretty_assertions::assert_eq;
|
||||
use sea_orm::{entity::prelude::*, entity::*, DatabaseConnection};
|
||||
use sea_orm::{entity::prelude::*, entity::*, DatabaseConnection, NotSet};
|
||||
|
||||
#[sea_orm_macros::test]
|
||||
#[cfg(feature = "sqlx-postgres")]
|
||||
@ -26,7 +26,12 @@ pub async fn create_and_update(db: &DatabaseConnection) -> Result<(), DbErr> {
|
||||
bit64: 64,
|
||||
};
|
||||
|
||||
let res = bits.clone().into_active_model().insert(db).await?;
|
||||
let res = bits::ActiveModel {
|
||||
id: NotSet,
|
||||
..bits.clone().into_active_model()
|
||||
}
|
||||
.insert(db)
|
||||
.await?;
|
||||
|
||||
let model = Bits::find().one(db).await?;
|
||||
assert_eq!(model, Some(res));
|
||||
|
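A hedged note on the recurring test change here and in the hunks that follow: the tests now spread the fixture model into an ActiveModel with its primary key explicitly `NotSet`, presumably so the auto-increment key is assigned by the database (and reported back via RETURNING or last_insert_id) rather than being inserted verbatim:

// Pattern used across these test hunks (sketch; `some_entity` and `model` are placeholders).
let res = some_entity::ActiveModel {
    id: NotSet,
    ..model.clone().into_active_model()
}
.insert(db)
.await?;
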
@ -5,11 +5,6 @@ use pretty_assertions::assert_eq;
|
||||
use sea_orm::{entity::prelude::*, entity::*, DatabaseConnection};
|
||||
|
||||
#[sea_orm_macros::test]
|
||||
#[cfg(any(
|
||||
feature = "sqlx-mysql",
|
||||
feature = "sqlx-sqlite",
|
||||
feature = "sqlx-postgres"
|
||||
))]
|
||||
async fn main() -> Result<(), DbErr> {
|
||||
let ctx = TestContext::new("byte_primary_key_tests").await;
|
||||
create_tables(&ctx.db).await?;
|
||||
|
@ -5,11 +5,6 @@ use pretty_assertions::assert_eq;
|
||||
use sea_orm::prelude::*;
|
||||
|
||||
#[sea_orm_macros::test]
|
||||
#[cfg(any(
|
||||
feature = "sqlx-mysql",
|
||||
feature = "sqlx-sqlite",
|
||||
feature = "sqlx-postgres"
|
||||
))]
|
||||
pub async fn connection_ping() {
|
||||
let ctx = TestContext::new("connection_ping").await;
|
||||
|
||||
|
@ -10,11 +10,6 @@ use sea_orm::DatabaseConnection;
|
||||
// DATABASE_URL="mysql://root:root@localhost" cargo test --features sqlx-mysql,runtime-async-std-native-tls --test crud_tests
|
||||
// DATABASE_URL="postgres://root:root@localhost" cargo test --features sqlx-postgres,runtime-async-std-native-tls --test crud_tests
|
||||
#[sea_orm_macros::test]
|
||||
#[cfg(any(
|
||||
feature = "sqlx-mysql",
|
||||
feature = "sqlx-sqlite",
|
||||
feature = "sqlx-postgres"
|
||||
))]
|
||||
async fn main() {
|
||||
let ctx = TestContext::new("bakery_chain_schema_crud_tests").await;
|
||||
create_tables(&ctx.db).await.unwrap();
|
||||
|
@ -6,11 +6,6 @@ use sea_orm::{entity::prelude::*, DerivePartialModel, FromQueryResult, QuerySele
|
||||
use serde_json::json;
|
||||
|
||||
#[sea_orm_macros::test]
|
||||
#[cfg(any(
|
||||
feature = "sqlx-mysql",
|
||||
feature = "sqlx-sqlite",
|
||||
feature = "sqlx-postgres"
|
||||
))]
|
||||
async fn cursor_tests() -> Result<(), DbErr> {
|
||||
let ctx = TestContext::new("cursor_tests").await;
|
||||
create_tables(&ctx.db).await?;
|
||||
|
@ -1,13 +1,8 @@
|
||||
pub mod common;
|
||||
pub use common::{features::*, setup::*, TestContext};
|
||||
use sea_orm::{entity::prelude::*, DatabaseConnection, IntoActiveModel};
|
||||
use sea_orm::{entity::prelude::*, DatabaseConnection, IntoActiveModel, NotSet};
|
||||
|
||||
#[sea_orm_macros::test]
|
||||
#[cfg(any(
|
||||
feature = "sqlx-mysql",
|
||||
feature = "sqlx-sqlite",
|
||||
feature = "sqlx-postgres"
|
||||
))]
|
||||
async fn main() -> Result<(), DbErr> {
|
||||
let ctx = TestContext::new("delete_by_id_tests").await;
|
||||
create_tables(&ctx.db).await?;
|
||||
@ -26,9 +21,12 @@ pub async fn create_and_delete_applog(db: &DatabaseConnection) -> Result<(), DbE
|
||||
created_at: "2021-09-17T17:50:20+08:00".parse().unwrap(),
|
||||
};
|
||||
|
||||
Applog::insert(log1.clone().into_active_model())
|
||||
.exec(db)
|
||||
.await?;
|
||||
Applog::insert(applog::ActiveModel {
|
||||
id: NotSet,
|
||||
..log1.clone().into_active_model()
|
||||
})
|
||||
.exec(db)
|
||||
.await?;
|
||||
|
||||
let log2 = applog::Model {
|
||||
id: 2,
|
||||
@ -37,9 +35,12 @@ pub async fn create_and_delete_applog(db: &DatabaseConnection) -> Result<(), DbE
|
||||
created_at: "2022-09-17T17:50:20+08:00".parse().unwrap(),
|
||||
};
|
||||
|
||||
Applog::insert(log2.clone().into_active_model())
|
||||
.exec(db)
|
||||
.await?;
|
||||
Applog::insert(applog::ActiveModel {
|
||||
id: NotSet,
|
||||
..log2.clone().into_active_model()
|
||||
})
|
||||
.exec(db)
|
||||
.await?;
|
||||
|
||||
let delete_res = Applog::delete_by_id(2).exec(db).await?;
|
||||
assert_eq!(delete_res.rows_affected, 1);
|
||||
|
@ -3,17 +3,12 @@ pub mod common;
|
||||
pub use common::{features::*, setup::*, TestContext};
|
||||
use pretty_assertions::assert_eq;
|
||||
use sea_orm::{
|
||||
entity::prelude::*, DatabaseConnection, Delete, IntoActiveModel, Iterable, QueryTrait, Set,
|
||||
Update,
|
||||
entity::prelude::*, DatabaseConnection, Delete, IntoActiveModel, Iterable, NotSet, QueryTrait,
|
||||
Set, Update,
|
||||
};
|
||||
use sea_query::{Expr, Query};
|
||||
|
||||
#[sea_orm_macros::test]
|
||||
#[cfg(any(
|
||||
feature = "sqlx-mysql",
|
||||
feature = "sqlx-sqlite",
|
||||
feature = "sqlx-postgres"
|
||||
))]
|
||||
async fn main() -> Result<(), DbErr> {
|
||||
let ctx = TestContext::new("dyn_table_name_tests").await;
|
||||
create_tables(&ctx.db).await?;
|
||||
@ -36,7 +31,10 @@ pub async fn dyn_table_name_lazy_static(db: &DatabaseConnection) -> Result<(), D
|
||||
name: "1st Row".into(),
|
||||
};
|
||||
// Prepare insert statement
|
||||
let mut insert = Entity::insert(model.clone().into_active_model());
|
||||
let mut insert = Entity::insert(ActiveModel {
|
||||
id: NotSet,
|
||||
..model.clone().into_active_model()
|
||||
});
|
||||
// Reset the table name of insert statement
|
||||
insert.query().into_table(entity.table_ref());
|
||||
// Execute the insert statement
|
||||
|
@ -11,11 +11,6 @@ pub use crud::*;
|
||||
use sea_orm::{DbConn, TryInsertResult};
|
||||
|
||||
#[sea_orm_macros::test]
|
||||
#[cfg(any(
|
||||
feature = "sqlx-mysql",
|
||||
feature = "sqlx-sqlite",
|
||||
feature = "sqlx-postgres"
|
||||
))]
|
||||
async fn main() {
|
||||
let ctx = TestContext::new("bakery_chain_empty_insert_tests").await;
|
||||
create_tables(&ctx.db).await.unwrap();
|
||||
|
@ -10,11 +10,6 @@ use sea_orm::{
|
||||
};
|
||||
|
||||
#[sea_orm_macros::test]
|
||||
#[cfg(any(
|
||||
feature = "sqlx-mysql",
|
||||
feature = "sqlx-sqlite",
|
||||
feature = "sqlx-postgres"
|
||||
))]
|
||||
async fn main() -> Result<(), DbErr> {
|
||||
let ctx = TestContext::new("enum_primary_key_tests").await;
|
||||
create_tables(&ctx.db).await?;
|
||||
|
@ -9,7 +9,7 @@ pub use common::{
|
||||
TestContext,
|
||||
};
|
||||
use pretty_assertions::assert_eq;
|
||||
use sea_orm::{entity::prelude::*, entity::*, DatabaseConnection};
|
||||
use sea_orm::{entity::prelude::*, entity::*, DatabaseConnection, NotSet};
|
||||
|
||||
#[sea_orm_macros::test]
|
||||
#[cfg(all(feature = "sqlx-postgres", feature = "postgres-array"))]
|
||||
@ -33,7 +33,12 @@ pub async fn insert_event_trigger(db: &DatabaseConnection) -> Result<(), DbErr>
|
||||
),
|
||||
};
|
||||
|
||||
let result = event_trigger.clone().into_active_model().insert(db).await?;
|
||||
let result = event_trigger::ActiveModel {
|
||||
id: NotSet,
|
||||
..event_trigger.clone().into_active_model()
|
||||
}
|
||||
.insert(db)
|
||||
.await?;
|
||||
|
||||
assert_eq!(result, event_trigger);
|
||||
|
||||
|
@ -5,11 +5,6 @@ use pretty_assertions::assert_eq;
|
||||
use sea_orm::{entity::prelude::*, ConnectionTrait, DatabaseConnection};
|
||||
|
||||
#[sea_orm_macros::test]
|
||||
#[cfg(any(
|
||||
feature = "sqlx-mysql",
|
||||
feature = "sqlx-sqlite",
|
||||
feature = "sqlx-postgres"
|
||||
))]
|
||||
async fn main() -> Result<(), DbErr> {
|
||||
let ctx = TestContext::new("execute_unprepared_tests").await;
|
||||
create_tables(&ctx.db).await?;
|
||||
@ -24,7 +19,7 @@ pub async fn execute_unprepared(db: &DatabaseConnection) -> Result<(), DbErr> {
|
||||
|
||||
db.execute_unprepared(
|
||||
[
|
||||
"INSERT INTO insert_default VALUES (1), (2), (3), (4), (5)",
|
||||
"INSERT INTO insert_default (id) VALUES (1), (2), (3), (4), (5)",
|
||||
"DELETE FROM insert_default WHERE id % 2 = 0",
|
||||
]
|
||||
.join(";")
|
||||
|
@ -5,11 +5,6 @@ use pretty_assertions::assert_eq;
|
||||
use sea_orm::entity::prelude::*;
|
||||
|
||||
#[sea_orm_macros::test]
|
||||
#[cfg(any(
|
||||
feature = "sqlx-mysql",
|
||||
feature = "sqlx-sqlite",
|
||||
feature = "sqlx-postgres"
|
||||
))]
|
||||
async fn main() -> Result<(), DbErr> {
|
||||
let ctx = TestContext::new("insert_default_tests").await;
|
||||
create_tables(&ctx.db).await?;
|
||||
|
@ -2,15 +2,10 @@ pub mod common;
|
||||
|
||||
pub use common::{features::*, setup::*, TestContext};
|
||||
use pretty_assertions::assert_eq;
|
||||
use sea_orm::{entity::prelude::*, entity::*, DatabaseConnection};
|
||||
use sea_orm::{entity::prelude::*, entity::*, DatabaseConnection, NotSet};
|
||||
use serde_json::json;
|
||||
|
||||
#[sea_orm_macros::test]
|
||||
#[cfg(any(
|
||||
feature = "sqlx-mysql",
|
||||
feature = "sqlx-sqlite",
|
||||
feature = "sqlx-postgres"
|
||||
))]
|
||||
async fn main() -> Result<(), DbErr> {
|
||||
let ctx = TestContext::new("json_struct_tests").await;
|
||||
create_tables(&ctx.db).await?;
|
||||
@ -46,7 +41,12 @@ pub async fn insert_json_struct_1(db: &DatabaseConnection) -> Result<(), DbErr>
|
||||
}),
|
||||
};
|
||||
|
||||
let result = model.clone().into_active_model().insert(db).await?;
|
||||
let result = ActiveModel {
|
||||
id: NotSet,
|
||||
..model.clone().into_active_model()
|
||||
}
|
||||
.insert(db)
|
||||
.await?;
|
||||
|
||||
assert_eq!(result, model);
|
||||
|
||||
@ -81,7 +81,12 @@ pub async fn insert_json_struct_2(db: &DatabaseConnection) -> Result<(), DbErr>
|
||||
json_value_opt: None,
|
||||
};
|
||||
|
||||
let result = model.clone().into_active_model().insert(db).await?;
|
||||
let result = ActiveModel {
|
||||
id: NotSet,
|
||||
..model.clone().into_active_model()
|
||||
}
|
||||
.insert(db)
|
||||
.await?;
|
||||
|
||||
assert_eq!(result, model);
|
||||
|
||||
|
@ -5,11 +5,6 @@ use pretty_assertions::assert_eq;
|
||||
use sea_orm::{entity::prelude::*, entity::*, DatabaseConnection};
|
||||
|
||||
#[sea_orm_macros::test]
|
||||
#[cfg(any(
|
||||
feature = "sqlx-mysql",
|
||||
feature = "sqlx-sqlite",
|
||||
feature = "sqlx-postgres"
|
||||
))]
|
||||
async fn main() -> Result<(), DbErr> {
|
||||
let ctx = TestContext::new("json_vec_tests").await;
|
||||
create_tables(&ctx.db).await?;
|
||||
@ -32,7 +27,12 @@ pub async fn insert_json_vec(db: &DatabaseConnection) -> Result<(), DbErr> {
|
||||
])),
|
||||
};
|
||||
|
||||
let result = json_vec.clone().into_active_model().insert(db).await?;
|
||||
let result = json_vec::ActiveModel {
|
||||
id: NotSet,
|
||||
..json_vec.clone().into_active_model()
|
||||
}
|
||||
.insert(db)
|
||||
.await?;
|
||||
|
||||
assert_eq!(result, json_vec);
|
||||
|
||||
@ -48,7 +48,7 @@ pub async fn insert_json_vec(db: &DatabaseConnection) -> Result<(), DbErr> {
|
||||
|
||||
pub async fn insert_json_string_vec_derive(db: &DatabaseConnection) -> Result<(), DbErr> {
|
||||
let json_vec = json_vec_derive::json_string_vec::Model {
|
||||
id: 2,
|
||||
id: 1,
|
||||
str_vec: Some(json_vec_derive::json_string_vec::StringVec(vec![
|
||||
"4".to_string(),
|
||||
"5".to_string(),
|
||||
@ -56,7 +56,12 @@ pub async fn insert_json_string_vec_derive(db: &DatabaseConnection) -> Result<()
|
||||
])),
|
||||
};
|
||||
|
||||
let result = json_vec.clone().into_active_model().insert(db).await?;
|
||||
let result = json_vec_derive::json_string_vec::ActiveModel {
|
||||
id: NotSet,
|
||||
..json_vec.clone().into_active_model()
|
||||
}
|
||||
.insert(db)
|
||||
.await?;
|
||||
|
||||
assert_eq!(result, json_vec);
|
||||
|
||||
@ -72,7 +77,7 @@ pub async fn insert_json_string_vec_derive(db: &DatabaseConnection) -> Result<()
|
||||
|
||||
pub async fn insert_json_struct_vec_derive(db: &DatabaseConnection) -> Result<(), DbErr> {
|
||||
let json_vec = json_vec_derive::json_struct_vec::Model {
|
||||
id: 2,
|
||||
id: 1,
|
||||
struct_vec: vec![
|
||||
json_vec_derive::json_struct_vec::JsonColumn {
|
||||
value: "4".to_string(),
|
||||
@ -86,7 +91,12 @@ pub async fn insert_json_struct_vec_derive(db: &DatabaseConnection) -> Result<()
|
||||
],
|
||||
};
|
||||
|
||||
let result = json_vec.clone().into_active_model().insert(db).await?;
|
||||
let result = json_vec_derive::json_struct_vec::ActiveModel {
|
||||
id: NotSet,
|
||||
..json_vec.clone().into_active_model()
|
||||
}
|
||||
.insert(db)
|
||||
.await?;
|
||||
assert_eq!(result, json_vec);
|
||||
|
||||
let model = json_vec_derive::json_struct_vec::Entity::find()
|
||||
|
@ -4,11 +4,6 @@ pub use common::{bakery_chain::*, setup::*, TestContext};
|
||||
use sea_orm::{entity::*, query::*, DbConn, DbErr, RuntimeErr};
|
||||
|
||||
#[sea_orm_macros::test]
|
||||
#[cfg(any(
|
||||
feature = "sqlx-mysql",
|
||||
feature = "sqlx-sqlite",
|
||||
feature = "sqlx-postgres"
|
||||
))]
|
||||
async fn loader_load_one() -> Result<(), DbErr> {
|
||||
let ctx = TestContext::new("loader_test_load_one").await;
|
||||
create_tables(&ctx.db).await?;
|
||||
@ -47,11 +42,6 @@ async fn loader_load_one() -> Result<(), DbErr> {
|
||||
}
|
||||
|
||||
#[sea_orm_macros::test]
|
||||
#[cfg(any(
|
||||
feature = "sqlx-mysql",
|
||||
feature = "sqlx-sqlite",
|
||||
feature = "sqlx-postgres"
|
||||
))]
|
||||
async fn loader_load_many() -> Result<(), DbErr> {
|
||||
let ctx = TestContext::new("loader_test_load_many").await;
|
||||
create_tables(&ctx.db).await?;
|
||||
@ -121,11 +111,6 @@ async fn loader_load_many() -> Result<(), DbErr> {
|
||||
}
|
||||
|
||||
#[sea_orm_macros::test]
|
||||
#[cfg(any(
|
||||
feature = "sqlx-mysql",
|
||||
feature = "sqlx-sqlite",
|
||||
feature = "sqlx-postgres"
|
||||
))]
|
||||
async fn loader_load_many_multi() -> Result<(), DbErr> {
|
||||
let ctx = TestContext::new("loader_test_load_many_multi").await;
|
||||
create_tables(&ctx.db).await?;
|
||||
@ -154,11 +139,6 @@ async fn loader_load_many_multi() -> Result<(), DbErr> {
|
||||
}
|
||||
|
||||
#[sea_orm_macros::test]
|
||||
#[cfg(any(
|
||||
feature = "sqlx-mysql",
|
||||
feature = "sqlx-sqlite",
|
||||
feature = "sqlx-postgres"
|
||||
))]
|
||||
async fn loader_load_many_to_many() -> Result<(), DbErr> {
|
||||
let ctx = TestContext::new("loader_test_load_many_to_many").await;
|
||||
create_tables(&ctx.db).await?;
|
||||
|
@ -5,11 +5,6 @@ use pretty_assertions::assert_eq;
|
||||
use sea_orm::{entity::prelude::*, DatabaseConnection, IntoActiveModel, Set};
|
||||
|
||||
#[sea_orm_macros::test]
|
||||
#[cfg(any(
|
||||
feature = "sqlx-mysql",
|
||||
feature = "sqlx-sqlite",
|
||||
feature = "sqlx-postgres"
|
||||
))]
|
||||
async fn main() -> Result<(), DbErr> {
|
||||
let ctx = TestContext::new("features_parallel_tests").await;
|
||||
create_tables(&ctx.db).await?;
|
||||
|
@ -2,15 +2,10 @@ pub mod common;
|
||||
|
||||
use common::{features::*, setup::*, TestContext};
|
||||
use pretty_assertions::assert_eq;
|
||||
use sea_orm::{entity::prelude::*, entity::*, DatabaseConnection};
|
||||
use sea_orm::{entity::prelude::*, entity::*, DatabaseConnection, NotSet};
|
||||
use std::str::FromStr;
|
||||
|
||||
#[sea_orm_macros::test]
|
||||
#[cfg(any(
|
||||
feature = "sqlx-mysql",
|
||||
feature = "sqlx-sqlite",
|
||||
feature = "sqlx-postgres"
|
||||
))]
|
||||
async fn main() -> Result<(), DbErr> {
|
||||
let ctx = TestContext::new("pi_tests").await;
|
||||
create_tables(&ctx.db).await?;
|
||||
@ -29,7 +24,12 @@ pub async fn create_and_update_pi(db: &DatabaseConnection) -> Result<(), DbErr>
|
||||
big_decimal_opt: None,
|
||||
};
|
||||
|
||||
let res = pi.clone().into_active_model().insert(db).await?;
|
||||
let res = pi::ActiveModel {
|
||||
id: NotSet,
|
||||
..pi.clone().into_active_model()
|
||||
}
|
||||
.insert(db)
|
||||
.await?;
|
||||
|
||||
let model = Pi::find().one(db).await?;
|
||||
assert_eq!(model, Some(res));
|
||||
|
@ -7,11 +7,6 @@ pub use sea_orm::{ConnectionTrait, QueryFilter, QuerySelect};
|
||||
// Run the test locally:
|
||||
// DATABASE_URL="mysql://root:@localhost" cargo test --features sqlx-mysql,runtime-async-std --test query_tests
|
||||
#[sea_orm_macros::test]
|
||||
#[cfg(any(
|
||||
feature = "sqlx-mysql",
|
||||
feature = "sqlx-sqlite",
|
||||
feature = "sqlx-postgres"
|
||||
))]
|
||||
pub async fn find_one_with_no_result() {
|
||||
let ctx = TestContext::new("find_one_with_no_result").await;
|
||||
create_tables(&ctx.db).await.unwrap();
|
||||
@ -23,11 +18,6 @@ pub async fn find_one_with_no_result() {
|
||||
}
|
||||
|
||||
#[sea_orm_macros::test]
|
||||
#[cfg(any(
|
||||
feature = "sqlx-mysql",
|
||||
feature = "sqlx-sqlite",
|
||||
feature = "sqlx-postgres"
|
||||
))]
|
||||
pub async fn find_one_with_result() {
|
||||
let ctx = TestContext::new("find_one_with_result").await;
|
||||
create_tables(&ctx.db).await.unwrap();
|
||||
@ -49,11 +39,6 @@ pub async fn find_one_with_result() {
|
||||
}
|
||||
|
||||
#[sea_orm_macros::test]
|
||||
#[cfg(any(
|
||||
feature = "sqlx-mysql",
|
||||
feature = "sqlx-sqlite",
|
||||
feature = "sqlx-postgres"
|
||||
))]
|
||||
pub async fn find_by_id_with_no_result() {
|
||||
let ctx = TestContext::new("find_by_id_with_no_result").await;
|
||||
create_tables(&ctx.db).await.unwrap();
|
||||
@ -65,11 +50,6 @@ pub async fn find_by_id_with_no_result() {
|
||||
}
|
||||
|
||||
#[sea_orm_macros::test]
|
||||
#[cfg(any(
|
||||
feature = "sqlx-mysql",
|
||||
feature = "sqlx-sqlite",
|
||||
feature = "sqlx-postgres"
|
||||
))]
|
||||
pub async fn find_by_id_with_result() {
|
||||
let ctx = TestContext::new("find_by_id_with_result").await;
|
||||
create_tables(&ctx.db).await.unwrap();
|
||||
@ -95,11 +75,6 @@ pub async fn find_by_id_with_result() {
|
||||
}
|
||||
|
||||
#[sea_orm_macros::test]
|
||||
#[cfg(any(
|
||||
feature = "sqlx-mysql",
|
||||
feature = "sqlx-sqlite",
|
||||
feature = "sqlx-postgres"
|
||||
))]
|
||||
pub async fn find_all_with_no_result() {
|
||||
let ctx = TestContext::new("find_all_with_no_result").await;
|
||||
create_tables(&ctx.db).await.unwrap();
|
||||
@ -111,11 +86,6 @@ pub async fn find_all_with_no_result() {
|
||||
}
|
||||
|
||||
#[sea_orm_macros::test]
|
||||
#[cfg(any(
|
||||
feature = "sqlx-mysql",
|
||||
feature = "sqlx-sqlite",
|
||||
feature = "sqlx-postgres"
|
||||
))]
|
||||
pub async fn find_all_with_result() {
|
||||
let ctx = TestContext::new("find_all_with_result").await;
|
||||
create_tables(&ctx.db).await.unwrap();
|
||||
@ -146,11 +116,6 @@ pub async fn find_all_with_result() {
|
||||
}
|
||||
|
||||
#[sea_orm_macros::test]
|
||||
#[cfg(any(
|
||||
feature = "sqlx-mysql",
|
||||
feature = "sqlx-sqlite",
|
||||
feature = "sqlx-postgres"
|
||||
))]
|
||||
pub async fn find_all_filter_no_result() {
|
||||
let ctx = TestContext::new("find_all_filter_no_result").await;
|
||||
create_tables(&ctx.db).await.unwrap();
|
||||
@ -185,11 +150,6 @@ pub async fn find_all_filter_no_result() {
|
||||
}
|
||||
|
||||
#[sea_orm_macros::test]
|
||||
#[cfg(any(
|
||||
feature = "sqlx-mysql",
|
||||
feature = "sqlx-sqlite",
|
||||
feature = "sqlx-postgres"
|
||||
))]
|
||||
pub async fn find_all_filter_with_results() {
|
||||
let ctx = TestContext::new("find_all_filter_with_results").await;
|
||||
create_tables(&ctx.db).await.unwrap();
|
||||
@ -224,11 +184,6 @@ pub async fn find_all_filter_with_results() {
|
||||
}
|
||||
|
||||
#[sea_orm_macros::test]
|
||||
#[cfg(any(
|
||||
feature = "sqlx-mysql",
|
||||
feature = "sqlx-sqlite",
|
||||
feature = "sqlx-postgres"
|
||||
))]
|
||||
pub async fn select_only_exclude_option_fields() {
|
||||
let ctx = TestContext::new("select_only_exclude_option_fields").await;
|
||||
create_tables(&ctx.db).await.unwrap();
|
||||
|
@ -11,11 +11,6 @@ pub use uuid::Uuid;
|
||||
// Run the test locally:
|
||||
// DATABASE_URL="mysql://root:@localhost" cargo test --features sqlx-mysql,runtime-async-std-native-tls --test relational_tests
|
||||
#[sea_orm_macros::test]
|
||||
#[cfg(any(
|
||||
feature = "sqlx-mysql",
|
||||
feature = "sqlx-sqlite",
|
||||
feature = "sqlx-postgres"
|
||||
))]
|
||||
pub async fn left_join() {
|
||||
let ctx = TestContext::new("test_left_join").await;
|
||||
create_tables(&ctx.db).await.unwrap();
|
||||
@ -200,11 +195,6 @@ pub async fn right_join() {
|
||||
}
|
||||
|
||||
#[sea_orm_macros::test]
|
||||
#[cfg(any(
|
||||
feature = "sqlx-mysql",
|
||||
feature = "sqlx-sqlite",
|
||||
feature = "sqlx-postgres"
|
||||
))]
|
||||
pub async fn inner_join() {
|
||||
let ctx = TestContext::new("test_inner_join").await;
|
||||
create_tables(&ctx.db).await.unwrap();
|
||||
@ -290,11 +280,6 @@ pub async fn inner_join() {
|
||||
}
|
||||
|
||||
#[sea_orm_macros::test]
|
||||
#[cfg(any(
|
||||
feature = "sqlx-mysql",
|
||||
feature = "sqlx-sqlite",
|
||||
feature = "sqlx-postgres"
|
||||
))]
|
||||
pub async fn group_by() {
|
||||
let ctx = TestContext::new("test_group_by").await;
|
||||
create_tables(&ctx.db).await.unwrap();
|
||||
@ -384,11 +369,6 @@ pub async fn group_by() {
|
||||
}
|
||||
|
||||
#[sea_orm_macros::test]
|
||||
#[cfg(any(
|
||||
feature = "sqlx-mysql",
|
||||
feature = "sqlx-sqlite",
|
||||
feature = "sqlx-postgres"
|
||||
))]
|
||||
pub async fn having() {
|
||||
// customers with orders with total equal to $90
|
||||
let ctx = TestContext::new("test_having").await;
|
||||
@ -494,11 +474,6 @@ pub async fn having() {
|
||||
}
|
||||
|
||||
#[sea_orm_macros::test]
|
||||
#[cfg(any(
|
||||
feature = "sqlx-mysql",
|
||||
feature = "sqlx-sqlite",
|
||||
feature = "sqlx-postgres"
|
||||
))]
|
||||
pub async fn related() -> Result<(), DbErr> {
|
||||
use sea_orm::{SelectA, SelectB};
|
||||
|
||||
@ -743,11 +718,6 @@ pub async fn related() -> Result<(), DbErr> {
|
||||
}
|
||||
|
||||
#[sea_orm_macros::test]
|
||||
#[cfg(any(
|
||||
feature = "sqlx-mysql",
|
||||
feature = "sqlx-sqlite",
|
||||
feature = "sqlx-postgres"
|
||||
))]
|
||||
pub async fn linked() -> Result<(), DbErr> {
|
||||
use common::bakery_chain::Order;
|
||||
use sea_orm::{SelectA, SelectB};
|
||||
|
@ -1,16 +1,11 @@
|
||||
pub mod common;
|
||||
|
||||
pub use common::{bakery_chain::*, setup::*, TestContext};
|
||||
use sea_orm::{entity::prelude::*, IntoActiveModel};
|
||||
use sea_orm::{entity::prelude::*, IntoActiveModel, Set};
|
||||
pub use sea_query::{Expr, Query};
|
||||
use serde_json::json;
|
||||
|
||||
#[sea_orm_macros::test]
|
||||
#[cfg(any(
|
||||
feature = "sqlx-mysql",
|
||||
feature = "sqlx-sqlite",
|
||||
feature = "sqlx-postgres"
|
||||
))]
|
||||
async fn main() -> Result<(), DbErr> {
|
||||
use bakery::*;
|
||||
|
||||
@ -33,7 +28,9 @@ async fn main() -> Result<(), DbErr> {
|
||||
])
|
||||
.and_where(Column::Id.eq(1));
|
||||
|
||||
let returning = Query::returning().columns([Column::Id, Column::Name, Column::ProfitMargin]);
|
||||
let columns = [Column::Id, Column::Name, Column::ProfitMargin];
|
||||
let returning =
|
||||
Query::returning().exprs(columns.into_iter().map(|c| c.into_returning_expr(builder)));
|
||||
|
||||
create_tables(db).await?;
|
||||
|
||||
@ -69,11 +66,6 @@ async fn main() -> Result<(), DbErr> {
|
||||
}
|
||||
|
||||
#[sea_orm_macros::test]
|
||||
#[cfg(any(
|
||||
feature = "sqlx-mysql",
|
||||
feature = "sqlx-sqlite",
|
||||
feature = "sqlx-postgres"
|
||||
))]
|
||||
#[cfg_attr(
|
||||
any(
|
||||
feature = "sqlx-mysql",
|
||||
@ -95,10 +87,10 @@ async fn update_many() {
|
||||
create_tables(db).await?;
|
||||
|
||||
Entity::insert(
|
||||
Model {
|
||||
id: 1,
|
||||
action: "before_save".into(),
|
||||
values: json!({ "id": "unique-id-001" }),
|
||||
ActiveModel {
|
||||
action: Set("before_save".into()),
|
||||
values: Set(json!({ "id": "unique-id-001" })),
|
||||
..Default::default()
|
||||
}
|
||||
.into_active_model(),
|
||||
)
|
||||
@ -106,10 +98,10 @@ async fn update_many() {
|
||||
.await?;
|
||||
|
||||
Entity::insert(
|
||||
Model {
|
||||
id: 2,
|
||||
action: "before_save".into(),
|
||||
values: json!({ "id": "unique-id-002" }),
|
||||
ActiveModel {
|
||||
action: Set("before_save".into()),
|
||||
values: Set(json!({ "id": "unique-id-002" })),
|
||||
..Default::default()
|
||||
}
|
||||
.into_active_model(),
|
||||
)
|
||||
@ -117,10 +109,10 @@ async fn update_many() {
|
||||
.await?;
|
||||
|
||||
Entity::insert(
|
||||
Model {
|
||||
id: 3,
|
||||
action: "before_save".into(),
|
||||
values: json!({ "id": "unique-id-003" }),
|
||||
ActiveModel {
|
||||
action: Set("before_save".into()),
|
||||
values: Set(json!({ "id": "unique-id-003" })),
|
||||
..Default::default()
|
||||
}
|
||||
.into_active_model(),
|
||||
)
|
||||
|
@ -5,11 +5,6 @@ use pretty_assertions::assert_eq;
|
||||
use sea_orm::{entity::prelude::*, query::*, DbBackend, IntoActiveModel, QueryOrder};
|
||||
|
||||
#[sea_orm_macros::test]
|
||||
#[cfg(any(
|
||||
feature = "sqlx-mysql",
|
||||
feature = "sqlx-sqlite",
|
||||
feature = "sqlx-postgres"
|
||||
))]
|
||||
async fn main() -> Result<(), DbErr> {
|
||||
let ctx = TestContext::new("self_join_tests").await;
|
||||
create_tables(&ctx.db).await?;
|
||||
|
@ -133,9 +133,9 @@ async fn seed_data(db: &DatabaseConnection) {
|
||||
|
||||
#[cfg(any(feature = "sqlx-mysql", feature = "sqlx-postgres"))]
|
||||
async fn find_baker_least_sales(db: &DatabaseConnection) -> Option<baker::Model> {
|
||||
#[cfg(feature = "sqlx-postgres")]
|
||||
#[cfg(any(feature = "sqlx-postgres"))]
|
||||
type Type = i64;
|
||||
#[cfg(not(feature = "sqlx-postgres"))]
|
||||
#[cfg(not(any(feature = "sqlx-postgres")))]
|
||||
type Type = Decimal;
|
||||
|
||||
#[derive(Debug, FromQueryResult)]
|
||||
|
@ -1,17 +1,12 @@
|
||||
pub mod common;
|
||||
pub use common::{bakery_chain::*, setup::*, TestContext};
|
||||
pub use sea_orm::{
|
||||
entity::*, error::DbErr, error::SqlErr, tests_cfg, DatabaseConnection, DbBackend, EntityName,
|
||||
ExecResult,
|
||||
entity::*, error::DbErr, error::SqlErr, tests_cfg, ConnectionTrait, DatabaseConnection,
|
||||
DbBackend, EntityName, ExecResult,
|
||||
};
|
||||
use uuid::Uuid;
|
||||
|
||||
#[sea_orm_macros::test]
|
||||
#[cfg(any(
|
||||
feature = "sqlx-mysql",
|
||||
feature = "sqlx-sqlite",
|
||||
feature = "sqlx-postgres"
|
||||
))]
|
||||
async fn main() {
|
||||
let ctx = TestContext::new("bakery_chain_sql_err_tests").await;
|
||||
create_tables(&ctx.db).await.unwrap();
|
||||
|
@ -5,11 +5,6 @@ pub use sea_orm::entity::*;
|
||||
pub use sea_orm::{ConnectionTrait, DbErr, QueryFilter};
|
||||
|
||||
#[sea_orm_macros::test]
|
||||
#[cfg(any(
|
||||
feature = "sqlx-mysql",
|
||||
feature = "sqlx-sqlite",
|
||||
feature = "sqlx-postgres"
|
||||
))]
|
||||
pub async fn stream() -> Result<(), DbErr> {
|
||||
use futures::StreamExt;
|
||||
|
||||
|
@ -6,11 +6,6 @@ use sea_orm::{entity::prelude::*, entity::*, DatabaseConnection};
|
||||
use serde_json::json;
|
||||
|
||||
#[sea_orm_macros::test]
|
||||
#[cfg(any(
|
||||
feature = "sqlx-mysql",
|
||||
feature = "sqlx-sqlite",
|
||||
feature = "sqlx-postgres"
|
||||
))]
|
||||
async fn main() -> Result<(), DbErr> {
|
||||
let ctx = TestContext::new("features_schema_string_primary_key_tests").await;
|
||||
create_tables(&ctx.db).await?;
|
||||
|
@ -1,16 +1,11 @@
|
||||
pub mod common;
|
||||
pub use common::{features::*, setup::*, TestContext};
|
||||
use pretty_assertions::assert_eq;
|
||||
use sea_orm::{entity::prelude::*, DatabaseConnection, IntoActiveModel};
|
||||
use sea_orm::{entity::prelude::*, ActiveValue::NotSet, DatabaseConnection, IntoActiveModel};
|
||||
use serde_json::json;
|
||||
use time::macros::{date, time};
|
||||
|
||||
#[sea_orm_macros::test]
|
||||
#[cfg(any(
|
||||
feature = "sqlx-mysql",
|
||||
feature = "sqlx-sqlite",
|
||||
feature = "sqlx-postgres"
|
||||
))]
|
||||
async fn main() {
|
||||
let ctx = TestContext::new("time_crate_tests").await;
|
||||
create_tables(&ctx.db).await.unwrap();
|
||||
@ -30,9 +25,12 @@ pub async fn create_transaction_log(db: &DatabaseConnection) -> Result<(), DbErr
|
||||
.assume_utc(),
|
||||
};
|
||||
|
||||
let res = TransactionLog::insert(transaction_log.clone().into_active_model())
|
||||
.exec(db)
|
||||
.await?;
|
||||
let res = TransactionLog::insert(transaction_log::ActiveModel {
|
||||
id: NotSet,
|
||||
..transaction_log.clone().into_active_model()
|
||||
})
|
||||
.exec(db)
|
||||
.await?;
|
||||
|
||||
assert_eq!(transaction_log.id, res.last_insert_id);
|
||||
assert_eq!(
|
||||
|
@ -1,14 +1,9 @@
pub mod common;
pub use common::{features::*, setup::*, TestContext};
use pretty_assertions::assert_eq;
use sea_orm::{entity::prelude::*, DatabaseConnection, IntoActiveModel};
use sea_orm::{entity::prelude::*, DatabaseConnection, IntoActiveModel, NotSet};

#[sea_orm_macros::test]
#[cfg(any(
feature = "sqlx-mysql",
feature = "sqlx-sqlite",
feature = "sqlx-postgres"
))]
async fn main() -> Result<(), DbErr> {
let ctx = TestContext::new("bakery_chain_schema_timestamp_tests").await;
create_tables(&ctx.db).await?;

@ -28,9 +23,12 @@ pub async fn create_applog(db: &DatabaseConnection) -> Result<(), DbErr> {
created_at: "2021-09-17T17:50:20+08:00".parse().unwrap(),
};

let res = Applog::insert(log.clone().into_active_model())
.exec(db)
.await?;
let res = Applog::insert(applog::ActiveModel {
id: NotSet,
..log.clone().into_active_model()
})
.exec(db)
.await?;

assert_eq!(log.id, res.last_insert_id);
assert_eq!(Applog::find().one(db).await?, Some(log.clone()));

@ -77,9 +75,12 @@ pub async fn create_satellites_log(db: &DatabaseConnection) -> Result<(), DbErr>
deployment_date: "2022-01-07T12:11:23Z".parse().unwrap(),
};

let res = Satellite::insert(archive.clone().into_active_model())
.exec(db)
.await?;
let res = Satellite::insert(satellite::ActiveModel {
id: NotSet,
..archive.clone().into_active_model()
})
.exec(db)
.await?;

assert_eq!(archive.id, res.last_insert_id);
assert_eq!(Satellite::find().one(db).await?, Some(archive.clone()));

@ -5,11 +5,6 @@ use pretty_assertions::assert_eq;
use sea_orm::{prelude::*, AccessMode, DatabaseTransaction, IsolationLevel, Set, TransactionTrait};

#[sea_orm_macros::test]
#[cfg(any(
feature = "sqlx-mysql",
feature = "sqlx-sqlite",
feature = "sqlx-postgres"
))]
pub async fn transaction() {
let ctx = TestContext::new("transaction_test").await;
create_tables(&ctx.db).await.unwrap();

@ -50,11 +45,6 @@ pub async fn transaction() {
}

#[sea_orm_macros::test]
#[cfg(any(
feature = "sqlx-mysql",
feature = "sqlx-sqlite",
feature = "sqlx-postgres"
))]
pub async fn transaction_with_reference() {
let ctx = TestContext::new("transaction_with_reference_test").await;
create_tables(&ctx.db).await.unwrap();

@ -105,11 +95,6 @@ fn _transaction_with_reference<'a>(
}

#[sea_orm_macros::test]
#[cfg(any(
feature = "sqlx-mysql",
feature = "sqlx-sqlite",
feature = "sqlx-postgres"
))]
pub async fn transaction_begin_out_of_scope() -> Result<(), DbErr> {
let ctx = TestContext::new("transaction_begin_out_of_scope_test").await;
create_tables(&ctx.db).await?;

@ -150,11 +135,6 @@ pub async fn transaction_begin_out_of_scope() -> Result<(), DbErr> {
}

#[sea_orm_macros::test]
#[cfg(any(
feature = "sqlx-mysql",
feature = "sqlx-sqlite",
feature = "sqlx-postgres"
))]
pub async fn transaction_begin_commit() -> Result<(), DbErr> {
let ctx = TestContext::new("transaction_begin_commit_test").await;
create_tables(&ctx.db).await?;

@ -196,11 +176,6 @@ pub async fn transaction_begin_commit() -> Result<(), DbErr> {
}

#[sea_orm_macros::test]
#[cfg(any(
feature = "sqlx-mysql",
feature = "sqlx-sqlite",
feature = "sqlx-postgres"
))]
pub async fn transaction_begin_rollback() -> Result<(), DbErr> {
let ctx = TestContext::new("transaction_begin_rollback_test").await;
create_tables(&ctx.db).await?;

@ -242,11 +217,6 @@ pub async fn transaction_begin_rollback() -> Result<(), DbErr> {
}

#[sea_orm_macros::test]
#[cfg(any(
feature = "sqlx-mysql",
feature = "sqlx-sqlite",
feature = "sqlx-postgres"
))]
pub async fn transaction_closure_commit() -> Result<(), DbErr> {
let ctx = TestContext::new("transaction_closure_commit_test").await;
create_tables(&ctx.db).await?;

@ -291,11 +261,6 @@ pub async fn transaction_closure_commit() -> Result<(), DbErr> {
}

#[sea_orm_macros::test]
#[cfg(any(
feature = "sqlx-mysql",
feature = "sqlx-sqlite",
feature = "sqlx-postgres"
))]
pub async fn transaction_closure_rollback() -> Result<(), DbErr> {
let ctx = TestContext::new("transaction_closure_rollback_test").await;
create_tables(&ctx.db).await?;

@ -352,11 +317,6 @@ pub async fn transaction_closure_rollback() -> Result<(), DbErr> {
}

#[sea_orm_macros::test]
#[cfg(any(
feature = "sqlx-mysql",
feature = "sqlx-sqlite",
feature = "sqlx-postgres"
))]
pub async fn transaction_with_active_model_behaviour() -> Result<(), DbErr> {
let ctx = TestContext::new("transaction_with_active_model_behaviour_test").await;
create_tables(&ctx.db).await?;

@ -440,11 +400,6 @@ pub async fn transaction_with_active_model_behaviour() -> Result<(), DbErr> {
}

#[sea_orm_macros::test]
#[cfg(any(
feature = "sqlx-mysql",
feature = "sqlx-sqlite",
feature = "sqlx-postgres"
))]
pub async fn transaction_nested() {
let ctx = TestContext::new("transaction_nested_test").await;
create_tables(&ctx.db).await.unwrap();

@ -692,11 +647,6 @@ pub async fn transaction_nested() {
}

#[sea_orm_macros::test]
#[cfg(any(
feature = "sqlx-mysql",
feature = "sqlx-sqlite",
feature = "sqlx-postgres"
))]
pub async fn transaction_with_config() {
let ctx = TestContext::new("transaction_with_config").await;
create_tables(&ctx.db).await.unwrap();

@ -2,14 +2,9 @@ pub mod common;

pub use common::{features::*, setup::*, TestContext};
use pretty_assertions::assert_eq;
use sea_orm::{entity::prelude::*, entity::*, DatabaseConnection};
use sea_orm::{entity::prelude::*, entity::*, DatabaseConnection, NotSet};

#[sea_orm_macros::test]
#[cfg(any(
feature = "sqlx-mysql",
feature = "sqlx-sqlite",
feature = "sqlx-postgres"
))]
async fn main() -> Result<(), DbErr> {
let ctx = TestContext::new("uuid_fmt_tests").await;
create_tables(&ctx.db).await?;

@ -31,7 +26,12 @@ pub async fn insert_uuid_fmt(db: &DatabaseConnection) -> Result<(), DbErr> {
uuid_urn: uuid.urn(),
};

let result = uuid_fmt.clone().into_active_model().insert(db).await?;
let result = uuid_fmt::ActiveModel {
id: NotSet,
..uuid_fmt.clone().into_active_model()
}
.insert(db)
.await?;

assert_eq!(result, uuid_fmt);

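This hunk applies the same NotSet struct-update pattern, but through ActiveModelTrait::insert, which returns the freshly inserted Model so the test can assert the whole row round-trips in one equality check. A small sketch under the same assumption of a hypothetical entity module:

use sea_orm::{ActiveModelTrait, ActiveValue::NotSet, DatabaseConnection, DbErr, IntoActiveModel};

// Hypothetical entity module `uuid_like`, standing in for `uuid_fmt` above.
async fn insert_and_compare(db: &DatabaseConnection, original: uuid_like::Model) -> Result<(), DbErr> {
    let inserted = uuid_like::ActiveModel {
        id: NotSet,
        ..original.clone().into_active_model()
    }
    .insert(db) // returns the inserted Model
    .await?;

    // With an empty table the generated id matches the original, so the
    // entire model should come back unchanged.
    assert_eq!(inserted, original);
    Ok(())
}
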
@ -6,11 +6,6 @@ use sea_orm::{entity::prelude::*, entity::*, DatabaseConnection};
use serde_json::json;

#[sea_orm_macros::test]
#[cfg(any(
feature = "sqlx-mysql",
feature = "sqlx-sqlite",
feature = "sqlx-postgres"
))]
async fn main() -> Result<(), DbErr> {
let ctx = TestContext::new("bakery_chain_schema_uuid_tests").await;
create_tables(&ctx.db).await?;

@ -12,15 +12,10 @@ pub use common::{
TestContext,
};
use pretty_assertions::assert_eq;
use sea_orm::{entity::prelude::*, entity::*, DatabaseConnection};
use sea_orm::{entity::prelude::*, entity::*, DatabaseConnection, NotSet};
use sea_query::{ArrayType, ColumnType, Value, ValueType, ValueTypeErr};

#[sea_orm_macros::test]
#[cfg(any(
feature = "sqlx-mysql",
feature = "sqlx-sqlite",
feature = "sqlx-postgres"
))]
async fn main() -> Result<(), DbErr> {
let ctx = TestContext::new("value_type_tests").await;
create_tables(&ctx.db).await?;

@ -45,7 +40,12 @@ pub async fn insert_value(db: &DatabaseConnection) -> Result<(), DbErr> {
id: 1,
number: 48.into(),
};
let result = model.clone().into_active_model().insert(db).await?;
let result = value_type_general::ActiveModel {
id: NotSet,
..model.clone().into_active_model()
}
.insert(db)
.await?;
assert_eq!(result, model);

Ok(())

@ -57,7 +57,12 @@ pub async fn postgres_insert_value(db: &DatabaseConnection) -> Result<(), DbErr>
number: 48.into(),
str_vec: StringVec(vec!["ab".to_string(), "cd".to_string()]),
};
let result = model.clone().into_active_model().insert(db).await?;
let result = value_type_pg::ActiveModel {
id: NotSet,
..model.clone().into_active_model()
}
.insert(db)
.await?;
assert_eq!(result, model);

Ok(())