Merge branch 'master' into ss/actix-example
commit fac33f4019

README.md (104)
@@ -29,19 +29,113 @@ SeaORM is a relational ORM to help you build light weight and concurrent web ser
 
 1. Async
 
 Relying on [SQLx](https://github.com/launchbadge/sqlx), SeaORM is a new library with async support from day 1.
 
+```rust
+// execute multiple queries in parallel
+let cakes_and_fruits: (Vec<cake::Model>, Vec<fruit::Model>) =
+    futures::try_join!(Cake::find().all(&db), Fruit::find().all(&db))?;
+```
+
 2. Dynamic
 
 Built upon [SeaQuery](https://github.com/SeaQL/sea-query), SeaORM allows you to build complex queries without 'fighting the ORM'.
 
+```rust
+// build subquery with ease
+let cakes_with_filling: Vec<cake::Model> = cake::Entity::find()
+    .filter(
+        Condition::any().add(
+            cake::Column::Id.in_subquery(
+                Query::select()
+                    .column(cake_filling::Column::CakeId)
+                    .from(cake_filling::Entity)
+                    .to_owned(),
+            ),
+        ),
+    )
+    .all(&db)
+    .await?;
+
+```
+
 3. Testable
 
 Use mock connections to write unit tests for your logic.
 
-4. Service oriented
+```rust
+// Setup mock connection
+let db = MockDatabase::new(DbBackend::Postgres)
+    .append_query_results(vec![
+        vec![
+            cake::Model {
+                id: 1,
+                name: "New York Cheese".to_owned(),
+            },
+        ],
+    ])
+    .into_connection();
 
-Quickly build services that join, filter, sort and paginate data in APIs.
+// Perform your application logic
+assert_eq!(
+    cake::Entity::find().one(&db).await?,
+    Some(cake::Model {
+        id: 1,
+        name: "New York Cheese".to_owned(),
+    })
+);
+
+// Compare it against the expected transaction log
+assert_eq!(
+    db.into_transaction_log(),
+    vec![
+        Transaction::from_sql_and_values(
+            DbBackend::Postgres,
+            r#"SELECT "cake"."id", "cake"."name" FROM "cake" LIMIT $1"#,
+            vec![1u64.into()]
+        ),
+    ]
+);
+```
+
+4. Service Oriented
+
+Quickly build services that join, filter, sort and paginate data in APIs.
+
+```rust
+#[get("/?<page>&<posts_per_page>")]
+async fn list(
+    conn: Connection<Db>,
+    page: Option<usize>,
+    per_page: Option<usize>,
+) -> Template {
+    // Set page number and items per page
+    let page = page.unwrap_or(1);
+    let per_page = per_page.unwrap_or(10);
+
+    // Setup paginator
+    let paginator = Post::find()
+        .order_by_asc(post::Column::Id)
+        .paginate(&conn, per_page);
+    let num_pages = paginator.num_pages().await.unwrap();
+
+    // Fetch paginated posts
+    let posts = paginator
+        .fetch_page(page - 1)
+        .await
+        .expect("could not retrieve posts");
+
+    Template::render(
+        "index",
+        context! {
+            page: page,
+            per_page: per_page,
+            posts: posts,
+            num_pages: num_pages,
+        },
+    )
+}
+```
 
 ## A quick taste of SeaORM
 
@@ -79,28 +79,32 @@ async fn list(
     page: Option<usize>,
     flash: Option<FlashMessage<'_>>,
 ) -> Template {
-    let page = page.unwrap_or(0);
+    // Set page number and items per page
+    let page = page.unwrap_or(1);
     let posts_per_page = posts_per_page.unwrap_or(DEFAULT_POSTS_PER_PAGE);
+    if page == 0 {
+        panic!("Page number cannot be zero");
+    }
+
+    // Setup paginator
     let paginator = Post::find()
         .order_by_asc(post::Column::Id)
         .paginate(&conn, posts_per_page);
-    let num_pages = paginator.num_pages().await.ok().unwrap();
 
+    // Fetch paginated posts
     let posts = paginator
-        .fetch_page(page)
+        .fetch_page(page - 1)
         .await
         .expect("could not retrieve posts");
 
-    let flash = flash.map(FlashMessage::into_inner);
-
     Template::render(
         "index",
         context! {
-            posts: posts,
-            flash: flash,
             page: page,
             posts_per_page: posts_per_page,
-            num_pages: num_pages,
+            posts: posts,
+            flash: flash.map(FlashMessage::into_inner),
+            num_pages: paginator.num_pages().await.ok().unwrap(),
         },
     )
 }
@@ -26,9 +26,9 @@
 <tr>
 <td></td>
 <td>
-{% if page == 0 %} Previous {% else %}
+{% if page == 1 %} Previous {% else %}
 <a href="/?page={{ page - 1 }}&posts_per_page={{ posts_per_page }}">Previous</a>
-{% endif %} | {% if page == num_pages - 1 %} Next {% else %}
+{% endif %} | {% if page == num_pages %} Next {% else %}
 <a href="/?page={{ page + 1 }}&posts_per_page={{ posts_per_page }}">Next</a>
 {% endif %}
 </td>
@@ -91,6 +91,9 @@ impl Column {
             },
             ColumnType::DateTime(_) => quote! { ColumnType::DateTime.def() },
             ColumnType::Timestamp(_) => quote! { ColumnType::Timestamp.def() },
+            ColumnType::TimestampWithTimeZone(_) => {
+                quote! { ColumnType::TimestampWithTimeZone.def() }
+            }
             ColumnType::Time(_) => quote! { ColumnType::Time.def() },
             ColumnType::Date => quote! { ColumnType::Date.def() },
             ColumnType::Binary(_) => quote! { ColumnType::Binary.def() },
@@ -106,6 +109,7 @@ impl Column {
                 let s = s.to_string();
                 quote! { ColumnType::Custom(#s.to_owned()).def() }
             }
+            #[allow(unreachable_patterns)]
             _ => unimplemented!(),
         };
         if !self.not_null {
@@ -29,19 +29,6 @@ pub trait IntoMockRow {
     fn into_mock_row(self) -> MockRow;
 }
 
-impl<M> IntoMockRow for M
-where
-    M: ModelTrait,
-{
-    fn into_mock_row(self) -> MockRow {
-        let mut values = BTreeMap::new();
-        for col in <<M::Entity as EntityTrait>::Column>::iter() {
-            values.insert(col.to_string(), self.get(col));
-        }
-        MockRow { values }
-    }
-}
-
 impl MockDatabase {
     pub fn new(db_backend: DbBackend) -> Self {
         Self {
@@ -121,6 +108,25 @@ impl MockRow {
     }
 }
 
+impl IntoMockRow for MockRow {
+    fn into_mock_row(self) -> MockRow {
+        self
+    }
+}
+
+impl<M> IntoMockRow for M
+where
+    M: ModelTrait,
+{
+    fn into_mock_row(self) -> MockRow {
+        let mut values = BTreeMap::new();
+        for col in <<M::Entity as EntityTrait>::Column>::iter() {
+            values.insert(col.to_string(), self.get(col));
+        }
+        MockRow { values }
+    }
+}
+
 impl IntoMockRow for BTreeMap<&str, Value> {
     fn into_mock_row(self) -> MockRow {
         MockRow {
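A quick aside on what the relocated impls enable: since `MockRow` now implements `IntoMockRow` for itself, alongside the blanket `ModelTrait` impl and the `BTreeMap<&str, Value>` impl, mock result sets can be seeded from typed models and ad-hoc rows alike. The sketch below is illustrative only (the `mixed_mock_rows` helper name and the values are invented; it assumes the `mock` feature and the built-in `tests_cfg::cake` entity):

```rust
use std::collections::BTreeMap;

use sea_orm::{tests_cfg::cake, DatabaseConnection, DbBackend, IntoMockRow, MockDatabase, Value};

// Hypothetical helper: seeds one result set from a typed model and one from
// an ad-hoc column-name/value row, both converted through IntoMockRow.
fn mixed_mock_rows() -> DatabaseConnection {
    MockDatabase::new(DbBackend::Postgres)
        .append_query_results(vec![
            // typed model, via the blanket `impl<M: ModelTrait> IntoMockRow for M`
            vec![cake::Model {
                id: 1,
                name: "Cheese".to_owned(),
            }
            .into_mock_row()],
            // ad-hoc row, via `impl IntoMockRow for BTreeMap<&str, Value>`
            vec![BTreeMap::from([
                ("id", Value::from(2i32)),
                ("name", Value::from("Mocha")),
            ])
            .into_mock_row()],
        ])
        .into_connection()
}
```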
@@ -386,7 +386,10 @@ pub trait EntityTrait: EntityName {
     /// # let _: Result<(), DbErr> = smol::block_on(async {
     /// #
     /// assert_eq!(
-    ///     orange.clone().update(&db).await?, // Clone here because we need to assert_eq
+    ///     fruit::Entity::update(orange.clone())
+    ///         .filter(fruit::Column::Name.contains("orange"))
+    ///         .exec(&db)
+    ///         .await?,
     ///     orange
     /// );
     /// #
@@ -396,7 +399,8 @@ pub trait EntityTrait: EntityName {
     /// assert_eq!(
     ///     db.into_transaction_log(),
     ///     vec![Transaction::from_sql_and_values(
-    ///         DbBackend::Postgres, r#"UPDATE "fruit" SET "name" = $1 WHERE "fruit"."id" = $2"#, vec!["Orange".into(), 1i32.into()]
+    ///         DbBackend::Postgres, r#"UPDATE "fruit" SET "name" = $1 WHERE "fruit"."id" = $2 AND "fruit"."name" LIKE $3"#,
+    ///         vec!["Orange".into(), 1i32.into(), "%orange%".into()]
     ///     )]);
     /// ```
     fn update<A>(model: A) -> UpdateOne<A>
@@ -334,7 +334,7 @@ mod tests {
     use sea_query::Query;
 
     #[test]
-    fn test_in_subquery() {
+    fn test_in_subquery_1() {
         assert_eq!(
             cake::Entity::find()
                 .filter(
@@ -357,6 +357,30 @@ mod tests {
         );
     }
 
+    #[test]
+    fn test_in_subquery_2() {
+        assert_eq!(
+            cake::Entity::find()
+                .filter(
+                    Condition::any().add(
+                        cake::Column::Id.in_subquery(
+                            Query::select()
+                                .column(cake_filling::Column::CakeId)
+                                .from(cake_filling::Entity)
+                                .to_owned()
+                        )
+                    )
+                )
+                .build(DbBackend::MySql)
+                .to_string(),
+            [
+                "SELECT `cake`.`id`, `cake`.`name` FROM `cake`",
+                "WHERE `cake`.`id` IN (SELECT `cake_id` FROM `cake_filling`)",
+            ]
+            .join(" ")
+        );
+    }
+
     #[test]
     fn test_col_from_str() {
         use std::str::FromStr;
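As an aside, the new test asserts the MySQL rendering with backtick quoting; the sketch below is not part of the diff (the helper name is invented, the entities are the same `tests_cfg` ones) and shows the equivalent query built for Postgres, where identifiers come out double-quoted as in the crate-level docs:

```rust
use sea_orm::{entity::*, query::*, tests_cfg::*, DbBackend};
use sea_query::Query;

// Builds the same filter as test_in_subquery_2, but renders it for Postgres.
fn subquery_sql_postgres() -> String {
    cake::Entity::find()
        .filter(
            Condition::any().add(
                cake::Column::Id.in_subquery(
                    Query::select()
                        .column(cake_filling::Column::CakeId)
                        .from(cake_filling::Entity)
                        .to_owned(),
                ),
            ),
        )
        .build(DbBackend::Postgres)
        .to_string()
    // expected: SELECT "cake"."id", "cake"."name" FROM "cake"
    //           WHERE "cake"."id" IN (SELECT "cake_id" FROM "cake_filling")
}
```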
@@ -24,7 +24,7 @@ impl<'db, S> Paginator<'db, S>
 where
     S: SelectorTrait + 'db,
 {
-    /// Fetch a specific page
+    /// Fetch a specific page; page index starts from zero
     pub async fn fetch_page(&self, page: usize) -> Result<Vec<S::Item>, DbErr> {
         let query = self
             .query
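To make the clarified zero-based contract concrete, here is a minimal sketch mirroring the README's `fetch_page(page - 1)` usage; the `first_page` helper and the page size of 10 are invented for illustration, and it assumes the built-in `tests_cfg::cake` entity:

```rust
use sea_orm::{entity::*, query::*, tests_cfg::cake, DatabaseConnection, DbErr};

// A user-facing page number starting at 1 maps to a zero-based index,
// so the first page is fetched with `fetch_page(0)`.
async fn first_page(db: &DatabaseConnection) -> Result<Vec<cake::Model>, DbErr> {
    cake::Entity::find()
        .order_by_asc(cake::Column::Id)
        .paginate(db, 10) // 10 items per page
        .fetch_page(0)    // zero-based: the first page
        .await
}
```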
src/lib.rs (161)
@@ -36,19 +36,170 @@
 //!
 //! 1. Async
 //!
 //! Relying on [SQLx](https://github.com/launchbadge/sqlx), SeaORM is a new library with async support from day 1.
+//!
+//! ```
+//! # use sea_orm::{DbConn, error::*, entity::*, query::*, tests_cfg::*, DatabaseConnection, DbBackend, MockDatabase, Transaction, IntoMockRow};
+//! # let db = MockDatabase::new(DbBackend::Postgres)
+//! #     .append_query_results(vec![
+//! #         vec![cake::Model {
+//! #             id: 1,
+//! #             name: "New York Cheese".to_owned(),
+//! #         }
+//! #         .into_mock_row()],
+//! #         vec![fruit::Model {
+//! #             id: 1,
+//! #             name: "Apple".to_owned(),
+//! #             cake_id: Some(1),
+//! #         }
+//! #         .into_mock_row()],
+//! #     ])
+//! #     .into_connection();
+//! # let _: Result<(), DbErr> = smol::block_on(async {
+//! // execute multiple queries in parallel
+//! let cakes_and_fruits: (Vec<cake::Model>, Vec<fruit::Model>) =
+//!     futures::try_join!(Cake::find().all(&db), Fruit::find().all(&db))?;
+//! # assert_eq!(
+//! #     cakes_and_fruits,
+//! #     (
+//! #         vec![cake::Model {
+//! #             id: 1,
+//! #             name: "New York Cheese".to_owned(),
+//! #         }],
+//! #         vec![fruit::Model {
+//! #             id: 1,
+//! #             name: "Apple".to_owned(),
+//! #             cake_id: Some(1),
+//! #         }]
+//! #     )
+//! # );
+//! # assert_eq!(
+//! #     db.into_transaction_log(),
+//! #     vec![
+//! #         Transaction::from_sql_and_values(
+//! #             DbBackend::Postgres,
+//! #             r#"SELECT "cake"."id", "cake"."name" FROM "cake""#,
+//! #             vec![]
+//! #         ),
+//! #         Transaction::from_sql_and_values(
+//! #             DbBackend::Postgres,
+//! #             r#"SELECT "fruit"."id", "fruit"."name", "fruit"."cake_id" FROM "fruit""#,
+//! #             vec![]
+//! #         ),
+//! #     ]
+//! # );
+//! # Ok(())
+//! # });
+//! ```
 //!
 //! 2. Dynamic
 //!
 //! Built upon [SeaQuery](https://github.com/SeaQL/sea-query), SeaORM allows you to build complex queries without 'fighting the ORM'.
+//!
+//! ```
+//! # use sea_query::Query;
+//! # use sea_orm::{DbConn, error::*, entity::*, query::*, tests_cfg::*};
+//! # async fn function(db: &DbConn) -> Result<(), DbErr> {
+//! // build subquery with ease
+//! let cakes_with_filling: Vec<cake::Model> = cake::Entity::find()
+//!     .filter(
+//!         Condition::any().add(
+//!             cake::Column::Id.in_subquery(
+//!                 Query::select()
+//!                     .column(cake_filling::Column::CakeId)
+//!                     .from(cake_filling::Entity)
+//!                     .to_owned(),
+//!             ),
+//!         ),
+//!     )
+//!     .all(&db)
+//!     .await?;
+//!
+//! # Ok(())
+//! # }
+//! ```
 //!
 //! 3. Testable
 //!
 //! Use mock connections to write unit tests for your logic.
 //!
-//! 4. Service oriented
+//! ```
+//! # use sea_orm::{error::*, entity::*, query::*, tests_cfg::*, DbConn, MockDatabase, Transaction, DbBackend};
+//! # async fn function(db: &DbConn) -> Result<(), DbErr> {
+//! // Setup mock connection
+//! let db = MockDatabase::new(DbBackend::Postgres)
+//!     .append_query_results(vec![
+//!         vec![
+//!             cake::Model {
+//!                 id: 1,
+//!                 name: "New York Cheese".to_owned(),
+//!             },
+//!         ],
+//!     ])
+//!     .into_connection();
 //!
-//! Quickly build services that join, filter, sort and paginate data in APIs.
+//! // Perform your application logic
+//! assert_eq!(
+//!     cake::Entity::find().one(&db).await?,
+//!     Some(cake::Model {
+//!         id: 1,
+//!         name: "New York Cheese".to_owned(),
+//!     })
+//! );
+//!
+//! // Compare it against the expected transaction log
+//! assert_eq!(
+//!     db.into_transaction_log(),
+//!     vec![
+//!         Transaction::from_sql_and_values(
+//!             DbBackend::Postgres,
+//!             r#"SELECT "cake"."id", "cake"."name" FROM "cake" LIMIT $1"#,
+//!             vec![1u64.into()]
+//!         ),
+//!     ]
+//! );
+//! # Ok(())
+//! # }
+//! ```
+//!
+//! 4. Service Oriented
+//!
+//! Quickly build services that join, filter, sort and paginate data in APIs.
+//!
+//! ```ignore
+//! #[get("/?<page>&<posts_per_page>")]
+//! async fn list(
+//!     conn: Connection<Db>,
+//!     page: Option<usize>,
+//!     per_page: Option<usize>,
+//! ) -> Template {
+//!     // Set page number and items per page
+//!     let page = page.unwrap_or(1);
+//!     let per_page = per_page.unwrap_or(10);
+//!
+//!     // Setup paginator
+//!     let paginator = Post::find()
+//!         .order_by_asc(post::Column::Id)
+//!         .paginate(&conn, per_page);
+//!     let num_pages = paginator.num_pages().await.unwrap();
+//!
+//!     // Fetch paginated posts
+//!     let posts = paginator
+//!         .fetch_page(page - 1)
+//!         .await
+//!         .expect("could not retrieve posts");
+//!
+//!     Template::render(
+//!         "index",
+//!         context! {
+//!             page: page,
+//!             per_page: per_page,
+//!             posts: posts,
+//!             num_pages: num_pages,
+//!         },
+//!     )
+//! }
+//! ```
 //!
 //! ## A quick taste of SeaORM
 //!
tests/parallel_tests.rs (108, new file)
@@ -0,0 +1,108 @@
+pub mod common;
+
+pub use common::{bakery_chain::*, setup::*, TestContext};
+use pretty_assertions::assert_eq;
+use sea_orm::{entity::prelude::*, DatabaseConnection, IntoActiveModel, Set};
+
+#[sea_orm_macros::test]
+#[cfg(any(
+    feature = "sqlx-mysql",
+    feature = "sqlx-sqlite",
+    feature = "sqlx-postgres"
+))]
+async fn main() -> Result<(), DbErr> {
+    let ctx = TestContext::new("bakery_chain_parallel_tests").await;
+    crud_in_parallel(&ctx.db).await?;
+    ctx.delete().await;
+
+    Ok(())
+}
+
+pub async fn crud_in_parallel(db: &DatabaseConnection) -> Result<(), DbErr> {
+    let metadata = vec![
+        metadata::Model {
+            uuid: Uuid::new_v4(),
+            key: "markup".to_owned(),
+            value: "1.18".to_owned(),
+            bytes: vec![1, 2, 3],
+        },
+        metadata::Model {
+            uuid: Uuid::new_v4(),
+            key: "exchange_rate".to_owned(),
+            value: "0.78".to_owned(),
+            bytes: vec![1, 2, 3],
+        },
+        metadata::Model {
+            uuid: Uuid::new_v4(),
+            key: "service_charge".to_owned(),
+            value: "1.1".to_owned(),
+            bytes: vec![1, 2, 3],
+        },
+    ];
+
+    let _insert_res = futures::try_join!(
+        metadata[0].clone().into_active_model().insert(db),
+        metadata[1].clone().into_active_model().insert(db),
+        metadata[2].clone().into_active_model().insert(db),
+    )?;
+
+    let find_res = futures::try_join!(
+        Metadata::find_by_id(metadata[0].uuid).one(db),
+        Metadata::find_by_id(metadata[1].uuid).one(db),
+        Metadata::find_by_id(metadata[2].uuid).one(db),
+    )?;
+
+    assert_eq!(
+        metadata,
+        vec![
+            find_res.0.clone().unwrap(),
+            find_res.1.clone().unwrap(),
+            find_res.2.clone().unwrap(),
+        ]
+    );
+
+    let mut active_models = (
+        find_res.0.unwrap().into_active_model(),
+        find_res.1.unwrap().into_active_model(),
+        find_res.2.unwrap().into_active_model(),
+    );
+
+    active_models.0.bytes = Set(vec![0]);
+    active_models.1.bytes = Set(vec![1]);
+    active_models.2.bytes = Set(vec![2]);
+
+    let _update_res = futures::try_join!(
+        active_models.0.clone().update(db),
+        active_models.1.clone().update(db),
+        active_models.2.clone().update(db),
+    )?;
+
+    let find_res = futures::try_join!(
+        Metadata::find_by_id(metadata[0].uuid).one(db),
+        Metadata::find_by_id(metadata[1].uuid).one(db),
+        Metadata::find_by_id(metadata[2].uuid).one(db),
+    )?;
+
+    assert_eq!(
+        vec![
+            active_models.0.bytes.clone().unwrap(),
+            active_models.1.bytes.clone().unwrap(),
+            active_models.2.bytes.clone().unwrap(),
+        ],
+        vec![
+            find_res.0.clone().unwrap().bytes,
+            find_res.1.clone().unwrap().bytes,
+            find_res.2.clone().unwrap().bytes,
+        ]
+    );
+
+    let _delete_res = futures::try_join!(
+        active_models.0.delete(db),
+        active_models.1.delete(db),
+        active_models.2.delete(db),
+    )?;
+
+    assert_eq!(Metadata::find().all(db).await?, vec![]);
+
+    Ok(())
+}