Merge branch 'master' into ss/rocket-example

Sam Samai 2021-08-24 20:58:01 +10:00
commit aebb1a5073
61 changed files with 1050 additions and 262 deletions


@ -1,10 +1,8 @@
name: tests name: tests
on: on:
push:
branches:
- master
pull_request: pull_request:
push:
branches: branches:
- master - master
@ -12,12 +10,15 @@ env:
CARGO_TERM_COLOR: always CARGO_TERM_COLOR: always
jobs: jobs:
test:
name: Unit Test compile:
name: Compile
runs-on: ubuntu-20.04 runs-on: ubuntu-20.04
strategy: strategy:
matrix: matrix:
runtime: [async-std-native-tls, async-std-rustls, actix-native-tls, actix-rustls, tokio-native-tls, tokio-rustls] database: [sqlite, mysql, postgres]
runtime: [async-std, actix, tokio]
tls: [native-tls, rustls]
steps: steps:
- uses: actions/checkout@v2 - uses: actions/checkout@v2
@ -27,32 +28,98 @@ jobs:
toolchain: stable toolchain: stable
override: true override: true
- uses: Swatinem/rust-cache@v1 - uses: actions/cache@v2
with:
path: |
~/.cargo/registry
~/.cargo/git
Cargo.lock
target
key: ${{ github.sha }}-${{ github.run_id }}-${{ runner.os }}-${{ matrix.database }}-${{ matrix.runtime }}-${{ matrix.tls }}
- uses: actions-rs/cargo@v1 - uses: actions-rs/cargo@v1
with: with:
command: build command: test
args: > args: >
--all --features default,sqlx-${{ matrix.database }},runtime-${{ matrix.runtime }}-${{ matrix.tls }}
--features default --no-run
test:
name: Unit Test
runs-on: ubuntu-20.04
steps:
- uses: actions/checkout@v2
- uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
override: true
- uses: actions-rs/cargo@v1 - uses: actions-rs/cargo@v1
with: with:
command: test command: test
args: > args: >
--all --all
--exclude 'sea-orm-example-*'
--features default cli:
name: CLI
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [ubuntu-latest]
steps:
- uses: actions/checkout@v2
- uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
override: true
- uses: actions-rs/cargo@v1
with:
command: install
args: >
--path sea-orm-cli
examples:
name: Examples
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [ubuntu-latest]
steps:
- uses: actions/checkout@v2
- uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
override: true
- uses: actions-rs/cargo@v1
with:
command: build
args: >
--manifest-path examples/async-std/Cargo.toml
- uses: actions-rs/cargo@v1
with:
command: build
args: >
--manifest-path examples/tokio/Cargo.toml
sqlite: sqlite:
name: SQLite name: SQLite
runs-on: ubuntu-20.04 runs-on: ubuntu-20.04
needs: compile
env: env:
DATABASE_URL: "sqlite::memory:" DATABASE_URL: "sqlite::memory:"
strategy: strategy:
matrix: matrix:
# runtime: [async-std-native-tls, async-std-rustls, actix-native-tls, actix-rustls, tokio-native-tls, tokio-rustls] runtime: [async-std, actix, tokio]
runtime: [async-std-native-tls] tls: [native-tls, rustls]
steps: steps:
- uses: actions/checkout@v2 - uses: actions/checkout@v2
@ -62,29 +129,32 @@ jobs:
toolchain: stable toolchain: stable
override: true override: true
- uses: Swatinem/rust-cache@v1 - uses: actions/cache@v2
- uses: actions-rs/cargo@v1
with: with:
command: build path: |
args: > ~/.cargo/registry
--features default,runtime-${{ matrix.runtime }} ~/.cargo/git
Cargo.lock
target
key: ${{ github.sha }}-${{ github.run_id }}-${{ runner.os }}-sqlite-${{ matrix.runtime }}-${{ matrix.tls }}
- uses: actions-rs/cargo@v1 - uses: actions-rs/cargo@v1
with: with:
command: test command: test
args: > args: >
--features default,sqlx-sqlite,runtime-${{ matrix.runtime }} --features default,sqlx-sqlite,runtime-${{ matrix.runtime }}-${{ matrix.tls }}
mysql: mysql:
name: MySQL name: MySQL
runs-on: ubuntu-20.04 runs-on: ubuntu-20.04
needs: compile
env: env:
DATABASE_URL: "mysql://root:@localhost" DATABASE_URL: "mysql://root:@localhost"
strategy: strategy:
matrix: matrix:
version: [8.0, 5.7] version: [8.0, 5.7]
runtime: [async-std-native-tls] runtime: [async-std, actix, tokio]
tls: [native-tls]
services: services:
mysql: mysql:
image: mysql:${{ matrix.version }} image: mysql:${{ matrix.version }}
@ -111,29 +181,32 @@ jobs:
toolchain: stable toolchain: stable
override: true override: true
- uses: Swatinem/rust-cache@v1 - uses: actions/cache@v2
- uses: actions-rs/cargo@v1
with: with:
command: build path: |
args: > ~/.cargo/registry
--features default,runtime-${{ matrix.runtime }} ~/.cargo/git
Cargo.lock
target
key: ${{ github.sha }}-${{ github.run_id }}-${{ runner.os }}-mysql-${{ matrix.runtime }}-${{ matrix.tls }}
- uses: actions-rs/cargo@v1 - uses: actions-rs/cargo@v1
with: with:
command: test command: test
args: > args: >
--features default,sqlx-mysql,runtime-${{ matrix.runtime }} --features default,sqlx-mysql,runtime-${{ matrix.runtime }}-${{ matrix.tls }}
mariadb: mariadb:
name: MariaDB name: MariaDB
runs-on: ubuntu-20.04 runs-on: ubuntu-20.04
needs: compile
env: env:
DATABASE_URL: "mysql://root:@localhost" DATABASE_URL: "mysql://root:@localhost"
strategy: strategy:
matrix: matrix:
version: [10.6] version: [10.6]
runtime: [async-std-native-tls] runtime: [async-std, actix, tokio]
tls: [rustls]
services: services:
mysql: mysql:
image: mariadb:${{ matrix.version }} image: mariadb:${{ matrix.version }}
@ -160,29 +233,32 @@ jobs:
toolchain: stable toolchain: stable
override: true override: true
- uses: Swatinem/rust-cache@v1 - uses: actions/cache@v2
- uses: actions-rs/cargo@v1
with: with:
command: build path: |
args: > ~/.cargo/registry
--features default,runtime-${{ matrix.runtime }} ~/.cargo/git
Cargo.lock
target
key: ${{ github.sha }}-${{ github.run_id }}-${{ runner.os }}-mysql-${{ matrix.runtime }}-${{ matrix.tls }}
- uses: actions-rs/cargo@v1 - uses: actions-rs/cargo@v1
with: with:
command: test command: test
args: > args: >
--features default,sqlx-mysql,runtime-${{ matrix.runtime }} --features default,sqlx-mysql,runtime-${{ matrix.runtime }}-${{ matrix.tls }}
postgres: postgres:
name: Postgres name: Postgres
runs-on: ubuntu-20.04 runs-on: ubuntu-20.04
needs: compile
env: env:
DATABASE_URL: "postgres://root:root@localhost" DATABASE_URL: "postgres://root:root@localhost"
strategy: strategy:
matrix: matrix:
version: [13.3, 12.7, 11.12, 10.17, 9.6.22] version: [13.3, 12.7, 11.12, 10.17, 9.6.22]
runtime: [async-std-native-tls] runtime: [tokio]
tls: [native-tls]
services: services:
postgres: postgres:
image: postgres:${{ matrix.version }} image: postgres:${{ matrix.version }}
@ -206,16 +282,17 @@ jobs:
toolchain: stable toolchain: stable
override: true override: true
- uses: Swatinem/rust-cache@v1 - uses: actions/cache@v2
- uses: actions-rs/cargo@v1
with: with:
command: build path: |
args: > ~/.cargo/registry
--features default,runtime-${{ matrix.runtime }} ~/.cargo/git
Cargo.lock
target
key: ${{ github.sha }}-${{ github.run_id }}-${{ runner.os }}-postgres-${{ matrix.runtime }}-${{ matrix.tls }}
- uses: actions-rs/cargo@v1 - uses: actions-rs/cargo@v1
with: with:
command: test command: test
args: > args: >
--features default,sqlx-postgres,runtime-${{ matrix.runtime }} --features default,sqlx-postgres,runtime-${{ matrix.runtime }}-${{ matrix.tls }}

CHANGELOG.md (new file, 24 lines)

@ -0,0 +1,24 @@
# Changelog
All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](http://keepachangelog.com/)
and this project adheres to [Semantic Versioning](http://semver.org/).
## 0.1.2 - 2021-08-23
- [[#68]] Added `DateTimeWithTimeZone` as supported attribute type
- [[#70]] Generate arbitrary named entity
- [[#80]] Custom column name
- [[#81]] Support join on multiple columns
- [[#99]] Implement FromStr for ColumnTrait
[#68]: https://github.com/SeaQL/sea-orm/issues/68
[#70]: https://github.com/SeaQL/sea-orm/issues/70
[#80]: https://github.com/SeaQL/sea-orm/issues/80
[#81]: https://github.com/SeaQL/sea-orm/issues/81
[#99]: https://github.com/SeaQL/sea-orm/issues/99
## 0.1.1 - 2021-08-08
- Early release of SeaORM
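
To make the [#99] entry above concrete: `Column` enums now implement `FromStr`, accepting both the snake_case and the mixedCase spelling of a column name. A minimal sketch (not part of this commit) against the crate's built-in `tests_cfg::fruit` test entity, mirroring the unit test added further down in this diff:

```rust
use sea_orm::tests_cfg::fruit;
use std::str::FromStr;

fn main() {
    // Both spellings resolve to the same column variant.
    assert!(matches!(
        fruit::Column::from_str("cake_id"),
        Ok(fruit::Column::CakeId)
    ));
    assert!(matches!(
        fruit::Column::from_str("cakeId"),
        Ok(fruit::Column::CakeId)
    ));
}
```

An unrecognized name yields the new `ColumnFromStrErr` error type introduced later in this commit.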


@ -3,13 +3,11 @@ members = [
".", ".",
"sea-orm-macros", "sea-orm-macros",
"sea-orm-codegen", "sea-orm-codegen",
"sea-orm-cli",
"examples/sqlx",
] ]
[package] [package]
name = "sea-orm" name = "sea-orm"
version = "0.1.1" version = "0.1.2"
authors = ["Chris Tsang <tyt2y7@gmail.com>"] authors = ["Chris Tsang <tyt2y7@gmail.com>"]
edition = "2018" edition = "2018"
description = "🐚 An async & dynamic ORM for Rust" description = "🐚 An async & dynamic ORM for Rust"
@ -20,11 +18,7 @@ categories = ["database"]
keywords = ["async", "orm", "mysql", "postgres", "sqlite"] keywords = ["async", "orm", "mysql", "postgres", "sqlite"]
[package.metadata.docs.rs] [package.metadata.docs.rs]
features = [ features = ["default", "sqlx-all", "runtime-async-std-native-tls"]
"default",
"sqlx-all",
"runtime-async-std-native-tls",
]
rustdoc-args = ["--cfg", "docsrs"] rustdoc-args = ["--cfg", "docsrs"]
[lib] [lib]
@ -37,8 +31,8 @@ chrono = { version = "^0", optional = true }
futures = { version = "^0.3" } futures = { version = "^0.3" }
futures-util = { version = "^0.3" } futures-util = { version = "^0.3" }
rust_decimal = { version = "^1", optional = true } rust_decimal = { version = "^1", optional = true }
sea-orm-macros = { version = "^0.1", optional = true } sea-orm-macros = { version = "^0.1.1", optional = true }
sea-query = { version = "^0.12.8" } sea-query = { version = "^0.15", features = ["thread-safe"] }
sea-strum = { version = "^0.21", features = ["derive", "sea-orm"] } sea-strum = { version = "^0.21", features = ["derive", "sea-orm"] }
serde = { version = "^1.0", features = ["derive"] } serde = { version = "^1.0", features = ["derive"] }
sqlx = { version = "^0.5", optional = true } sqlx = { version = "^0.5", optional = true }
@ -48,6 +42,8 @@ serde_json = { version = "^1", optional = true }
uuid = { version = "0.8", features = ["serde", "v4"], optional = true } uuid = { version = "0.8", features = ["serde", "v4"], optional = true }
[dev-dependencies] [dev-dependencies]
smol = { version = "^1.2" }
smol-potat = { version = "^1.1" }
async-std = { version = "^1.9", features = ["attributes"] } async-std = { version = "^1.9", features = ["attributes"] }
tokio = { version = "^1.6", features = ["full"] } tokio = { version = "^1.6", features = ["full"] }
actix-rt = { version = "2.2.0" } actix-rt = { version = "2.2.0" }
@ -69,14 +65,8 @@ macros = ["sea-orm-macros"]
mock = [] mock = []
with-json = ["serde_json", "sea-query/with-json"] with-json = ["serde_json", "sea-query/with-json"]
with-chrono = ["chrono", "sea-query/with-chrono"] with-chrono = ["chrono", "sea-query/with-chrono"]
with-rust_decimal = [ with-rust_decimal = ["rust_decimal", "sea-query/with-rust_decimal"]
"rust_decimal", with-uuid = ["uuid", "sea-query/with-uuid"]
"sea-query/with-rust_decimal",
]
with-uuid = [
"uuid",
"sea-query/with-uuid",
]
sqlx-all = ["sqlx-mysql", "sqlx-postgres", "sqlx-sqlite"] sqlx-all = ["sqlx-mysql", "sqlx-postgres", "sqlx-sqlite"]
sqlx-dep = ["sqlx-json", "sqlx-chrono", "sqlx-decimal", "sqlx-uuid"] sqlx-dep = ["sqlx-json", "sqlx-chrono", "sqlx-decimal", "sqlx-uuid"]
sqlx-json = ["sqlx/json", "with-json"] sqlx-json = ["sqlx/json", "with-json"]


@ -1,5 +1,7 @@
# Design Goals
+We are heavily inspired by ActiveRecord, Eloquent and TypeORM.
1. Intuitive and ergonomic
API should state the intention clearly. Provide syntax sugar for common things.


@ -1,6 +1,6 @@
<div align="center">
-<img src="docs/SeaORM banner.png"/>
+<img src="https://www.sea-ql.org/SeaORM/img/SeaORM banner.png"/>
<h1>SeaORM</h1>
@ -18,30 +18,26 @@
# SeaORM
-Inspired by ActiveRecord, Eloquent and TypeORM, SeaORM aims to provide you an intuitive and ergonomic
-API to make working with databases in Rust a first-class experience.
+SeaORM is a relational ORM to help you build light weight and concurrent web services in Rust.
```markdown
This is an early release of SeaORM, the API is not stable yet.
```
<div align="center">
[![Getting Started](https://img.shields.io/badge/Getting%20Started-blue)](https://www.sea-ql.org/SeaORM/docs/index) [![Getting Started](https://img.shields.io/badge/Getting%20Started-blue)](https://www.sea-ql.org/SeaORM/docs/index)
[![Getting Started](https://img.shields.io/badge/Examples-orange)](https://github.com/SeaQL/sea-orm/tree/master/examples/sqlx) [![Examples](https://img.shields.io/badge/Examples-orange)](https://github.com/SeaQL/sea-orm/tree/master/examples/sqlx)
[![Getting Started](https://img.shields.io/badge/Starter%20Kit-green)](https://github.com/SeaQL/sea-orm/issues/37) [![Starter Kit](https://img.shields.io/badge/Starter%20Kit-green)](https://github.com/SeaQL/sea-orm/issues/37)
[![Discord](https://img.shields.io/discord/873880840487206962?label=Discord)](https://discord.com/invite/uCPdDXzbdv)
</div>
## Features
1. Async
-Relying on SQLx, SeaORM is a new library with async support from day 1.
+Relying on [SQLx](https://github.com/launchbadge/sqlx), SeaORM is a new library with async support from day 1.
2. Dynamic
-Built upon SeaQuery, SeaORM allows you to build complex queries without 'fighting the ORM'.
+Built upon [SeaQuery](https://github.com/SeaQL/sea-query), SeaORM allows you to build complex queries without 'fighting the ORM'.
3. Testable
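
Picking up the "Dynamic" point above, queries are assembled at runtime on top of SeaQuery. A minimal sketch (not part of this commit) using the crate's built-in `tests_cfg::cake` entity; it assumes `DbConn` and the `error::*` items are re-exported at the crate root, as the other examples in this diff do:

```rust
use sea_orm::{entity::*, error::*, query::*, tests_cfg::cake, DbConn};

// Filter, then fetch: the statement is built programmatically, not from a fixed query string.
async fn chocolate_cakes(db: &DbConn) -> Result<Vec<cake::Model>, DbErr> {
    cake::Entity::find()
        .filter(cake::Column::Name.contains("chocolate"))
        .all(db)
        .await
}
```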


@ -1,5 +1,8 @@
+[workspace]
+# A separate workspace
[package]
-name = "sea-orm-example-sqlx"
+name = "sea-orm-example-async-std"
version = "0.1.0"
edition = "2018"
publish = false


@ -66,7 +66,8 @@ async fn find_all(db: &DbConn) -> Result<(), DbErr> {
async fn find_together(db: &DbConn) -> Result<(), DbErr> { async fn find_together(db: &DbConn) -> Result<(), DbErr> {
print!("find cakes and fruits: "); print!("find cakes and fruits: ");
let both = Cake::find().find_also_related(Fruit).all(db).await?; let both: Vec<(cake::Model, Option<fruit::Model>)> =
Cake::find().find_also_related(Fruit).all(db).await?;
println!(); println!();
for bb in both.iter() { for bb in both.iter() {

examples/tokio/Cargo.toml (new file, 12 lines)

@ -0,0 +1,12 @@
[workspace]
# A separate workspace
[package]
name = "sea-orm-example-tokio"
version = "0.1.0"
edition = "2018"
publish = false
[dependencies]
sea-orm = { path = "../../", features = [ "sqlx-all", "runtime-tokio-native-tls" ] }
tokio = { version = "1", features = ["full"] }


@ -0,0 +1,55 @@
use sea_orm::entity::prelude::*;
#[derive(Copy, Clone, Default, Debug, DeriveEntity)]
pub struct Entity;
impl EntityName for Entity {
fn table_name(&self) -> &str {
"cake"
}
}
#[derive(Clone, Debug, PartialEq, DeriveModel, DeriveActiveModel)]
pub struct Model {
pub id: i32,
pub name: String,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveColumn)]
pub enum Column {
Id,
Name,
}
#[derive(Copy, Clone, Debug, EnumIter, DerivePrimaryKey)]
pub enum PrimaryKey {
Id,
}
impl PrimaryKeyTrait for PrimaryKey {
fn auto_increment() -> bool {
true
}
}
#[derive(Copy, Clone, Debug, EnumIter)]
pub enum Relation {}
impl ColumnTrait for Column {
type EntityName = Entity;
fn def(&self) -> ColumnDef {
match self {
Self::Id => ColumnType::Integer.def(),
Self::Name => ColumnType::String(None).def(),
}
}
}
impl RelationTrait for Relation {
fn def(&self) -> RelationDef {
unreachable!()
}
}
impl ActiveModelBehavior for ActiveModel {}


@ -0,0 +1,14 @@
mod cake;
use sea_orm::*;
#[tokio::main]
pub async fn main() {
let db = Database::connect("sql://sea:sea@localhost/bakery")
.await
.unwrap();
tokio::spawn(async move {
cake::Entity::find().one(&db).await.unwrap();
})
.await.unwrap();
}


@ -1,6 +1,9 @@
[workspace]
# A separate workspace
[package] [package]
name = "sea-orm-cli" name = "sea-orm-cli"
version = "0.1.0" version = "0.1.3"
authors = [ "Billy Chan <ccw.billy.123@gmail.com>" ] authors = [ "Billy Chan <ccw.billy.123@gmail.com>" ]
edition = "2018" edition = "2018"
description = "Command line utility for SeaORM" description = "Command line utility for SeaORM"
@ -18,9 +21,9 @@ path = "src/main.rs"
clap = { version = "^2.33.3" } clap = { version = "^2.33.3" }
dotenv = { version = "^0.15" } dotenv = { version = "^0.15" }
async-std = { version = "^1.9", features = [ "attributes" ] } async-std = { version = "^1.9", features = [ "attributes" ] }
sea-orm = { version = "^0.1", features = [ "sqlx-all" ] } sea-orm = { version = "^0.1.2", features = [ "sqlx-all" ] }
sea-orm-codegen = { version = "^0.1" } sea-orm-codegen = { version = "^0.1.3" }
sea-schema = { version = "^0.2.4", default-features = false, features = [ sea-schema = { version = "^0.2.7", default-features = false, features = [
"sqlx-mysql", "sqlx-mysql",
"sqlx-postgres", "sqlx-postgres",
"discovery", "discovery",


@ -9,5 +9,9 @@ cargo run -- -h
Running Entity Generator:
```sh
-cargo run -- entity generate -url mysql://sea:sea@localhost/bakery -schema bakery -o out
+# MySQL (`--database-schema` option is ignored)
+cargo run -- generate entity -u mysql://sea:sea@localhost/bakery -o out
+
+# PostgreSQL
+cargo run -- generate entity -u postgres://sea:sea@localhost/bakery -s public -o out
```


@ -21,8 +21,10 @@ pub fn build_cli() -> App<'static, 'static> {
.long("database-schema") .long("database-schema")
.short("s") .short("s")
.help("Database schema") .help("Database schema")
.long_help("Database schema\n \
- For MySQL, this argument is ignored.\n \
- For PostgreSQL, this argument is optional with default value 'public'.")
.takes_value(true) .takes_value(true)
.required(true)
.env("DATABASE_SCHEMA"), .env("DATABASE_SCHEMA"),
) )
.arg( .arg(
@ -32,6 +34,12 @@ pub fn build_cli() -> App<'static, 'static> {
.help("Entity file output directory") .help("Entity file output directory")
.takes_value(true) .takes_value(true)
.default_value("./"), .default_value("./"),
)
.arg(
Arg::with_name("INCLUDE_HIDDEN_TABLES")
.long("include-hidden-tables")
.help("Generate entity file for hidden tables (i.e. table name starts with an underscore)")
.takes_value(false),
), ),
) )
.setting(AppSettings::SubcommandRequiredElseHelp); .setting(AppSettings::SubcommandRequiredElseHelp);


@ -23,31 +23,43 @@ async fn run_generate_command(matches: &ArgMatches<'_>) -> Result<(), Box<dyn Er
match matches.subcommand() { match matches.subcommand() {
("entity", Some(args)) => { ("entity", Some(args)) => {
let url = args.value_of("DATABASE_URL").unwrap(); let url = args.value_of("DATABASE_URL").unwrap();
let schema = args.value_of("DATABASE_SCHEMA").unwrap();
let output_dir = args.value_of("OUTPUT_DIR").unwrap(); let output_dir = args.value_of("OUTPUT_DIR").unwrap();
let include_hidden_tables = args.is_present("INCLUDE_HIDDEN_TABLES");
let filter_hidden_tables = |table: &str| -> bool {
if include_hidden_tables {
true
} else {
!table.starts_with("_")
}
};
let table_stmts = if url.starts_with("mysql://") { let table_stmts = if url.starts_with("mysql://") {
use sea_schema::mysql::discovery::SchemaDiscovery; use sea_schema::mysql::discovery::SchemaDiscovery;
use sqlx::MySqlPool; use sqlx::MySqlPool;
let url_parts: Vec<&str> = url.split("/").collect();
let schema = url_parts.last().unwrap();
let connection = MySqlPool::connect(url).await?; let connection = MySqlPool::connect(url).await?;
let schema_discovery = SchemaDiscovery::new(connection, schema); let schema_discovery = SchemaDiscovery::new(connection, schema);
let schema = schema_discovery.discover().await; let schema = schema_discovery.discover().await;
schema schema
.tables .tables
.into_iter() .into_iter()
.filter(|schema| filter_hidden_tables(&schema.info.name))
.map(|schema| schema.write()) .map(|schema| schema.write())
.collect() .collect()
} else if url.starts_with("postgres://") { } else if url.starts_with("postgres://") {
use sea_schema::postgres::discovery::SchemaDiscovery; use sea_schema::postgres::discovery::SchemaDiscovery;
use sqlx::PgPool; use sqlx::PgPool;
let schema = args.value_of("DATABASE_SCHEMA").unwrap_or("public");
let connection = PgPool::connect(url).await?; let connection = PgPool::connect(url).await?;
let schema_discovery = SchemaDiscovery::new(connection, schema); let schema_discovery = SchemaDiscovery::new(connection, schema);
let schema = schema_discovery.discover().await; let schema = schema_discovery.discover().await;
schema schema
.tables .tables
.into_iter() .into_iter()
.filter(|schema| filter_hidden_tables(&schema.info.name))
.map(|schema| schema.write()) .map(|schema| schema.write())
.collect() .collect()
} else { } else {


@ -1,6 +1,6 @@
[package] [package]
name = "sea-orm-codegen" name = "sea-orm-codegen"
version = "0.1.1" version = "0.1.3"
authors = ["Billy Chan <ccw.billy.123@gmail.com>"] authors = ["Billy Chan <ccw.billy.123@gmail.com>"]
edition = "2018" edition = "2018"
description = "Code Generator for SeaORM" description = "Code Generator for SeaORM"
@ -15,7 +15,7 @@ name = "sea_orm_codegen"
path = "src/lib.rs" path = "src/lib.rs"
[dependencies] [dependencies]
sea-query = { version = "^0.12.8" } sea-query = { version = "^0.15" }
syn = { version = "^1", default-features = false, features = [ syn = { version = "^1", default-features = false, features = [
"derive", "derive",
"parsing", "parsing",


@ -22,6 +22,7 @@ impl Column {
} }
pub fn get_rs_type(&self) -> TokenStream { pub fn get_rs_type(&self) -> TokenStream {
#[allow(unreachable_patterns)]
let ident: TokenStream = match self.col_type { let ident: TokenStream = match self.col_type {
ColumnType::Char(_) ColumnType::Char(_)
| ColumnType::String(_) | ColumnType::String(_)
@ -284,7 +285,7 @@ mod tests {
#[test] #[test]
fn test_from_column_def() { fn test_from_column_def() {
let column: Column = ColumnDef::new(Alias::new("id")).string().into(); let column: Column = ColumnDef::new(Alias::new("id")).string().to_owned().into();
assert_eq!( assert_eq!(
column.get_def().to_string(), column.get_def().to_string(),
quote! { quote! {
@ -293,13 +294,18 @@ mod tests {
.to_string() .to_string()
); );
let column: Column = ColumnDef::new(Alias::new("id")).string().not_null().into(); let column: Column = ColumnDef::new(Alias::new("id"))
.string()
.not_null()
.to_owned()
.into();
assert!(column.not_null); assert!(column.not_null);
let column: Column = ColumnDef::new(Alias::new("id")) let column: Column = ColumnDef::new(Alias::new("id"))
.string() .string()
.unique_key() .unique_key()
.not_null() .not_null()
.to_owned()
.into(); .into();
assert!(column.unique); assert!(column.unique);
assert!(column.not_null); assert!(column.not_null);
@ -309,6 +315,7 @@ mod tests {
.auto_increment() .auto_increment()
.unique_key() .unique_key()
.not_null() .not_null()
.to_owned()
.into(); .into();
assert!(column.auto_increment); assert!(column.auto_increment);
assert!(column.unique); assert!(column.unique);


@ -34,7 +34,7 @@ impl EntityWriter {
let code_blocks = Self::gen_code_blocks(entity); let code_blocks = Self::gen_code_blocks(entity);
Self::write(&mut lines, code_blocks); Self::write(&mut lines, code_blocks);
OutputFile { OutputFile {
name: format!("{}.rs", entity.table_name), name: format!("{}.rs", entity.get_table_name_snake_case()),
content: lines.join("\n\n"), content: lines.join("\n\n"),
} }
}) })
@ -44,11 +44,18 @@ impl EntityWriter {
pub fn write_mod(&self) -> OutputFile { pub fn write_mod(&self) -> OutputFile {
let mut lines = Vec::new(); let mut lines = Vec::new();
Self::write_doc_comment(&mut lines); Self::write_doc_comment(&mut lines);
let code_blocks = self let code_blocks: Vec<TokenStream> = self
.entities .entities
.iter() .iter()
.map(|entity| Self::gen_mod(entity)) .map(|entity| Self::gen_mod(entity))
.collect(); .collect();
Self::write(
&mut lines,
vec![quote! {
pub mod prelude;
}],
);
lines.push("".to_owned());
Self::write(&mut lines, code_blocks); Self::write(&mut lines, code_blocks);
OutputFile { OutputFile {
name: "mod.rs".to_owned(), name: "mod.rs".to_owned(),
@ -123,11 +130,11 @@ impl EntityWriter {
} }
pub fn gen_impl_entity_name(entity: &Entity) -> TokenStream { pub fn gen_impl_entity_name(entity: &Entity) -> TokenStream {
let table_name_snake_case = entity.get_table_name_snake_case(); let table_name = entity.table_name.as_str();
quote! { quote! {
impl EntityName for Entity { impl EntityName for Entity {
fn table_name(&self) -> &str { fn table_name(&self) -> &str {
#table_name_snake_case #table_name
} }
} }
} }
@ -341,7 +348,7 @@ mod tests {
}], }],
}, },
Entity { Entity {
table_name: "cake_filling".to_owned(), table_name: "_cake_filling_".to_owned(),
columns: vec![ columns: vec![
Column { Column {
name: "cake_id".to_owned(), name: "cake_id".to_owned(),


@ -7,7 +7,7 @@ pub struct Entity;
impl EntityName for Entity { impl EntityName for Entity {
fn table_name(&self) -> &str { fn table_name(&self) -> &str {
"cake_filling" "_cake_filling_"
} }
} }


@ -1,5 +1,7 @@
//! SeaORM Entity. Generated by sea-orm-codegen 0.1.0 //! SeaORM Entity. Generated by sea-orm-codegen 0.1.0
pub mod prelude;
pub mod cake; pub mod cake;
pub mod cake_filling; pub mod cake_filling;
pub mod filling; pub mod filling;


@ -1,6 +1,6 @@
[package] [package]
name = "sea-orm-macros" name = "sea-orm-macros"
version = "0.1.0" version = "0.1.1"
authors = [ "Billy Chan <ccw.billy.123@gmail.com>" ] authors = [ "Billy Chan <ccw.billy.123@gmail.com>" ]
edition = "2018" edition = "2018"
description = "Derive macros for SeaORM" description = "Derive macros for SeaORM"
@ -16,7 +16,7 @@ path = "src/lib.rs"
proc-macro = true proc-macro = true
[dependencies] [dependencies]
syn = { version = "^1", default-features = false, features = [ "derive", "clone-impls", "parsing", "proc-macro", "printing" ] } syn = { version = "^1", default-features = false, features = [ "full", "derive", "clone-impls", "parsing", "proc-macro", "printing" ] }
quote = "^1" quote = "^1"
heck = "^0.3" heck = "^0.3"
proc-macro2 = "^1" proc-macro2 = "^1"


@ -1,9 +1,9 @@
use heck::SnakeCase; use heck::{MixedCase, SnakeCase};
use proc_macro2::{Ident, TokenStream}; use proc_macro2::{Ident, TokenStream};
use quote::{quote, quote_spanned}; use quote::{quote, quote_spanned};
use syn::{Data, DataEnum, Fields, Variant}; use syn::{Data, DataEnum, Fields, Variant};
pub fn expand_derive_column(ident: Ident, data: Data) -> syn::Result<TokenStream> { pub fn impl_default_as_str(ident: &Ident, data: &Data) -> syn::Result<TokenStream> {
let variants = match data { let variants = match data {
syn::Data::Enum(DataEnum { variants, .. }) => variants, syn::Data::Enum(DataEnum { variants, .. }) => variants,
_ => { _ => {
@ -31,14 +31,8 @@ pub fn expand_derive_column(ident: Ident, data: Data) -> syn::Result<TokenStream
.collect(); .collect();
Ok(quote!( Ok(quote!(
impl sea_orm::Iden for #ident { impl #ident {
fn unquoted(&self, s: &mut dyn std::fmt::Write) { fn default_as_str(&self) -> &str {
write!(s, "{}", self.as_str()).unwrap();
}
}
impl sea_orm::IdenStatic for #ident {
fn as_str(&self) -> &str {
match self { match self {
#(Self::#variant => #name),* #(Self::#variant => #name),*
} }
@ -46,3 +40,67 @@ pub fn expand_derive_column(ident: Ident, data: Data) -> syn::Result<TokenStream
} }
)) ))
} }
pub fn impl_col_from_str(ident: &Ident, data: &Data) -> syn::Result<TokenStream> {
let data_enum = match data {
Data::Enum(data_enum) => data_enum,
_ => {
return Ok(quote_spanned! {
ident.span() => compile_error!("you can only derive DeriveColumn on enums");
})
}
};
let columns = data_enum.variants.iter().map(|column| {
let column_iden = column.ident.clone();
let column_str_snake = column_iden.to_string().to_snake_case();
let column_str_mixed = column_iden.to_string().to_mixed_case();
quote!(
#column_str_snake | #column_str_mixed => Ok(#ident::#column_iden)
)
});
Ok(quote!(
impl std::str::FromStr for #ident {
type Err = sea_orm::ColumnFromStrErr;
fn from_str(s: &str) -> Result<Self, Self::Err> {
match s {
#(#columns),*,
_ => Err(sea_orm::ColumnFromStrErr(format!("Failed to parse '{}' as `{}`", s, stringify!(#ident)))),
}
}
}
))
}
pub fn expand_derive_column(ident: &Ident, data: &Data) -> syn::Result<TokenStream> {
let impl_iden = expand_derive_custom_column(ident, data)?;
Ok(quote!(
#impl_iden
impl sea_orm::IdenStatic for #ident {
fn as_str(&self) -> &str {
self.default_as_str()
}
}
))
}
pub fn expand_derive_custom_column(ident: &Ident, data: &Data) -> syn::Result<TokenStream> {
let impl_default_as_str = impl_default_as_str(ident, data)?;
let impl_col_from_str = impl_col_from_str(ident, data)?;
Ok(quote!(
#impl_default_as_str
#impl_col_from_str
impl sea_orm::Iden for #ident {
fn unquoted(&self, s: &mut dyn std::fmt::Write) {
write!(s, "{}", self.as_str()).unwrap();
}
}
))
}


@ -29,7 +29,17 @@ pub fn derive_primary_key(input: TokenStream) -> TokenStream {
pub fn derive_column(input: TokenStream) -> TokenStream { pub fn derive_column(input: TokenStream) -> TokenStream {
let DeriveInput { ident, data, .. } = parse_macro_input!(input); let DeriveInput { ident, data, .. } = parse_macro_input!(input);
match derives::expand_derive_column(ident, data) { match derives::expand_derive_column(&ident, &data) {
Ok(ts) => ts.into(),
Err(e) => e.to_compile_error().into(),
}
}
#[proc_macro_derive(DeriveCustomColumn)]
pub fn derive_custom_column(input: TokenStream) -> TokenStream {
let DeriveInput { ident, data, .. } = parse_macro_input!(input);
match derives::expand_derive_custom_column(&ident, &data) {
Ok(ts) => ts.into(), Ok(ts) => ts.into(),
Err(e) => e.to_compile_error().into(), Err(e) => e.to_compile_error().into(),
} }
@ -74,3 +84,23 @@ pub fn derive_from_query_result(input: TokenStream) -> TokenStream {
Err(e) => e.to_compile_error().into(), Err(e) => e.to_compile_error().into(),
} }
} }
#[doc(hidden)]
#[proc_macro_attribute]
pub fn test(_: TokenStream, input: TokenStream) -> TokenStream {
let input = syn::parse_macro_input!(input as syn::ItemFn);
let ret = &input.sig.output;
let name = &input.sig.ident;
let body = &input.block;
let attrs = &input.attrs;
quote::quote! (
#[test]
#(#attrs)*
fn #name() #ret {
crate::block_on!(async { #body })
}
)
.into()
}


@ -25,6 +25,7 @@ pub trait MockDatabaseTrait: Send {
} }
impl MockDatabaseConnector { impl MockDatabaseConnector {
#[allow(unused_variables)]
pub fn accepts(string: &str) -> bool { pub fn accepts(string: &str) -> bool {
#[cfg(feature = "sqlx-mysql")] #[cfg(feature = "sqlx-mysql")]
if crate::SqlxMySqlConnector::accepts(string) { if crate::SqlxMySqlConnector::accepts(string) {
@ -41,6 +42,7 @@ impl MockDatabaseConnector {
false false
} }
#[allow(unused_variables)]
pub async fn connect(string: &str) -> Result<DatabaseConnection, DbErr> { pub async fn connect(string: &str) -> Result<DatabaseConnection, DbErr> {
macro_rules! connect_mock_db { macro_rules! connect_mock_db {
( $syntax: expr ) => { ( $syntax: expr ) => {


@ -3,7 +3,7 @@ use crate::{
ModelTrait, PrimaryKeyToColumn, PrimaryKeyTrait, QueryFilter, Related, RelationBuilder, ModelTrait, PrimaryKeyToColumn, PrimaryKeyTrait, QueryFilter, Related, RelationBuilder,
RelationTrait, RelationType, Select, Update, UpdateMany, UpdateOne, RelationTrait, RelationType, Select, Update, UpdateMany, UpdateOne,
}; };
use sea_query::{Iden, IntoValueTuple}; use sea_query::{Alias, Iden, IntoIden, IntoTableRef, IntoValueTuple, TableRef};
pub use sea_strum::IntoEnumIterator as Iterable; pub use sea_strum::IntoEnumIterator as Iterable;
use std::fmt::Debug; use std::fmt::Debug;
@ -12,10 +12,21 @@ pub trait IdenStatic: Iden + Copy + Debug + 'static {
} }
pub trait EntityName: IdenStatic + Default { pub trait EntityName: IdenStatic + Default {
fn schema_name(&self) -> Option<&str> {
None
}
fn table_name(&self) -> &str; fn table_name(&self) -> &str;
fn module_name(&self) -> &str { fn module_name(&self) -> &str {
Self::table_name(self) self.table_name()
}
fn table_ref(&self) -> TableRef {
match self.schema_name() {
Some(schema) => (Alias::new(schema).into_iden(), self.into_iden()).into_table_ref(),
None => self.into_table_ref(),
}
} }
} }
@ -96,7 +107,7 @@ pub trait EntityTrait: EntityName {
/// # /// #
/// use sea_orm::{entity::*, query::*, tests_cfg::cake}; /// use sea_orm::{entity::*, query::*, tests_cfg::cake};
/// ///
/// # let _: Result<(), DbErr> = async_std::task::block_on(async { /// # let _: Result<(), DbErr> = smol::block_on(async {
/// # /// #
/// assert_eq!( /// assert_eq!(
/// cake::Entity::find().one(&db).await?, /// cake::Entity::find().one(&db).await?,
@ -159,7 +170,7 @@ pub trait EntityTrait: EntityName {
/// # /// #
/// use sea_orm::{entity::*, query::*, tests_cfg::cake}; /// use sea_orm::{entity::*, query::*, tests_cfg::cake};
/// ///
/// # let _: Result<(), DbErr> = async_std::task::block_on(async { /// # let _: Result<(), DbErr> = smol::block_on(async {
/// # /// #
/// assert_eq!( /// assert_eq!(
/// cake::Entity::find_by_id(11).all(&db).await?, /// cake::Entity::find_by_id(11).all(&db).await?,
@ -196,7 +207,7 @@ pub trait EntityTrait: EntityName {
/// # /// #
/// use sea_orm::{entity::*, query::*, tests_cfg::cake_filling}; /// use sea_orm::{entity::*, query::*, tests_cfg::cake_filling};
/// ///
/// # let _: Result<(), DbErr> = async_std::task::block_on(async { /// # let _: Result<(), DbErr> = smol::block_on(async {
/// # /// #
/// assert_eq!( /// assert_eq!(
/// cake_filling::Entity::find_by_id((2, 3)).all(&db).await?, /// cake_filling::Entity::find_by_id((2, 3)).all(&db).await?,
@ -264,7 +275,7 @@ pub trait EntityTrait: EntityName {
/// ..Default::default() /// ..Default::default()
/// }; /// };
/// ///
/// # let _: Result<(), DbErr> = async_std::task::block_on(async { /// # let _: Result<(), DbErr> = smol::block_on(async {
/// # /// #
/// let insert_result = cake::Entity::insert(apple).exec(&db).await?; /// let insert_result = cake::Entity::insert(apple).exec(&db).await?;
/// ///
@ -315,7 +326,7 @@ pub trait EntityTrait: EntityName {
/// ..Default::default() /// ..Default::default()
/// }; /// };
/// ///
/// # let _: Result<(), DbErr> = async_std::task::block_on(async { /// # let _: Result<(), DbErr> = smol::block_on(async {
/// # /// #
/// let insert_result = cake::Entity::insert_many(vec![apple, orange]).exec(&db).await?; /// let insert_result = cake::Entity::insert_many(vec![apple, orange]).exec(&db).await?;
/// ///
@ -367,7 +378,7 @@ pub trait EntityTrait: EntityName {
/// ..Default::default() /// ..Default::default()
/// }; /// };
/// ///
/// # let _: Result<(), DbErr> = async_std::task::block_on(async { /// # let _: Result<(), DbErr> = smol::block_on(async {
/// # /// #
/// assert_eq!( /// assert_eq!(
/// fruit::Entity::update(orange.clone()).exec(&db).await?, // Clone here because we need to assert_eq /// fruit::Entity::update(orange.clone()).exec(&db).await?, // Clone here because we need to assert_eq
@ -411,10 +422,10 @@ pub trait EntityTrait: EntityName {
/// # /// #
/// use sea_orm::{entity::*, query::*, tests_cfg::fruit, sea_query::{Expr, Value}}; /// use sea_orm::{entity::*, query::*, tests_cfg::fruit, sea_query::{Expr, Value}};
/// ///
/// # let _: Result<(), DbErr> = async_std::task::block_on(async { /// # let _: Result<(), DbErr> = smol::block_on(async {
/// # /// #
/// let update_result = fruit::Entity::update_many() /// let update_result = fruit::Entity::update_many()
/// .col_expr(fruit::Column::CakeId, Expr::value(Value::Null)) /// .col_expr(fruit::Column::CakeId, Expr::value(Value::Int(None)))
/// .filter(fruit::Column::Name.contains("Apple")) /// .filter(fruit::Column::Name.contains("Apple"))
/// .exec(&db) /// .exec(&db)
/// .await?; /// .await?;
@ -427,7 +438,7 @@ pub trait EntityTrait: EntityName {
/// assert_eq!( /// assert_eq!(
/// db.into_transaction_log(), /// db.into_transaction_log(),
/// vec![Transaction::from_sql_and_values( /// vec![Transaction::from_sql_and_values(
/// DbBackend::Postgres, r#"UPDATE "fruit" SET "cake_id" = $1 WHERE "fruit"."name" LIKE $2"#, vec![Value::Null, "%Apple%".into()] /// DbBackend::Postgres, r#"UPDATE "fruit" SET "cake_id" = $1 WHERE "fruit"."name" LIKE $2"#, vec![Value::Int(None), "%Apple%".into()]
/// )]); /// )]);
/// ``` /// ```
fn update_many() -> UpdateMany<Self> { fn update_many() -> UpdateMany<Self> {
@ -460,7 +471,7 @@ pub trait EntityTrait: EntityName {
/// ..Default::default() /// ..Default::default()
/// }; /// };
/// ///
/// # let _: Result<(), DbErr> = async_std::task::block_on(async { /// # let _: Result<(), DbErr> = smol::block_on(async {
/// # /// #
/// let delete_result = fruit::Entity::delete(orange).exec(&db).await?; /// let delete_result = fruit::Entity::delete(orange).exec(&db).await?;
/// ///
@ -503,7 +514,7 @@ pub trait EntityTrait: EntityName {
/// # /// #
/// use sea_orm::{entity::*, query::*, tests_cfg::fruit}; /// use sea_orm::{entity::*, query::*, tests_cfg::fruit};
/// ///
/// # let _: Result<(), DbErr> = async_std::task::block_on(async { /// # let _: Result<(), DbErr> = smol::block_on(async {
/// # /// #
/// let delete_result = fruit::Entity::delete_many() /// let delete_result = fruit::Entity::delete_many()
/// .filter(fruit::Column::Name.contains("Apple")) /// .filter(fruit::Column::Name.contains("Apple"))


@ -1,3 +1,4 @@
use std::str::FromStr;
use crate::{EntityName, IdenStatic, Iterable}; use crate::{EntityName, IdenStatic, Iterable};
use sea_query::{DynIden, Expr, SeaRc, SelectStatement, SimpleExpr, Value}; use sea_query::{DynIden, Expr, SeaRc, SelectStatement, SimpleExpr, Value};
@ -77,7 +78,7 @@ macro_rules! bind_subquery_func {
// LINT: when the operand value does not match column type // LINT: when the operand value does not match column type
/// Wrapper of the identically named method in [`sea_query::Expr`] /// Wrapper of the identically named method in [`sea_query::Expr`]
pub trait ColumnTrait: IdenStatic + Iterable { pub trait ColumnTrait: IdenStatic + Iterable + FromStr {
type EntityName: EntityName; type EntityName: EntityName;
fn def(&self) -> ColumnDef; fn def(&self) -> ColumnDef;
@ -290,6 +291,7 @@ impl From<ColumnType> for sea_query::ColumnType {
impl From<sea_query::ColumnType> for ColumnType { impl From<sea_query::ColumnType> for ColumnType {
fn from(col_type: sea_query::ColumnType) -> Self { fn from(col_type: sea_query::ColumnType) -> Self {
#[allow(unreachable_patterns)]
match col_type { match col_type {
sea_query::ColumnType::Char(s) => Self::Char(s), sea_query::ColumnType::Char(s) => Self::Char(s),
sea_query::ColumnType::String(s) => Self::String(s), sea_query::ColumnType::String(s) => Self::String(s),
@ -347,4 +349,30 @@ mod tests {
.join(" ") .join(" ")
); );
} }
#[test]
fn test_col_from_str() {
use std::str::FromStr;
assert!(matches!(
fruit::Column::from_str("id"),
Ok(fruit::Column::Id)
));
assert!(matches!(
fruit::Column::from_str("name"),
Ok(fruit::Column::Name)
));
assert!(matches!(
fruit::Column::from_str("cake_id"),
Ok(fruit::Column::CakeId)
));
assert!(matches!(
fruit::Column::from_str("cakeId"),
Ok(fruit::Column::CakeId)
));
assert!(matches!(
fruit::Column::from_str("does_not_exist"),
Err(crate::ColumnFromStrErr(_))
));
}
} }


@ -1,17 +1,24 @@
use crate::IdenStatic; use crate::{ColumnTrait, EntityTrait, IdenStatic};
use sea_query::{DynIden, IntoIden}; use sea_query::{DynIden, IntoIden};
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub enum Identity { pub enum Identity {
Unary(DynIden), Unary(DynIden),
Binary(DynIden, DynIden), Binary(DynIden, DynIden),
// Ternary(DynIden, DynIden, DynIden), Ternary(DynIden, DynIden, DynIden),
} }
pub trait IntoIdentity { pub trait IntoIdentity {
fn into_identity(self) -> Identity; fn into_identity(self) -> Identity;
} }
pub trait IdentityOf<E>
where
E: EntityTrait,
{
fn identity_of(self) -> Identity;
}
impl<T> IntoIdentity for T impl<T> IntoIdentity for T
where where
T: IdenStatic, T: IdenStatic,
@ -30,3 +37,44 @@ where
Identity::Binary(self.0.into_iden(), self.1.into_iden()) Identity::Binary(self.0.into_iden(), self.1.into_iden())
} }
} }
impl<T, C, R> IntoIdentity for (T, C, R)
where
T: IdenStatic,
C: IdenStatic,
R: IdenStatic,
{
fn into_identity(self) -> Identity {
Identity::Ternary(self.0.into_iden(), self.1.into_iden(), self.2.into_iden())
}
}
impl<E, C> IdentityOf<E> for C
where
E: EntityTrait<Column = C>,
C: ColumnTrait,
{
fn identity_of(self) -> Identity {
self.into_identity()
}
}
impl<E, C> IdentityOf<E> for (C, C)
where
E: EntityTrait<Column = C>,
C: ColumnTrait,
{
fn identity_of(self) -> Identity {
self.into_identity()
}
}
impl<E, C> IdentityOf<E> for (C, C, C)
where
E: EntityTrait<Column = C>,
C: ColumnTrait,
{
fn identity_of(self) -> Identity {
self.into_identity()
}
}
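
For context, these tuple impls back the "[#81] Support join on multiple columns" changelog entry: a pair of columns collapses into `Identity::Binary`, which the reworked `RelationBuilder::from`/`to` further down in this diff accept. A small sketch (not part of this commit), assuming `Identity` and `IntoIdentity` are re-exported under `sea_orm::entity::*` like the other entity items:

```rust
use sea_orm::{entity::*, tests_cfg::cake_filling};

fn main() {
    // Two columns become a single binary identity, usable as a composite join key.
    let id = (cake_filling::Column::CakeId, cake_filling::Column::FillingId).into_identity();
    assert!(matches!(id, Identity::Binary(_, _)));
}
```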


@ -1,7 +1,7 @@
pub use crate::{ pub use crate::{
error::*, ActiveModelBehavior, ActiveModelTrait, ColumnDef, ColumnTrait, ColumnType, error::*, ActiveModelBehavior, ActiveModelTrait, ColumnDef, ColumnTrait, ColumnType,
DeriveActiveModel, DeriveActiveModelBehavior, DeriveColumn, DeriveEntity, DeriveModel, DeriveActiveModel, DeriveActiveModelBehavior, DeriveColumn, DeriveCustomColumn, DeriveEntity,
DerivePrimaryKey, EntityName, EntityTrait, EnumIter, Iden, IdenStatic, ModelTrait, DeriveModel, DerivePrimaryKey, EntityName, EntityTrait, EnumIter, Iden, IdenStatic, ModelTrait,
PrimaryKeyToColumn, PrimaryKeyTrait, QueryFilter, QueryResult, Related, RelationDef, PrimaryKeyToColumn, PrimaryKeyTrait, QueryFilter, QueryResult, Related, RelationDef,
RelationTrait, Select, Value, RelationTrait, Select, Value,
}; };
@ -12,6 +12,9 @@ pub use serde_json::Value as Json;
#[cfg(feature = "with-chrono")]
pub use chrono::NaiveDateTime as DateTime;
+#[cfg(feature = "with-chrono")]
+pub type DateTimeWithTimeZone = chrono::DateTime<chrono::FixedOffset>;
#[cfg(feature = "with-rust_decimal")]
pub use rust_decimal::Decimal;
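
With the alias exported from the prelude, a timezone-aware value can be typed as `DateTimeWithTimeZone` directly. A small sketch (not part of this commit); it assumes the `with-chrono` feature is enabled and that `chrono` is also a direct dependency of the calling crate:

```rust
use sea_orm::entity::prelude::DateTimeWithTimeZone;

fn main() {
    // DateTimeWithTimeZone is an alias for chrono::DateTime<chrono::FixedOffset>.
    let ts: DateTimeWithTimeZone =
        chrono::DateTime::parse_from_rfc3339("2021-08-24T20:58:01+10:00").unwrap();
    println!("created at {}", ts);
}
```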


@ -1,6 +1,6 @@
use crate::{EntityTrait, Identity, IntoIdentity, Iterable, QuerySelect, Select}; use crate::{EntityTrait, Identity, IdentityOf, Iterable, QuerySelect, Select};
use core::marker::PhantomData; use core::marker::PhantomData;
use sea_query::{DynIden, IntoIden, JoinType}; use sea_query::{JoinType, TableRef};
use std::fmt::Debug; use std::fmt::Debug;
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
@ -30,8 +30,8 @@ where
pub struct RelationDef { pub struct RelationDef {
pub rel_type: RelationType, pub rel_type: RelationType,
pub from_tbl: DynIden, pub from_tbl: TableRef,
pub to_tbl: DynIden, pub to_tbl: TableRef,
pub from_col: Identity, pub from_col: Identity,
pub to_col: Identity, pub to_col: Identity,
} }
@ -43,8 +43,8 @@ where
{ {
entities: PhantomData<(E, R)>, entities: PhantomData<(E, R)>,
rel_type: RelationType, rel_type: RelationType,
from_tbl: DynIden, from_tbl: TableRef,
to_tbl: DynIden, to_tbl: TableRef,
from_col: Option<Identity>, from_col: Option<Identity>,
to_col: Option<Identity>, to_col: Option<Identity>,
} }
@ -71,8 +71,8 @@ where
Self { Self {
entities: PhantomData, entities: PhantomData,
rel_type, rel_type,
from_tbl: from.into_iden(), from_tbl: from.table_ref(),
to_tbl: to.into_iden(), to_tbl: to.table_ref(),
from_col: None, from_col: None,
to_col: None, to_col: None,
} }
@ -89,13 +89,19 @@ where
} }
} }
pub fn from(mut self, identifier: E::Column) -> Self { pub fn from<T>(mut self, identifier: T) -> Self
self.from_col = Some(identifier.into_identity()); where
T: IdentityOf<E>,
{
self.from_col = Some(identifier.identity_of());
self self
} }
pub fn to(mut self, identifier: R::Column) -> Self { pub fn to<T>(mut self, identifier: T) -> Self
self.to_col = Some(identifier.into_identity()); where
T: IdentityOf<R>,
{
self.to_col = Some(identifier.identity_of());
self self
} }
} }


@ -16,3 +16,14 @@ impl std::fmt::Display for DbErr {
} }
} }
} }
#[derive(Debug, Clone)]
pub struct ColumnFromStrErr(pub String);
impl std::error::Error for ColumnFromStrErr {}
impl std::fmt::Display for ColumnFromStrErr {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(f, "{}", self.0.as_str())
}
}


@ -97,7 +97,7 @@ where
/// # use sea_orm::{error::*, MockDatabase, DbBackend}; /// # use sea_orm::{error::*, MockDatabase, DbBackend};
/// # let owned_db = MockDatabase::new(DbBackend::Postgres).into_connection(); /// # let owned_db = MockDatabase::new(DbBackend::Postgres).into_connection();
/// # let db = &owned_db; /// # let db = &owned_db;
/// # let _: Result<(), DbErr> = async_std::task::block_on(async { /// # let _: Result<(), DbErr> = smol::block_on(async {
/// # /// #
/// use sea_orm::{entity::*, query::*, tests_cfg::cake}; /// use sea_orm::{entity::*, query::*, tests_cfg::cake};
/// let mut cake_pages = cake::Entity::find() /// let mut cake_pages = cake::Entity::find()
@ -125,7 +125,7 @@ where
/// # use sea_orm::{error::*, MockDatabase, DbBackend}; /// # use sea_orm::{error::*, MockDatabase, DbBackend};
/// # let owned_db = MockDatabase::new(DbBackend::Postgres).into_connection(); /// # let owned_db = MockDatabase::new(DbBackend::Postgres).into_connection();
/// # let db = &owned_db; /// # let db = &owned_db;
/// # let _: Result<(), DbErr> = async_std::task::block_on(async { /// # let _: Result<(), DbErr> = smol::block_on(async {
/// # /// #
/// use futures::TryStreamExt; /// use futures::TryStreamExt;
/// use sea_orm::{entity::*, query::*, tests_cfg::cake}; /// use sea_orm::{entity::*, query::*, tests_cfg::cake};
@ -203,7 +203,7 @@ mod tests {
(db, num_items) (db, num_items)
} }
#[async_std::test] #[smol_potat::test]
async fn fetch_page() -> Result<(), DbErr> { async fn fetch_page() -> Result<(), DbErr> {
let (db, pages) = setup(); let (db, pages) = setup();
@ -233,7 +233,7 @@ mod tests {
Ok(()) Ok(())
} }
#[async_std::test] #[smol_potat::test]
async fn fetch() -> Result<(), DbErr> { async fn fetch() -> Result<(), DbErr> {
let (db, pages) = setup(); let (db, pages) = setup();
@ -267,7 +267,7 @@ mod tests {
Ok(()) Ok(())
} }
#[async_std::test] #[smol_potat::test]
async fn num_pages() -> Result<(), DbErr> { async fn num_pages() -> Result<(), DbErr> {
let (db, num_items) = setup_num_items(); let (db, num_items) = setup_num_items();
@ -299,7 +299,7 @@ mod tests {
Ok(()) Ok(())
} }
#[async_std::test] #[smol_potat::test]
async fn next_and_cur_page() -> Result<(), DbErr> { async fn next_and_cur_page() -> Result<(), DbErr> {
let (db, _) = setup(); let (db, _) = setup();
@ -315,7 +315,7 @@ mod tests {
Ok(()) Ok(())
} }
#[async_std::test] #[smol_potat::test]
async fn fetch_and_next() -> Result<(), DbErr> { async fn fetch_and_next() -> Result<(), DbErr> {
let (db, pages) = setup(); let (db, pages) = setup();
@ -350,7 +350,7 @@ mod tests {
Ok(()) Ok(())
} }
#[async_std::test] #[smol_potat::test]
async fn into_stream() -> Result<(), DbErr> { async fn into_stream() -> Result<(), DbErr> {
let (db, pages) = setup(); let (db, pages) = setup();


@ -1,6 +1,4 @@
use crate::{debug_print, DbErr}; use crate::{debug_print, DbErr};
use chrono::NaiveDateTime;
use serde_json::Value as Json;
use std::fmt; use std::fmt;
#[derive(Debug)] #[derive(Debug)]
@ -219,8 +217,66 @@ macro_rules! try_getable_mysql {
} }
#[cfg(feature = "sqlx-postgres")] #[cfg(feature = "sqlx-postgres")]
QueryResultRow::SqlxPostgres(_) => { QueryResultRow::SqlxPostgres(_) => {
panic!("{} unsupported by sqlx-postgres", stringify!($type))
}
#[cfg(feature = "sqlx-sqlite")]
QueryResultRow::SqlxSqlite(_) => {
panic!("{} unsupported by sqlx-sqlite", stringify!($type)) panic!("{} unsupported by sqlx-sqlite", stringify!($type))
} }
#[cfg(feature = "mock")]
QueryResultRow::Mock(row) => match row.try_get(column.as_str()) {
Ok(v) => Ok(Some(v)),
Err(e) => {
debug_print!("{:#?}", e.to_string());
Ok(None)
}
},
}
}
}
};
}
macro_rules! try_getable_postgres {
( $type: ty ) => {
impl TryGetable for $type {
fn try_get(res: &QueryResult, pre: &str, col: &str) -> Result<Self, DbErr> {
let column = format!("{}{}", pre, col);
match &res.row {
#[cfg(feature = "sqlx-mysql")]
QueryResultRow::SqlxMySql(_) => {
panic!("{} unsupported by sqlx-mysql", stringify!($type))
}
#[cfg(feature = "sqlx-postgres")]
QueryResultRow::SqlxPostgres(row) => {
use sqlx::Row;
row.try_get(column.as_str())
.map_err(crate::sqlx_error_to_query_err)
}
#[cfg(feature = "sqlx-sqlite")]
QueryResultRow::SqlxSqlite(_) => {
panic!("{} unsupported by sqlx-sqlite", stringify!($type))
}
#[cfg(feature = "mock")]
QueryResultRow::Mock(row) => Ok(row.try_get(column.as_str())?),
}
}
}
impl TryGetable for Option<$type> {
fn try_get(res: &QueryResult, pre: &str, col: &str) -> Result<Self, DbErr> {
let column = format!("{}{}", pre, col);
match &res.row {
#[cfg(feature = "sqlx-mysql")]
QueryResultRow::SqlxMySql(_) => {
panic!("{} unsupported by sqlx-mysql", stringify!($type))
}
#[cfg(feature = "sqlx-postgres")]
QueryResultRow::SqlxPostgres(row) => {
use sqlx::Row;
row.try_get::<Option<$type>, _>(column.as_str())
.map_err(crate::sqlx_error_to_query_err)
}
#[cfg(feature = "sqlx-sqlite")] #[cfg(feature = "sqlx-sqlite")]
QueryResultRow::SqlxSqlite(_) => { QueryResultRow::SqlxSqlite(_) => {
panic!("{} unsupported by sqlx-sqlite", stringify!($type)) panic!("{} unsupported by sqlx-sqlite", stringify!($type))
@ -251,14 +307,15 @@ try_getable_mysql!(u64);
try_getable_all!(f32); try_getable_all!(f32);
try_getable_all!(f64); try_getable_all!(f64);
try_getable_all!(String); try_getable_all!(String);
try_getable_all!(NaiveDateTime);
try_getable_all!(Json);
#[cfg(feature = "with-uuid")] #[cfg(feature = "with-json")]
use uuid::Uuid; try_getable_all!(serde_json::Value);
#[cfg(feature = "with-uuid")] #[cfg(feature = "with-chrono")]
try_getable_all!(Uuid); try_getable_all!(chrono::NaiveDateTime);
#[cfg(feature = "with-chrono")]
try_getable_postgres!(chrono::DateTime<chrono::FixedOffset>);
#[cfg(feature = "with-rust_decimal")] #[cfg(feature = "with-rust_decimal")]
use rust_decimal::Decimal; use rust_decimal::Decimal;
@ -345,3 +402,6 @@ impl TryGetable for Option<Decimal> {
} }
} }
} }
#[cfg(feature = "with-uuid")]
try_getable_all!(uuid::Uuid);


@ -76,33 +76,6 @@ impl<E> Select<E>
where where
E: EntityTrait, E: EntityTrait,
{ {
/// ```
/// # #[cfg(feature = "mock")]
/// # use sea_orm::{error::*, tests_cfg::*, MockDatabase, Transaction, DbBackend};
/// #
/// # let db = MockDatabase::new(DbBackend::Postgres).into_connection();
/// #
/// use sea_orm::{entity::*, query::*, tests_cfg::cake};
///
/// # let _: Result<(), DbErr> = async_std::task::block_on(async {
/// #
/// let cheese: Option<cake::Model> = cake::Entity::find().from_raw_sql(
/// Statement::from_sql_and_values(
/// DbBackend::Postgres, r#"SELECT "cake"."id", "cake"."name" FROM "cake" WHERE "id" = $1"#, vec![1.into()]
/// )
/// ).one(&db).await?;
/// #
/// # Ok(())
/// # });
///
/// assert_eq!(
/// db.into_transaction_log(),
/// vec![
/// Transaction::from_sql_and_values(
/// DbBackend::Postgres, r#"SELECT "cake"."id", "cake"."name" FROM "cake" WHERE "id" = $1"#, vec![1.into()]
/// ),
/// ]);
/// ```
#[allow(clippy::wrong_self_convention)] #[allow(clippy::wrong_self_convention)]
pub fn from_raw_sql(self, stmt: Statement) -> SelectorRaw<SelectModel<E::Model>> { pub fn from_raw_sql(self, stmt: Statement) -> SelectorRaw<SelectModel<E::Model>> {
SelectorRaw { SelectorRaw {
@ -289,6 +262,167 @@ impl<S> SelectorRaw<S>
where where
S: SelectorTrait, S: SelectorTrait,
{ {
/// ```
/// # #[cfg(feature = "mock")]
/// # use sea_orm::{error::*, tests_cfg::*, MockDatabase, Transaction, DbBackend};
/// #
/// # let db = MockDatabase::new(DbBackend::Postgres)
/// # .append_query_results(vec![vec![
/// # maplit::btreemap! {
/// # "name" => Into::<Value>::into("Chocolate Forest"),
/// # "num_of_cakes" => Into::<Value>::into(1),
/// # },
/// # maplit::btreemap! {
/// # "name" => Into::<Value>::into("New York Cheese"),
/// # "num_of_cakes" => Into::<Value>::into(1),
/// # },
/// # ]])
/// # .into_connection();
/// #
/// use sea_orm::{entity::*, query::*, tests_cfg::cake, FromQueryResult};
///
/// #[derive(Debug, PartialEq, FromQueryResult)]
/// struct SelectResult {
/// name: String,
/// num_of_cakes: i32,
/// }
///
/// # let _: Result<(), DbErr> = smol::block_on(async {
/// #
/// let res: Vec<SelectResult> = cake::Entity::find().from_raw_sql(
/// Statement::from_sql_and_values(
/// DbBackend::Postgres, r#"SELECT "cake"."name", count("cake"."id") AS "num_of_cakes" FROM "cake""#, vec![]
/// )
/// )
/// .into_model::<SelectResult>()
/// .all(&db)
/// .await?;
///
/// assert_eq!(
/// res,
/// vec![
/// SelectResult {
/// name: "Chocolate Forest".to_owned(),
/// num_of_cakes: 1,
/// },
/// SelectResult {
/// name: "New York Cheese".to_owned(),
/// num_of_cakes: 1,
/// },
/// ]
/// );
/// #
/// # Ok(())
/// # });
///
/// assert_eq!(
/// db.into_transaction_log(),
/// vec![
/// Transaction::from_sql_and_values(
/// DbBackend::Postgres, r#"SELECT "cake"."name", count("cake"."id") AS "num_of_cakes" FROM "cake""#, vec![]
/// ),
/// ]);
/// ```
pub fn into_model<M>(self) -> SelectorRaw<SelectModel<M>>
where
M: FromQueryResult,
{
SelectorRaw {
stmt: self.stmt,
selector: SelectModel { model: PhantomData },
}
}
/// ```
/// # #[cfg(feature = "mock")]
/// # use sea_orm::{error::*, tests_cfg::*, MockDatabase, Transaction, DbBackend};
/// #
/// # let db = MockDatabase::new(DbBackend::Postgres)
/// # .append_query_results(vec![vec![
/// # maplit::btreemap! {
/// # "name" => Into::<Value>::into("Chocolate Forest"),
/// # "num_of_cakes" => Into::<Value>::into(1),
/// # },
/// # maplit::btreemap! {
/// # "name" => Into::<Value>::into("New York Cheese"),
/// # "num_of_cakes" => Into::<Value>::into(1),
/// # },
/// # ]])
/// # .into_connection();
/// #
/// use sea_orm::{entity::*, query::*, tests_cfg::cake};
///
/// # let _: Result<(), DbErr> = smol::block_on(async {
/// #
/// let res: Vec<serde_json::Value> = cake::Entity::find().from_raw_sql(
/// Statement::from_sql_and_values(
/// DbBackend::Postgres, r#"SELECT "cake"."id", "cake"."name" FROM "cake""#, vec![]
/// )
/// )
/// .into_json()
/// .all(&db)
/// .await?;
///
/// assert_eq!(
/// res,
/// vec![
/// serde_json::json!({
/// "name": "Chocolate Forest",
/// "num_of_cakes": 1,
/// }),
/// serde_json::json!({
/// "name": "New York Cheese",
/// "num_of_cakes": 1,
/// }),
/// ]
/// );
/// #
/// # Ok(())
/// # });
///
/// assert_eq!(
/// db.into_transaction_log(),
/// vec![
/// Transaction::from_sql_and_values(
/// DbBackend::Postgres, r#"SELECT "cake"."id", "cake"."name" FROM "cake""#, vec![]
/// ),
/// ]);
/// ```
#[cfg(feature = "with-json")]
pub fn into_json(self) -> SelectorRaw<SelectModel<JsonValue>> {
SelectorRaw {
stmt: self.stmt,
selector: SelectModel { model: PhantomData },
}
}
/// ```
/// # #[cfg(feature = "mock")]
/// # use sea_orm::{error::*, tests_cfg::*, MockDatabase, Transaction, DbBackend};
/// #
/// # let db = MockDatabase::new(DbBackend::Postgres).into_connection();
/// #
/// use sea_orm::{entity::*, query::*, tests_cfg::cake};
///
/// # let _: Result<(), DbErr> = smol::block_on(async {
/// #
/// let _: Option<cake::Model> = cake::Entity::find().from_raw_sql(
/// Statement::from_sql_and_values(
/// DbBackend::Postgres, r#"SELECT "cake"."id", "cake"."name" FROM "cake" WHERE "id" = $1"#, vec![1.into()]
/// )
/// ).one(&db).await?;
/// #
/// # Ok(())
/// # });
///
/// assert_eq!(
/// db.into_transaction_log(),
/// vec![
/// Transaction::from_sql_and_values(
/// DbBackend::Postgres, r#"SELECT "cake"."id", "cake"."name" FROM "cake" WHERE "id" = $1"#, vec![1.into()]
/// ),
/// ]);
/// ```
pub async fn one(self, db: &DatabaseConnection) -> Result<Option<S::Item>, DbErr> { pub async fn one(self, db: &DatabaseConnection) -> Result<Option<S::Item>, DbErr> {
let row = db.query_one(self.stmt).await?; let row = db.query_one(self.stmt).await?;
match row { match row {
@ -297,6 +431,33 @@ where
} }
} }
/// ```
/// # #[cfg(feature = "mock")]
/// # use sea_orm::{error::*, tests_cfg::*, MockDatabase, Transaction, DbBackend};
/// #
/// # let db = MockDatabase::new(DbBackend::Postgres).into_connection();
/// #
/// use sea_orm::{entity::*, query::*, tests_cfg::cake};
///
/// # let _: Result<(), DbErr> = smol::block_on(async {
/// #
/// let _: Vec<cake::Model> = cake::Entity::find().from_raw_sql(
/// Statement::from_sql_and_values(
/// DbBackend::Postgres, r#"SELECT "cake"."id", "cake"."name" FROM "cake""#, vec![]
/// )
/// ).all(&db).await?;
/// #
/// # Ok(())
/// # });
///
/// assert_eq!(
/// db.into_transaction_log(),
/// vec![
/// Transaction::from_sql_and_values(
/// DbBackend::Postgres, r#"SELECT "cake"."id", "cake"."name" FROM "cake""#, vec![]
/// ),
/// ]);
/// ```
pub async fn all(self, db: &DatabaseConnection) -> Result<Vec<S::Item>, DbErr> { pub async fn all(self, db: &DatabaseConnection) -> Result<Vec<S::Item>, DbErr> {
let rows = db.query_all(self.stmt).await?; let rows = db.query_all(self.stmt).await?;
let mut models = Vec::new(); let mut models = Vec::new();


@ -1,6 +1,6 @@
//! <div align="center">
//!
//! <img src="docs/SeaORM banner.png"/>
//! <img src="https://www.sea-ql.org/SeaORM/img/SeaORM banner.png"/>
//!
//! <h1>SeaORM</h1>
//!
@ -18,30 +18,26 @@
//!
//! # SeaORM
//!
//! Inspired by ActiveRecord, Eloquent and TypeORM, SeaORM aims to provide you an intuitive and ergonomic
//! SeaORM is a relational ORM to help you build lightweight and concurrent web services in Rust.
//! API to make working with databases in Rust a first-class experience.
//!
//! ```markdown
//! This is an early release of SeaORM, the API is not stable yet.
//! ```
//!
//! <div align="center">
//!
//! [![Getting Started](https://img.shields.io/badge/Getting%20Started-blue)](https://www.sea-ql.org/SeaORM/docs/index)
//! [![Getting Started](https://img.shields.io/badge/Examples-orange)](https://github.com/SeaQL/sea-orm/tree/master/examples/sqlx)
//! [![Examples](https://img.shields.io/badge/Examples-orange)](https://github.com/SeaQL/sea-orm/tree/master/examples/sqlx)
//! [![Getting Started](https://img.shields.io/badge/Starter%20Kit-green)](https://github.com/SeaQL/sea-orm/issues/37)
//! [![Starter Kit](https://img.shields.io/badge/Starter%20Kit-green)](https://github.com/SeaQL/sea-orm/issues/37)
//!
//! [![Discord](https://img.shields.io/discord/873880840487206962?label=Discord)](https://discord.com/invite/uCPdDXzbdv)
//! </div>
//!
//! ## Features
//!
//! 1. Async
//!
//! Relying on SQLx, SeaORM is a new library with async support from day 1.
//! Relying on [SQLx](https://github.com/launchbadge/sqlx), SeaORM is a new library with async support from day 1.
//!
//! 2. Dynamic
//!
//! Built upon SeaQuery, SeaORM allows you to build complex queries without 'fighting the ORM'.
//! Built upon [SeaQuery](https://github.com/SeaQL/sea-query), SeaORM allows you to build complex queries without 'fighting the ORM'.
//!
//! 3. Testable
//!
@ -133,7 +129,7 @@
//!
//! // update many: UPDATE "fruit" SET "cake_id" = NULL WHERE "fruit"."name" LIKE '%Apple%'
//! Fruit::update_many()
//! .col_expr(fruit::Column::CakeId, Expr::value(Value::Null))
//! .col_expr(fruit::Column::CakeId, Expr::value(Value::Int(None)))
//! .filter(fruit::Column::Name.contains("Apple"))
//! .exec(db)
//! .await?;
@ -222,8 +218,8 @@ pub use executor::*;
pub use query::*;
pub use sea_orm_macros::{
DeriveActiveModel, DeriveActiveModelBehavior, DeriveColumn, DeriveEntity, DeriveModel,
DeriveActiveModel, DeriveActiveModelBehavior, DeriveColumn, DeriveCustomColumn, DeriveEntity,
DerivePrimaryKey, FromQueryResult,
DeriveModel, DerivePrimaryKey, FromQueryResult,
};
pub use sea_query;
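
The `update_many` hunk above swaps `Expr::value(Value::Null)` for `Expr::value(Value::Int(None))`: a NULL is now written as a typed `None` of the column's value type. A minimal sketch of the resulting builder call (assuming the `fruit` entity from `tests_cfg`, whose `cake_id` is a nullable integer; this only renders the statement, so no live connection is needed):

```rust
// Sketch only: `fruit::Column::CakeId` is Option<i32>, so NULL is expressed as
// the typed `Value::Int(None)` rather than an untyped null.
use sea_orm::sea_query::{Expr, Value};
use sea_orm::{entity::*, query::*, tests_cfg::fruit, DbBackend};

let sql = fruit::Entity::update_many()
    .col_expr(fruit::Column::CakeId, Expr::value(Value::Int(None)))
    .filter(fruit::Column::Name.contains("Apple"))
    .build(DbBackend::Postgres)
    .to_string();

assert_eq!(
    sql,
    r#"UPDATE "fruit" SET "cake_id" = NULL WHERE "fruit"."name" LIKE '%Apple%'"#
);
```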
@ -65,7 +65,7 @@ impl Delete {
{
let myself = DeleteOne {
query: DeleteStatement::new()
.from_table(A::Entity::default().into_iden())
.from_table(A::Entity::default().table_ref())
.to_owned(),
model: model.into_active_model(),
};
@ -2,7 +2,9 @@ use crate::{
ColumnTrait, EntityTrait, Identity, IntoSimpleExpr, Iterable, ModelTrait, PrimaryKeyToColumn,
RelationDef,
};
use sea_query::{Alias, Expr, IntoCondition, SeaRc, SelectExpr, SelectStatement, SimpleExpr};
use sea_query::{
Alias, Expr, IntoCondition, SeaRc, SelectExpr, SelectStatement, SimpleExpr, TableRef,
};
pub use sea_query::{Condition, ConditionalStatement, DynIden, JoinType, Order, OrderedStatement};
// LINT: when the column does not appear in tables selected from
@ -269,8 +271,8 @@ pub trait QueryFilter: Sized {
}
fn join_condition(rel: RelationDef) -> SimpleExpr {
let from_tbl = rel.from_tbl.clone();
let from_tbl = unpack_table_ref(&rel.from_tbl);
let to_tbl = rel.to_tbl.clone();
let to_tbl = unpack_table_ref(&rel.to_tbl);
let owner_keys = rel.from_col;
let foreign_keys = rel.to_col;
@ -283,6 +285,22 @@ fn join_condition(rel: RelationDef) -> SimpleExpr {
.equals(SeaRc::clone(&to_tbl), f1)
.and(Expr::tbl(SeaRc::clone(&from_tbl), o2).equals(SeaRc::clone(&to_tbl), f2))
}
(Identity::Ternary(o1, o2, o3), Identity::Ternary(f1, f2, f3)) => {
Expr::tbl(SeaRc::clone(&from_tbl), o1)
.equals(SeaRc::clone(&to_tbl), f1)
.and(Expr::tbl(SeaRc::clone(&from_tbl), o2).equals(SeaRc::clone(&to_tbl), f2))
.and(Expr::tbl(SeaRc::clone(&from_tbl), o3).equals(SeaRc::clone(&to_tbl), f3))
}
_ => panic!("Owner key and foreign key mismatch"),
}
}
fn unpack_table_ref(table_ref: &TableRef) -> DynIden {
match table_ref {
TableRef::Table(tbl) => SeaRc::clone(tbl),
TableRef::SchemaTable(_, tbl) => SeaRc::clone(tbl),
TableRef::TableAlias(tbl, _) => SeaRc::clone(tbl),
TableRef::SchemaTableAlias(_, tbl, _) => SeaRc::clone(tbl),
TableRef::SubQuery(_, tbl) => SeaRc::clone(tbl),
}
}
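
Why `join_condition` now unpacks the `TableRef`: with a schema-qualified entity the FROM clause uses the `"schema"."table"` form, but the ON condition should reference the bare table identifier. A self-contained sketch of that unpacking, using a hypothetical `bare_table_iden` that mirrors the private helper above (variant shapes taken from the match in this hunk):

```rust
use sea_query::{Alias, DynIden, Iden, IntoIden, SeaRc, TableRef};

// Hypothetical mirror of `unpack_table_ref`: pull the table identifier out of
// a (possibly schema-qualified) TableRef so joins can reference it directly.
fn bare_table_iden(table_ref: &TableRef) -> DynIden {
    match table_ref {
        TableRef::Table(tbl)
        | TableRef::SchemaTable(_, tbl)
        | TableRef::TableAlias(tbl, _)
        | TableRef::SchemaTableAlias(_, tbl, _)
        | TableRef::SubQuery(_, tbl) => SeaRc::clone(tbl),
    }
}

fn main() {
    let table_ref = TableRef::SchemaTable(
        Alias::new("public").into_iden(),
        Alias::new("cake_filling_price").into_iden(),
    );
    // Render the unpacked identifier: the schema prefix is dropped.
    let mut name = String::new();
    bare_table_iden(&table_ref).unquoted(&mut name);
    assert_eq!(name, "cake_filling_price");
}
```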
@ -1,6 +1,6 @@
use crate::{ActiveModelTrait, EntityTrait, IntoActiveModel, Iterable, QueryTrait};
use crate::{ActiveModelTrait, EntityName, EntityTrait, IntoActiveModel, Iterable, QueryTrait};
use core::marker::PhantomData;
use sea_query::{InsertStatement, IntoIden};
use sea_query::InsertStatement;
#[derive(Clone, Debug)]
pub struct Insert<A>
@ -28,7 +28,7 @@ where
pub(crate) fn new() -> Self {
Self {
query: InsertStatement::new()
.into_table(A::Entity::default().into_iden())
.into_table(A::Entity::default().table_ref())
.to_owned(),
columns: Vec::new(),
model: PhantomData,
@ -61,7 +61,7 @@ where
#[cfg(test)]
mod tests {
use crate::tests_cfg::{cake, filling, fruit};
use crate::tests_cfg::{cake, cake_filling, cake_filling_price, filling, fruit};
use crate::{ColumnTrait, DbBackend, EntityTrait, ModelTrait, QueryFilter, QueryTrait};
#[test]
@ -182,4 +182,42 @@ mod tests {
.join(" ")
);
}
#[test]
fn join_8() {
use crate::{Related, Select};
let find_cake_filling_price: Select<cake_filling_price::Entity> =
cake_filling::Entity::find_related();
assert_eq!(
find_cake_filling_price.build(DbBackend::Postgres).to_string(),
[
r#"SELECT "cake_filling_price"."cake_id", "cake_filling_price"."filling_id", "cake_filling_price"."price""#,
r#"FROM "public"."cake_filling_price""#,
r#"INNER JOIN "cake_filling" ON"#,
r#"("cake_filling"."cake_id" = "cake_filling_price"."cake_id") AND"#,
r#"("cake_filling"."filling_id" = "cake_filling_price"."filling_id")"#,
]
.join(" ")
);
}
#[test]
fn join_9() {
use crate::{Related, Select};
let find_cake_filling: Select<cake_filling::Entity> =
cake_filling_price::Entity::find_related();
assert_eq!(
find_cake_filling.build(DbBackend::Postgres).to_string(),
[
r#"SELECT "cake_filling"."cake_id", "cake_filling"."filling_id""#,
r#"FROM "cake_filling""#,
r#"INNER JOIN "public"."cake_filling_price" ON"#,
r#"("cake_filling_price"."cake_id" = "cake_filling"."cake_id") AND"#,
r#"("cake_filling_price"."filling_id" = "cake_filling"."filling_id")"#,
]
.join(" ")
);
}
}
@ -140,11 +140,11 @@ impl FromQueryResult for JsonValue {
#[cfg(feature = "mock")]
mod tests {
use crate::tests_cfg::cake;
use crate::{entity::*, DbBackend, MockDatabase};
use crate::{entity::*, DbBackend, DbErr, MockDatabase};
use sea_query::Value;
#[async_std::test]
#[smol_potat::test]
async fn to_json_1() {
async fn to_json_1() -> Result<(), DbErr> {
let db = MockDatabase::new(DbBackend::Postgres)
.append_query_results(vec![vec![maplit::btreemap! {
"id" => Into::<Value>::into(128), "name" => Into::<Value>::into("apple")
@ -158,5 +158,7 @@ mod tests {
"name": "apple" "name": "apple"
})) }))
); );
Ok(())
}
}
@ -2,7 +2,7 @@ use crate::{ColumnTrait, EntityTrait, Iterable, QueryFilter, QueryOrder, QuerySe
use core::fmt::Debug;
use core::marker::PhantomData;
pub use sea_query::JoinType;
use sea_query::{DynIden, IntoColumnRef, IntoIden, SeaRc, SelectStatement, SimpleExpr};
use sea_query::{DynIden, IntoColumnRef, SeaRc, SelectStatement, SimpleExpr};
#[derive(Clone, Debug)]
pub struct Select<E>
@ -119,7 +119,7 @@ where
}
fn prepare_from(mut self) -> Self {
self.query.from(E::default().into_iden());
self.query.from(E::default().table_ref());
self
}
}
@ -49,7 +49,7 @@ impl Update {
{
let myself = UpdateOne {
query: UpdateStatement::new()
.table(A::Entity::default().into_iden())
.table(A::Entity::default().table_ref())
.to_owned(),
model,
};
@ -75,7 +75,7 @@ impl Update {
E: EntityTrait,
{
UpdateMany {
query: UpdateStatement::new().table(entity.into_iden()).to_owned(),
query: UpdateStatement::new().table(entity.table_ref()).to_owned(),
entity: PhantomData,
}
}
@ -232,7 +232,7 @@ mod tests {
fn update_4() {
assert_eq!(
Update::many(fruit::Entity)
.col_expr(fruit::Column::CakeId, Expr::value(Value::Null))
.col_expr(fruit::Column::CakeId, Expr::value(Value::Int(None)))
.filter(fruit::Column::Id.eq(2))
.build(DbBackend::Postgres)
.to_string(),
@ -66,4 +66,10 @@ impl RelationTrait for Relation {
}
}
impl Related<super::cake_filling_price::Entity> for Entity {
fn to() -> RelationDef {
super::cake_filling_price::Relation::CakeFilling.def().rev()
}
}
impl ActiveModelBehavior for ActiveModel {}
@ -0,0 +1,80 @@
use crate as sea_orm;
use crate::entity::prelude::*;
#[derive(Copy, Clone, Default, Debug, DeriveEntity)]
pub struct Entity;
impl EntityName for Entity {
fn schema_name(&self) -> Option<&str> {
Some("public")
}
fn table_name(&self) -> &str {
"cake_filling_price"
}
}
#[derive(Clone, Debug, PartialEq, DeriveModel, DeriveActiveModel)]
pub struct Model {
pub cake_id: i32,
pub filling_id: i32,
pub price: Decimal,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveColumn)]
pub enum Column {
CakeId,
FillingId,
Price,
}
#[derive(Copy, Clone, Debug, EnumIter, DerivePrimaryKey)]
pub enum PrimaryKey {
CakeId,
FillingId,
}
impl PrimaryKeyTrait for PrimaryKey {
fn auto_increment() -> bool {
false
}
}
#[derive(Copy, Clone, Debug, EnumIter)]
pub enum Relation {
CakeFilling,
}
impl ColumnTrait for Column {
type EntityName = Entity;
fn def(&self) -> ColumnDef {
match self {
Self::CakeId => ColumnType::Integer.def(),
Self::FillingId => ColumnType::Integer.def(),
Self::Price => ColumnType::Decimal(None).def(),
}
}
}
impl RelationTrait for Relation {
fn def(&self) -> RelationDef {
match self {
Self::CakeFilling => Entity::belongs_to(super::cake_filling::Entity)
.from((Column::CakeId, Column::FillingId))
.to((
super::cake_filling::Column::CakeId,
super::cake_filling::Column::FillingId,
))
.into(),
}
}
}
impl Related<super::cake_filling::Entity> for Entity {
fn to() -> RelationDef {
Relation::CakeFilling.def()
}
}
impl ActiveModelBehavior for ActiveModel {}
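
Since `schema_name` returns `Some("public")` for this test entity, plain selects are schema-qualified in the FROM clause (the join tests above show the same prefix). A minimal sketch, building the statement only, with no database involved:

```rust
use sea_orm::{tests_cfg::cake_filling_price, DbBackend, EntityTrait, QueryTrait};

// Render the SQL for a plain find(): the FROM clause carries the schema,
// while the select list references the bare table name.
assert_eq!(
    cake_filling_price::Entity::find()
        .build(DbBackend::Postgres)
        .to_string(),
    r#"SELECT "cake_filling_price"."cake_id", "cake_filling_price"."filling_id", "cake_filling_price"."price" FROM "public"."cake_filling_price""#
);
```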
@ -16,12 +16,25 @@ pub struct Model {
pub name: String,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveColumn)]
// If your column names are not in snake-case, derive `DeriveCustomColumn` here.
#[derive(Copy, Clone, Debug, EnumIter, DeriveCustomColumn)]
pub enum Column {
Id,
Name,
}
// Then, customize each column name here.
impl IdenStatic for Column {
fn as_str(&self) -> &str {
match self {
// Override column names
Self::Id => "id",
// Leave all other columns using default snake-case values
_ => self.default_as_str(),
}
}
}
#[derive(Copy, Clone, Debug, EnumIter, DerivePrimaryKey)]
pub enum PrimaryKey {
Id,
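
To make the `DeriveCustomColumn` comment above concrete: for this entity both variants still resolve to their snake-case names, because the `Id` override happens to match the default; the override only matters when the Rust variant name and the desired SQL identifier diverge. A minimal sketch (assuming `IdenStatic` is re-exported through the entity prelude, as the `impl` above suggests):

```rust
use sea_orm::entity::prelude::*;
use sea_orm::tests_cfg::cake;

// `Id` goes through the explicit match arm, `Name` through default_as_str().
assert_eq!(cake::Column::Id.as_str(), "id");
assert_eq!(cake::Column::Name.as_str(), "name");
```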
@ -2,10 +2,12 @@
pub mod cake;
pub mod cake_filling;
pub mod cake_filling_price;
pub mod filling;
pub mod fruit;
pub use cake::Entity as Cake;
pub use cake_filling::Entity as CakeFilling;
pub use cake_filling_price::Entity as CakeFillingPrice;
pub use filling::Entity as Filling;
pub use fruit::Entity as Fruit;
@ -1,10 +1,10 @@
pub mod common;
#[allow(unused_imports)]
use sea_orm::{entity::*, error::*, sea_query, tests_cfg::*, Database, DbConn};
// DATABASE_URL="sqlite::memory:" cargo test --features sqlx-sqlite,runtime-async-std --test basic
#[cfg_attr(feature = "runtime-async-std", async_std::test)]
#[sea_orm_macros::test]
#[cfg_attr(feature = "runtime-actix", actix_rt::test)]
#[cfg_attr(feature = "runtime-tokio", tokio::test)]
#[cfg(feature = "sqlx-sqlite")] #[cfg(feature = "sqlx-sqlite")]
async fn main() { async fn main() {
use std::env; use std::env;
@ -1,7 +1,9 @@
pub mod setup;
use sea_orm::DatabaseConnection;
pub mod bakery_chain;
pub mod runtime;
pub mod setup;
pub use bakery_chain::*;
use sea_orm::DatabaseConnection;
use std::env;
pub struct TestContext {
tests/common/runtime.rs (new file)
@ -0,0 +1,26 @@
#[cfg(feature = "runtime-async-std")]
#[macro_export]
macro_rules! block_on {
($($expr:tt)*) => {
::async_std::task::block_on( $($expr)* )
};
}
#[cfg(feature = "runtime-actix")]
#[macro_export]
macro_rules! block_on {
($($expr:tt)*) => {
::actix_rt::System::new()
.block_on( $($expr)* )
};
}
#[cfg(feature = "runtime-tokio")]
#[macro_export]
macro_rules! block_on {
($($expr:tt)*) => {
::tokio::runtime::Runtime::new()
.unwrap()
.block_on( $($expr)* )
};
}
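
A sketch of how a test can stay synchronous while still awaiting SeaORM calls through the macro above; the expansion picked depends on which `runtime-*` feature is enabled. The test body and connection URL here are hypothetical:

```rust
#[cfg(feature = "sqlx-sqlite")]
#[test]
fn blocking_smoke_test() {
    // `block_on!` is #[macro_export]ed, so it is reachable at the crate root.
    crate::block_on!(async {
        let db = sea_orm::Database::connect("sqlite::memory:")
            .await
            .expect("connect to in-memory sqlite");
        // ... drive async SeaORM calls on `db` here ...
        drop(db);
    });
}
```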
@ -8,9 +8,7 @@ mod crud;
// Run the test locally:
// DATABASE_URL="mysql://root:root@localhost" cargo test --features sqlx-mysql,runtime-async-std --test crud_tests
// DATABASE_URL="postgres://root:root@localhost" cargo test --features sqlx-postgres,runtime-async-std --test crud_tests
#[cfg_attr(feature = "runtime-async-std", async_std::test)]
#[sea_orm_macros::test]
#[cfg_attr(feature = "runtime-actix", actix_rt::test)]
#[cfg_attr(feature = "runtime-tokio", tokio::test)]
#[cfg(any(
feature = "sqlx-mysql",
feature = "sqlx-sqlite",
@ -6,9 +6,7 @@ pub use common::{bakery_chain::*, setup::*, TestContext};
// Run the test locally:
// DATABASE_URL="mysql://root:@localhost" cargo test --features sqlx-mysql,runtime-async-std --test query_tests
#[cfg_attr(feature = "runtime-async-std", async_std::test)]
#[sea_orm_macros::test]
#[cfg_attr(feature = "runtime-actix", actix_rt::test)]
#[cfg_attr(feature = "runtime-tokio", tokio::test)]
#[cfg(any(
feature = "sqlx-mysql",
feature = "sqlx-sqlite",
@ -23,9 +21,7 @@ pub async fn find_one_with_no_result() {
ctx.delete().await;
}
#[cfg_attr(feature = "runtime-async-std", async_std::test)]
#[sea_orm_macros::test]
#[cfg_attr(feature = "runtime-actix", actix_rt::test)]
#[cfg_attr(feature = "runtime-tokio", tokio::test)]
#[cfg(any(
feature = "sqlx-mysql",
feature = "sqlx-sqlite",
@ -50,9 +46,7 @@ pub async fn find_one_with_result() {
ctx.delete().await;
}
#[cfg_attr(feature = "runtime-async-std", async_std::test)]
#[sea_orm_macros::test]
#[cfg_attr(feature = "runtime-actix", actix_rt::test)]
#[cfg_attr(feature = "runtime-tokio", tokio::test)]
#[cfg(any(
feature = "sqlx-mysql",
feature = "sqlx-sqlite",
@ -67,9 +61,7 @@ pub async fn find_by_id_with_no_result() {
ctx.delete().await;
}
#[cfg_attr(feature = "runtime-async-std", async_std::test)]
#[sea_orm_macros::test]
#[cfg_attr(feature = "runtime-actix", actix_rt::test)]
#[cfg_attr(feature = "runtime-tokio", tokio::test)]
#[cfg(any(
feature = "sqlx-mysql",
feature = "sqlx-sqlite",
@ -98,9 +90,7 @@ pub async fn find_by_id_with_result() {
ctx.delete().await;
}
#[cfg_attr(feature = "runtime-async-std", async_std::test)]
#[sea_orm_macros::test]
#[cfg_attr(feature = "runtime-actix", actix_rt::test)]
#[cfg_attr(feature = "runtime-tokio", tokio::test)]
#[cfg(any(
feature = "sqlx-mysql",
feature = "sqlx-sqlite",
@ -115,9 +105,7 @@ pub async fn find_all_with_no_result() {
ctx.delete().await;
}
#[cfg_attr(feature = "runtime-async-std", async_std::test)]
#[sea_orm_macros::test]
#[cfg_attr(feature = "runtime-actix", actix_rt::test)]
#[cfg_attr(feature = "runtime-tokio", tokio::test)]
#[cfg(any(
feature = "sqlx-mysql",
feature = "sqlx-sqlite",
@ -151,9 +139,7 @@ pub async fn find_all_with_result() {
ctx.delete().await;
}
#[cfg_attr(feature = "runtime-async-std", async_std::test)]
#[sea_orm_macros::test]
#[cfg_attr(feature = "runtime-actix", actix_rt::test)]
#[cfg_attr(feature = "runtime-tokio", tokio::test)]
#[cfg(any(
feature = "sqlx-mysql",
feature = "sqlx-sqlite",
@ -191,9 +177,7 @@ pub async fn find_all_filter_no_result() {
ctx.delete().await;
}
#[cfg_attr(feature = "runtime-async-std", async_std::test)]
#[sea_orm_macros::test]
#[cfg_attr(feature = "runtime-actix", actix_rt::test)]
#[cfg_attr(feature = "runtime-tokio", tokio::test)]
#[cfg(any(
feature = "sqlx-mysql",
feature = "sqlx-sqlite",
@ -8,9 +8,7 @@ pub use common::{bakery_chain::*, setup::*, TestContext};
// Run the test locally:
// DATABASE_URL="mysql://root:@localhost" cargo test --features sqlx-mysql,runtime-async-std --test relational_tests
#[cfg_attr(feature = "runtime-async-std", async_std::test)]
#[sea_orm_macros::test]
#[cfg_attr(feature = "runtime-actix", actix_rt::test)]
#[cfg_attr(feature = "runtime-tokio", tokio::test)]
#[cfg(any(
feature = "sqlx-mysql",
feature = "sqlx-sqlite",
@ -91,9 +89,7 @@ pub async fn left_join() {
ctx.delete().await;
}
#[cfg_attr(feature = "runtime-async-std", async_std::test)]
#[sea_orm_macros::test]
#[cfg_attr(feature = "runtime-actix", actix_rt::test)]
#[cfg_attr(feature = "runtime-tokio", tokio::test)]
#[cfg(any(feature = "sqlx-mysql", feature = "sqlx-postgres"))]
pub async fn right_join() {
let ctx = TestContext::new("test_right_join").await;
@ -174,9 +170,7 @@ pub async fn right_join() {
ctx.delete().await;
}
#[cfg_attr(feature = "runtime-async-std", async_std::test)]
#[sea_orm_macros::test]
#[cfg_attr(feature = "runtime-actix", actix_rt::test)]
#[cfg_attr(feature = "runtime-tokio", tokio::test)]
#[cfg(any(
feature = "sqlx-mysql",
feature = "sqlx-sqlite",
@ -265,9 +259,7 @@ pub async fn inner_join() {
ctx.delete().await;
}
#[cfg_attr(feature = "runtime-async-std", async_std::test)]
#[sea_orm_macros::test]
#[cfg_attr(feature = "runtime-actix", actix_rt::test)]
#[cfg_attr(feature = "runtime-tokio", tokio::test)]
#[cfg(any(
feature = "sqlx-mysql",
feature = "sqlx-sqlite",
@ -371,9 +363,7 @@ pub async fn group_by() {
ctx.delete().await;
}
#[cfg_attr(feature = "runtime-async-std", async_std::test)]
#[sea_orm_macros::test]
#[cfg_attr(feature = "runtime-actix", actix_rt::test)]
#[cfg_attr(feature = "runtime-tokio", tokio::test)]
#[cfg(any(
feature = "sqlx-mysql",
feature = "sqlx-sqlite",
@ -9,9 +9,7 @@ pub use common::{bakery_chain::*, setup::*, TestContext};
// Run the test locally:
// DATABASE_URL="mysql://root:@localhost" cargo test --features sqlx-mysql,runtime-async-std --test sequential_op_tests
#[cfg_attr(feature = "runtime-async-std", async_std::test)]
#[sea_orm_macros::test]
#[cfg_attr(feature = "runtime-actix", actix_rt::test)]
#[cfg_attr(feature = "runtime-tokio", tokio::test)]
#[cfg(any(feature = "sqlx-mysql", feature = "sqlx-postgres"))]
pub async fn test_multiple_operations() {
let ctx = TestContext::new("multiple_sequential_operations").await;