GraphQL based Admin Dashboard with Loco and Seaography (#2297)

* backend

* frontend

* frontend

* README

* CI

* fmt

* docs

* edit

* rename

* Bump seaography to 1.0

* Cargo.lock
Billy Chan 2024-08-09 11:13:45 +08:00 committed by GitHub
parent dbe9e8f586
commit c2fc62a903
84 changed files with 18201 additions and 1125 deletions

View File

@ -243,6 +243,7 @@ jobs:
examples/loco_seaography,
examples/poem_example,
examples/proxy_gluesql_example,
examples/react_admin,
examples/rocket_example,
examples/rocket_okapi_example,
examples/salvo_example,

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@ -26,7 +26,7 @@ include_dir = "0.7"
uuid = { version = "1.6.0", features = ["v4"] }
tracing-subscriber = { version = "0.3.17", features = ["env-filter", "json"] }
seaography = { version = "1.0.0-rc.4", features = ["with-decimal", "with-chrono"] }
seaography = { version = "1.0.0", features = ["with-decimal", "with-chrono"] }
async-graphql = { version = "7.0", features = ["decimal", "chrono", "dataloader", "dynamic-schema"] }
async-graphql-axum = { version = "7.0" }
lazy_static = { version = "1.4" }

File diff suppressed because it is too large

View File

@ -0,0 +1,11 @@
# GraphQL based Admin Dashboard with Loco and Seaography
In this tutorial, we will develop a GraphQL based admin dashboard with [Seaography](https://github.com/SeaQL/seaography) and Loco.
Read the full tutorial [here](https://www.sea-ql.org/blog/2024-08-01-graphql-admin-dashboard-with-loco-seaography/).
If you haven't already, read the first and second tutorials of the series: [Getting Started with Loco & SeaORM](https://www.sea-ql.org/blog/2024-05-28-getting-started-with-loco-seaorm/) and [Adding GraphQL Support to Loco with Seaography](https://www.sea-ql.org/blog/2024-07-01-graphql-support-with-loco-seaography/).
![Screenshot List](Screenshot-List.png)
![Screenshot View](Screenshot-View.png)
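
Below is a minimal, hypothetical sketch (not part of this commit) of what the dashboard queries under the hood. It assumes the development database from `config/development.yaml` is running and seeded, and it reuses the schema builder from `src/graphql/query_root.rs` to execute the same paginated `notes` query that the React admin data provider sends over HTTP.

```rust
use loco_seaography::graphql::query_root;
use sea_orm::Database;

#[tokio::main]
async fn main() -> eyre::Result<()> {
    // Assumption: the default development DATABASE_URL is reachable and seeded.
    let db = Database::connect("postgres://loco:loco@localhost:5432/loco_seaography_development")
        .await?;
    // Build the dynamic GraphQL schema with the same depth/complexity limits as the controller.
    let schema = query_root::schema(db, 1_000, 1_000).expect("GraphQL schema should build");
    // Execute the paginated `notes` query issued by the admin dashboard.
    let res = schema
        .execute(
            r#"query {
                notes(pagination: { page: { limit: 10, page: 0 } }) {
                    nodes { id title content }
                    paginationInfo { pages current offset total }
                }
            }"#,
        )
        .await;
    println!("{}", serde_json::to_string_pretty(&res)?);
    Ok(())
}
```

In the running application, the same query is POSTed by the frontend to the `/api/graphql` endpoint exposed by the backend.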

Binary file not shown. (new image, 817 KiB)

Binary file not shown. (new image, 806 KiB)

View File

@ -0,0 +1,3 @@
[alias]
loco = "run --"
playground = "run --example playground"

View File

@ -0,0 +1,6 @@
POSTGRES_DB=loco_app
POSTGRES_USER=loco
POSTGRES_PASSWORD=loco
DATABASE_URL=postgres://loco:loco@db:5432/loco_app
REDIS_URL=redis://redis:6379
MAILER_HOST=mailer

View File

@ -0,0 +1,8 @@
FROM mcr.microsoft.com/devcontainers/rust:1
RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \
&& apt-get -y install --no-install-recommends postgresql-client \
&& cargo install sea-orm-cli cargo-insta \
&& chown -R vscode /usr/local/cargo
COPY .env /.env

View File

@ -0,0 +1,9 @@
{
"name": "Loco",
"dockerComposeFile": "docker-compose.yml",
"service": "app",
"workspaceFolder": "/workspaces/${localWorkspaceFolderBasename}",
"forwardPorts": [
3000
]
}

View File

@ -0,0 +1,50 @@
version: "3"
services:
app:
build:
context: .
dockerfile: Dockerfile
command: sleep infinity
networks:
- db
- redis
- mailer
volumes:
- ../..:/workspaces:cached
env_file:
- .env
db:
image: postgres:15.3-alpine
restart: unless-stopped
ports:
- 5432:5432
networks:
- db
volumes:
- postgres-data:/var/lib/postgresql/data
env_file:
- .env
redis:
image: redis:latest
restart: unless-stopped
ports:
- 6379:6379
networks:
- redis
mailer:
image: mailtutan/mailtutan:latest
restart: unless-stopped
ports:
- 1080:1080
- 1025:1025
networks:
- mailer
volumes:
postgres-data:
networks:
db:
redis:
mailer:

View File

@ -0,0 +1,107 @@
name: CI
on:
push:
branches:
- master
- main
pull_request:
env:
RUST_TOOLCHAIN: stable
TOOLCHAIN_PROFILE: minimal
jobs:
rustfmt:
name: Check Style
runs-on: ubuntu-latest
permissions:
contents: read
steps:
- name: Checkout the code
uses: actions/checkout@v4
- uses: actions-rs/toolchain@v1
with:
profile: ${{ env.TOOLCHAIN_PROFILE }}
toolchain: ${{ env.RUST_TOOLCHAIN }}
override: true
components: rustfmt
- name: Run cargo fmt
uses: actions-rs/cargo@v1
with:
command: fmt
args: --all -- --check
clippy:
name: Run Clippy
runs-on: ubuntu-latest
permissions:
contents: read
steps:
- name: Checkout the code
uses: actions/checkout@v4
- uses: actions-rs/toolchain@v1
with:
profile: ${{ env.TOOLCHAIN_PROFILE }}
toolchain: ${{ env.RUST_TOOLCHAIN }}
override: true
- name: Setup Rust cache
uses: Swatinem/rust-cache@v2
- name: Run cargo clippy
uses: actions-rs/cargo@v1
with:
command: clippy
args: --all-features -- -D warnings -W clippy::pedantic -W clippy::nursery -W rust-2018-idioms
test:
name: Run Tests
runs-on: ubuntu-latest
permissions:
contents: read
services:
redis:
image: redis
options: >-
--health-cmd "redis-cli ping"
--health-interval 10s
--health-timeout 5s
--health-retries 5
ports:
- "6379:6379"
postgres:
image: postgres
env:
POSTGRES_DB: postgres_test
POSTGRES_USER: postgres
POSTGRES_PASSWORD: postgres
ports:
- "5432:5432"
# Set health checks to wait until postgres has started
options: --health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
steps:
- name: Checkout the code
uses: actions/checkout@v4
- uses: actions-rs/toolchain@v1
with:
profile: ${{ env.TOOLCHAIN_PROFILE }}
toolchain: ${{ env.RUST_TOOLCHAIN }}
override: true
- name: Setup Rust cache
uses: Swatinem/rust-cache@v2
- name: Run cargo test
uses: actions-rs/cargo@v1
with:
command: test
args: --all-features --all
env:
REDIS_URL: redis://localhost:${{job.services.redis.ports[6379]}}
DATABASE_URL: postgres://postgres:postgres@localhost:5432/postgres_test

examples/react_admin/backend/.gitignore
View File

@ -0,0 +1,19 @@
**/config/local.yaml
**/config/*.local.yaml
**/config/production.yaml
# Generated by Cargo
# will have compiled files and executables
debug/
target/
# include cargo lock
!Cargo.lock
# These are backup files generated by rustfmt
**/*.rs.bk
# MSVC Windows builds of rustc generate these, which store debugging information
*.pdb
uploads

View File

@ -0,0 +1,7 @@
max_width = 100
comment_width = 80
wrap_comments = true
imports_granularity = "Crate"
use_small_heuristics = "Default"
group_imports = "StdExternalCrate"
format_strings = true

examples/react_admin/backend/Cargo.lock

File diff suppressed because it is too large

View File

@ -0,0 +1,58 @@
[workspace]
[package]
name = "loco_seaography"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
loco-rs = { version = "0.4.0" }
migration = { path = "migration" }
serde = { version = "1", features = ["derive"] }
serde_json = "1"
eyre = "0.6"
tokio = { version = "1.33.0", default-features = false }
tokio-util = "0.7.11"
async-trait = "0.1.74"
tracing = "0.1.40"
chrono = "0.4"
validator = { version = "0.16" }
axum = { version = "0.7.1", features = ["multipart"] }
include_dir = "0.7"
uuid = { version = "1.6.0", features = ["v4"] }
tracing-subscriber = { version = "0.3.17", features = ["env-filter", "json"] }
seaography = { version = "1.0.0", features = ["with-decimal", "with-chrono"] }
async-graphql = { version = "7.0", features = ["decimal", "chrono", "dataloader", "dynamic-schema"] }
async-graphql-axum = { version = "7.0" }
lazy_static = { version = "1.4" }
tower-service = { version = "0.3" }
[dependencies.sea-orm]
path = "../../../" # remove this line in your own project
version = "1.0.0" # sea-orm version
features = [
"sqlx-sqlite",
"sqlx-postgres",
"runtime-tokio-rustls",
"macros",
]
[[bin]]
name = "loco_seaography-cli"
path = "src/bin/main.rs"
required-features = []
[dev-dependencies]
serial_test = "2.0.0"
rstest = "0.18.2"
loco-rs = { version = "0.4.0", features = ["testing"] }
insta = { version = "1.34.0", features = ["redactions", "yaml", "filters"] }
[patch.crates-io]
sea-orm = { path = "../../../" }
sea-orm-migration = { path = "../../../sea-orm-migration" }

View File

@ -0,0 +1,129 @@
# Loco configuration file documentation
# Application logging configuration
logger:
# Enable or disable logging.
enable: true
# Enable pretty backtrace (sets RUST_BACKTRACE=1)
pretty_backtrace: true
# Log level, options: trace, debug, info, warn or error.
level: debug
# Define the logging format. options: compact, pretty or Json
format: compact
# By default, the logger only shows logs that come from your code or from the `loco` framework.
# Uncomment the line below to override the logger filters and see logs from all third-party libraries.
# override_filter: trace
# Web server configuration
server:
# Port on which the server will listen. the server binding is 0.0.0.0:{PORT}
port: 3000
# The UI hostname or IP address that mailers will point to.
host: http://localhost
# Out of the box middleware configuration. To disable a middleware, change its `enable` field to `false` or comment out the middleware block.
middlewares:
# Enable Etag cache header middleware
etag:
enable: true
# Limits the request payload size. Requests with a payload larger than this limit will be blocked.
limit_payload:
# Enable/Disable the middleware.
enable: true
# The limit size. Can be b, kb, kib, mb, mib, gb, gib.
body_limit: 5mb
# Generating a unique request ID and enhancing logging with additional information such as the start and completion of request processing, latency, status code, and other request details.
logger:
# Enable/Disable the middleware.
enable: true
# When your code panics, the request still returns a 500 status code.
catch_panic:
# Enable/Disable the middleware.
enable: true
# Timeout middleware for incoming requests. Requests that take longer than the configured timeout will be cut off and a 408 status code will be returned.
timeout_request:
# Enable/Disable the middleware.
enable: false
# Duration time in milliseconds.
timeout: 5000
cors:
enable: true
# Set the value of the [`Access-Control-Allow-Origin`][mdn] header
# allow_origins:
# - https://loco.rs
# Set the value of the [`Access-Control-Allow-Headers`][mdn] header
# allow_headers:
# - Content-Type
# Set the value of the [`Access-Control-Allow-Methods`][mdn] header
# allow_methods:
# - POST
# Set the value of the [`Access-Control-Max-Age`][mdn] header in seconds
# max_age: 3600
# Worker Configuration
workers:
# specifies the worker mode. Options:
# - BackgroundQueue - Workers operate asynchronously in the background, processing queued tasks.
# - ForegroundBlocking - Workers operate in the foreground and block until tasks are completed.
# - BackgroundAsync - Workers operate asynchronously in the background, processing tasks with async capabilities.
mode: BackgroundQueue
# Mailer Configuration.
mailer:
# SMTP mailer configuration.
smtp:
# Enable/Disable smtp mailer.
enable: true
# SMTP server host, e.g. localhost, smtp.gmail.com
host: {{ get_env(name="MAILER_HOST", default="localhost") }}
# SMTP server port
port: 1025
# Use secure connection (SSL/TLS).
secure: false
# auth:
# user:
# password:
# Initializers Configuration
# initializers:
# oauth2:
# authorization_code: # Authorization code grant type
# - client_identifier: google # Identifier for the OAuth2 provider. Replace 'google' with your provider's name if different, must be unique within the oauth2 config.
# ... other fields
# Database Configuration
database:
# Database connection URI
uri: {{ get_env(name="DATABASE_URL", default="postgres://loco:loco@localhost:5432/loco_seaography_development") }}
# When enabled, the sql query will be logged.
enable_logging: false
# Set the timeout duration when acquiring a connection.
connect_timeout: 500
# Set the idle duration before closing a connection.
idle_timeout: 500
# Minimum number of connections for a pool.
min_connections: 1
# Maximum number of connections for a pool.
max_connections: 1
# Run migration up when application loaded
auto_migrate: true
# Truncate the database when the application loads. This is a dangerous operation; make sure you use this flag only in dev environments or test mode.
dangerously_truncate: false
# Recreate the schema when the application loads. This is a dangerous operation; make sure you use this flag only in dev environments or test mode.
dangerously_recreate: false
# Redis Configuration
redis:
# Redis connection URI
uri: {{ get_env(name="REDIS_URL", default="redis://127.0.0.1") }}
# Dangerously flush all data in Redis on startup. This is a dangerous operation; make sure you use this flag only in dev environments or test mode.
dangerously_flush: false
# Authentication Configuration
auth:
# JWT authentication
jwt:
# Secret key for token generation and verification
secret: pByQUgg4GmXKAqQQvAGo
# Token expiration time in seconds
expiration: 604800 # 7 days

View File

@ -0,0 +1,22 @@
use eyre::Context;
#[allow(unused_imports)]
use loco_rs::{cli::playground, prelude::*};
use loco_seaography::app::App;
#[tokio::main]
async fn main() -> eyre::Result<()> {
let _ctx = playground::<App>().await.context("playground")?;
// let active_model: articles::ActiveModel = ActiveModel {
// title: Set(Some("how to build apps in 3 steps".to_string())),
// content: Set(Some("use Loco: https://loco.rs".to_string())),
// ..Default::default()
// };
// active_model.insert(&ctx.db).await.unwrap();
// let res = articles::Entity::find().all(&ctx.db).await.unwrap();
// println!("{:?}", res);
println!("welcome to playground. edit me at `examples/playground.rs`");
Ok(())
}

View File

@ -0,0 +1,23 @@
[package]
name = "migration"
version = "0.1.0"
edition = "2021"
publish = false
[lib]
name = "migration"
path = "src/lib.rs"
[dependencies]
async-std = { version = "1", features = ["attributes", "tokio1"] }
loco-rs = { version = "0.4.0" }
[dependencies.sea-orm-migration]
path = "../../../../sea-orm-migration" # remove this line in your own project
version = "1.0.0" # sea-orm-migration version
features = [
# Enable at least one `ASYNC_RUNTIME` and `DATABASE_DRIVER` feature if you want to run migration via CLI.
# View the list of supported features at https://www.sea-ql.org/SeaORM/docs/install-and-config/database-and-async-runtime.
# e.g.
"runtime-tokio-rustls", # `ASYNC_RUNTIME` feature
]

View File

@ -0,0 +1,41 @@
# Running Migrator CLI
- Generate a new migration file
```sh
cargo run -- migrate generate MIGRATION_NAME
```
- Apply all pending migrations
```sh
cargo run
```
```sh
cargo run -- up
```
- Apply first 10 pending migrations
```sh
cargo run -- up -n 10
```
- Rollback last applied migrations
```sh
cargo run -- down
```
- Rollback last 10 applied migrations
```sh
cargo run -- down -n 10
```
- Drop all tables from the database, then reapply all migrations
```sh
cargo run -- fresh
```
- Rollback all applied migrations, then reapply all migrations
```sh
cargo run -- refresh
```
- Rollback all applied migrations
```sh
cargo run -- reset
```
- Check the status of all migrations
```sh
cargo run -- status
```

View File

@ -0,0 +1,20 @@
#![allow(elided_lifetimes_in_paths)]
#![allow(clippy::wildcard_imports)]
pub use sea_orm_migration::prelude::*;
mod m20220101_000001_users;
mod m20231103_114510_notes;
mod m20240520_173001_files;
pub struct Migrator;
#[async_trait::async_trait]
impl MigratorTrait for Migrator {
fn migrations() -> Vec<Box<dyn MigrationTrait>> {
vec![
Box::new(m20220101_000001_users::Migration),
Box::new(m20231103_114510_notes::Migration),
Box::new(m20240520_173001_files::Migration),
]
}
}

View File

@ -0,0 +1,47 @@
use sea_orm_migration::{prelude::*, schema::*};
#[derive(DeriveMigrationName)]
pub struct Migration;
#[async_trait::async_trait]
impl MigrationTrait for Migration {
async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
let table = table_auto(Users::Table)
.col(pk_auto(Users::Id))
.col(uuid(Users::Pid))
.col(string_uniq(Users::Email))
.col(string(Users::Password))
.col(string(Users::ApiKey).unique_key())
.col(string(Users::Name))
.col(string_null(Users::ResetToken))
.col(timestamp_null(Users::ResetSentAt))
.col(string_null(Users::EmailVerificationToken))
.col(timestamp_null(Users::EmailVerificationSentAt))
.col(timestamp_null(Users::EmailVerifiedAt))
.to_owned();
manager.create_table(table).await?;
Ok(())
}
async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
manager
.drop_table(Table::drop().table(Users::Table).to_owned())
.await
}
}
#[derive(Iden)]
pub enum Users {
Table,
Id,
Pid,
Email,
Name,
Password,
ApiKey,
ResetToken,
ResetSentAt,
EmailVerificationToken,
EmailVerificationSentAt,
EmailVerifiedAt,
}

View File

@ -0,0 +1,33 @@
use sea_orm_migration::{prelude::*, schema::*};
#[derive(DeriveMigrationName)]
pub struct Migration;
#[async_trait::async_trait]
impl MigrationTrait for Migration {
async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
manager
.create_table(
table_auto(Notes::Table)
.col(pk_auto(Notes::Id))
.col(string_null(Notes::Title))
.col(string_null(Notes::Content))
.to_owned(),
)
.await
}
async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
manager
.drop_table(Table::drop().table(Notes::Table).to_owned())
.await
}
}
#[derive(DeriveIden)]
pub enum Notes {
Table,
Id,
Title,
Content,
}

View File

@ -0,0 +1,41 @@
use sea_orm_migration::{prelude::*, schema::*};
use super::m20231103_114510_notes::Notes;
#[derive(DeriveMigrationName)]
pub struct Migration;
#[async_trait::async_trait]
impl MigrationTrait for Migration {
async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
manager
.create_table(
table_auto(Files::Table)
.col(pk_auto(Files::Id))
.col(integer(Files::NotesId))
.col(string(Files::FilePath))
.foreign_key(
ForeignKey::create()
.name("FK_files_notes_id")
.from(Files::Table, Files::NotesId)
.to(Notes::Table, Notes::Id),
)
.to_owned(),
)
.await
}
async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
manager
.drop_table(Table::drop().table(Files::Table).to_owned())
.await
}
}
#[derive(DeriveIden)]
pub enum Files {
Table,
Id,
NotesId,
FilePath,
}

View File

@ -0,0 +1,6 @@
use sea_orm_migration::prelude::*;
#[async_std::main]
async fn main() {
cli::run_cli(migration::Migrator).await;
}

View File

@ -0,0 +1,74 @@
use std::path::Path;
use async_trait::async_trait;
use loco_rs::{
app::{AppContext, Hooks},
boot::{create_app, BootResult, StartMode},
controller::AppRoutes,
db::{self, truncate_table},
environment::Environment,
task::Tasks,
worker::{AppWorker, Processor},
Result,
};
use migration::Migrator;
use sea_orm::DatabaseConnection;
use crate::{
controllers,
models::_entities::{notes, users},
tasks,
workers::downloader::DownloadWorker,
};
pub struct App;
#[async_trait]
impl Hooks for App {
fn app_name() -> &'static str {
env!("CARGO_CRATE_NAME")
}
fn app_version() -> String {
format!(
"{} ({})",
env!("CARGO_PKG_VERSION"),
option_env!("BUILD_SHA")
.or(option_env!("GITHUB_SHA"))
.unwrap_or("dev")
)
}
async fn boot(mode: StartMode, environment: &Environment) -> Result<BootResult> {
create_app::<Self, Migrator>(mode, environment).await
}
fn routes(_ctx: &AppContext) -> AppRoutes {
AppRoutes::with_default_routes()
.prefix("/api")
.add_route(controllers::notes::routes())
.add_route(controllers::auth::routes())
.add_route(controllers::user::routes())
.add_route(controllers::files::routes())
.add_route(controllers::graphql::routes())
}
fn connect_workers<'a>(p: &'a mut Processor, ctx: &'a AppContext) {
p.register(DownloadWorker::build(ctx));
}
fn register_tasks(tasks: &mut Tasks) {
tasks.register(tasks::seed::SeedData);
}
async fn truncate(db: &DatabaseConnection) -> Result<()> {
truncate_table(db, users::Entity).await?;
truncate_table(db, notes::Entity).await?;
Ok(())
}
async fn seed(db: &DatabaseConnection, base: &Path) -> Result<()> {
db::seed::<users::ActiveModel>(db, &base.join("users.yaml").display().to_string()).await?;
db::seed::<notes::ActiveModel>(db, &base.join("notes.yaml").display().to_string()).await?;
Ok(())
}
}

View File

@ -0,0 +1,8 @@
use loco_rs::cli;
use loco_seaography::app::App;
use migration::Migrator;
#[tokio::main]
async fn main() -> eyre::Result<()> {
cli::main::<App, Migrator>().await
}

View File

@ -0,0 +1,156 @@
use axum::debug_handler;
use loco_rs::prelude::*;
use serde::{Deserialize, Serialize};
use crate::{
mailers::auth::AuthMailer,
models::{
_entities::users,
users::{LoginParams, RegisterParams},
},
views::auth::LoginResponse,
};
#[derive(Debug, Deserialize, Serialize)]
pub struct VerifyParams {
pub token: String,
}
#[derive(Debug, Deserialize, Serialize)]
pub struct ForgotParams {
pub email: String,
}
#[derive(Debug, Deserialize, Serialize)]
pub struct ResetParams {
pub token: String,
pub password: String,
}
/// Register function creates a new user with the given parameters and sends a
/// welcome email to the user
#[debug_handler]
async fn register(
State(ctx): State<AppContext>,
Json(params): Json<RegisterParams>,
) -> Result<Response> {
let res = users::Model::create_with_password(&ctx.db, &params).await;
let user = match res {
Ok(user) => user,
Err(err) => {
tracing::info!(
message = err.to_string(),
user_email = &params.email,
"could not register user",
);
return format::json(());
}
};
// Skip email verification, all new registrations are considered verified
let _user = user.into_active_model().verified(&ctx.db).await?;
// Skip sending verification email as we don't have a mail server
/*
let user = user
.into_active_model()
.set_email_verification_sent(&ctx.db)
.await?;
AuthMailer::send_welcome(&ctx, &user).await?;
*/
format::json(())
}
/// Verifies a registered user. If the user has not verified their email, they can't log in to
/// the system.
#[debug_handler]
async fn verify(
State(ctx): State<AppContext>,
Json(params): Json<VerifyParams>,
) -> Result<Response> {
let user = users::Model::find_by_verification_token(&ctx.db, &params.token).await?;
if user.email_verified_at.is_some() {
tracing::info!(pid = user.pid.to_string(), "user already verified");
} else {
let active_model = user.into_active_model();
let user = active_model.verified(&ctx.db).await?;
tracing::info!(pid = user.pid.to_string(), "user verified");
}
format::json(())
}
/// In case the user forgot their password, this endpoint generates a forgot-password token
/// and sends an email to the user. If the email is not found in our DB, we still
/// return a valid response for security reasons (not exposing our user DB
/// list).
#[debug_handler]
async fn forgot(
State(ctx): State<AppContext>,
Json(params): Json<ForgotParams>,
) -> Result<Response> {
let Ok(user) = users::Model::find_by_email(&ctx.db, &params.email).await else {
// we don't want to expose our users' emails. If the email is invalid we still
// return success to the caller
return format::json(());
};
let user = user
.into_active_model()
.set_forgot_password_sent(&ctx.db)
.await?;
AuthMailer::forgot_password(&ctx, &user).await?;
format::json(())
}
/// reset user password by the given parameters
#[debug_handler]
async fn reset(State(ctx): State<AppContext>, Json(params): Json<ResetParams>) -> Result<Response> {
let Ok(user) = users::Model::find_by_reset_token(&ctx.db, &params.token).await else {
// we don't want to expose our users' emails. If the email is invalid we still
// return success to the caller
tracing::info!("reset token not found");
return format::json(());
};
user.into_active_model()
.reset_password(&ctx.db, &params.password)
.await?;
format::json(())
}
/// Creates a user login and returns a token
#[debug_handler]
async fn login(State(ctx): State<AppContext>, Json(params): Json<LoginParams>) -> Result<Response> {
let user = users::Model::find_by_email(&ctx.db, &params.email).await?;
let valid = user.verify_password(&params.password);
if !valid {
return unauthorized("unauthorized!");
}
let jwt_secret = ctx.config.get_jwt_config()?;
let token = user
.generate_jwt(&jwt_secret.secret, &jwt_secret.expiration)
.or_else(|_| unauthorized("unauthorized!"))?;
format::json(LoginResponse::new(&user, &token))
}
pub fn routes() -> Routes {
Routes::new()
.prefix("auth")
.add("/register", post(register))
.add("/verify", post(verify))
.add("/login", post(login))
.add("/forgot", post(forgot))
.add("/reset", post(reset))
}

View File

@ -0,0 +1,126 @@
#![allow(clippy::missing_errors_doc)]
#![allow(clippy::unnecessary_struct_initialization)]
#![allow(clippy::unused_async)]
use std::path::PathBuf;
use axum::{body::Body, debug_handler, extract::Multipart};
use loco_rs::prelude::*;
use sea_orm::QueryOrder;
use tokio::{fs, io::AsyncWriteExt};
use tokio_util::io::ReaderStream;
use crate::models::_entities::files;
const UPLOAD_DIR: &str = "./uploads";
#[debug_handler]
pub async fn upload(
_auth: auth::JWT,
Path(notes_id): Path<i32>,
State(ctx): State<AppContext>,
mut multipart: Multipart,
) -> Result<Response> {
// Collect all uploaded files
let mut files = Vec::new();
// Iterate all files in the POST body
while let Some(field) = multipart.next_field().await.map_err(|err| {
tracing::error!(error = ?err,"could not readd multipart");
Error::BadRequest("could not readd multipart".into())
})? {
// Get the file name
let file_name = match field.file_name() {
Some(file_name) => file_name.to_string(),
_ => return Err(Error::BadRequest("file name not found".into())),
};
// Get the file content as bytes
let content = field.bytes().await.map_err(|err| {
tracing::error!(error = ?err,"could not readd bytes");
Error::BadRequest("could not readd bytes".into())
})?;
// Create a folder to store the uploaded file
let now = chrono::offset::Local::now()
.format("%Y%m%d_%H%M%S")
.to_string();
let uuid = uuid::Uuid::new_v4().to_string();
let folder = format!("{now}_{uuid}");
let upload_folder = PathBuf::from(UPLOAD_DIR).join(&folder);
fs::create_dir_all(&upload_folder).await?;
// Write the file into the newly created folder
let path = upload_folder.join(file_name);
let mut f = fs::OpenOptions::new()
.create_new(true)
.write(true)
.open(&path)
.await?;
f.write_all(&content).await?;
f.flush().await?;
// Record the file upload in database
let file = files::ActiveModel {
notes_id: ActiveValue::Set(notes_id),
file_path: ActiveValue::Set(
path.strip_prefix(UPLOAD_DIR)
.unwrap()
.to_str()
.unwrap()
.to_string(),
),
..Default::default()
}
.insert(&ctx.db)
.await?;
files.push(file);
}
format::json(files)
}
#[debug_handler]
pub async fn list(
_auth: auth::JWT,
Path(notes_id): Path<i32>,
State(ctx): State<AppContext>,
) -> Result<Response> {
// Fetch all files uploaded for a specific note
let files = files::Entity::find()
.filter(files::Column::NotesId.eq(notes_id))
.order_by_asc(files::Column::Id)
.all(&ctx.db)
.await?;
format::json(files)
}
#[debug_handler]
pub async fn view(
_auth: auth::JWT,
Path(files_id): Path<i32>,
State(ctx): State<AppContext>,
) -> Result<Response> {
// Fetch the file info from database
let file = files::Entity::find_by_id(files_id)
.one(&ctx.db)
.await?
.expect("File not found");
// Stream the file
let file = fs::File::open(format!("{UPLOAD_DIR}/{}", file.file_path)).await?;
let stream = ReaderStream::new(file);
let body = Body::from_stream(stream);
Ok(format::render().response().body(body)?)
}
pub fn routes() -> Routes {
// Bind the routes
Routes::new()
.prefix("files")
.add("/upload/:notes_id", post(upload))
.add("/list/:notes_id", get(list))
.add("/view/:files_id", get(view))
}

View File

@ -0,0 +1,39 @@
use async_graphql::http::{playground_source, GraphQLPlaygroundConfig};
use axum::{body::Body, extract::Request};
use loco_rs::prelude::*;
use tower_service::Service;
use crate::graphql::query_root;
// GraphQL playground UI
async fn graphql_playground() -> Result<Response> {
// The `GraphQLPlaygroundConfig` takes one parameter,
// which is the URL of the GraphQL handler: `/api/graphql`
let res = playground_source(GraphQLPlaygroundConfig::new("/api/graphql"));
Ok(Response::new(res.into()))
}
async fn graphql_handler(State(ctx): State<AppContext>, req: Request<Body>) -> Result<Response> {
const DEPTH: usize = 1_000;
const COMPLEXITY: usize = 1_000;
// Construct the GraphQL query root
let schema = query_root::schema(ctx.db.clone(), DEPTH, COMPLEXITY).unwrap();
// GraphQL handler
let mut graphql_handler = async_graphql_axum::GraphQL::new(schema);
// Execute GraphQL request and fetch the results
let res = graphql_handler.call(req).await.unwrap();
Ok(res)
}
pub fn routes() -> Routes {
// Define route
Routes::new()
// We put all GraphQL routes behind the `graphql` prefix
.prefix("graphql")
// GraphQL playground page is a GET request
.add("/", get(graphql_playground))
// GraphQL handler is a POST request
.add("/", post(graphql_handler))
}
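
As a usage illustration only (not part of this commit), the sketch below POSTs a query to the route registered above, assuming the backend is running on `localhost:3000` and that `reqwest` (with its `json` feature) and `tokio` are available in a separate scratch client. It mirrors the `getOne` query sent by the React admin data provider.

```rust
use serde_json::{json, Value};

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // POST a GraphQL document to the `/api/graphql` route defined above.
    let body: Value = reqwest::Client::new()
        .post("http://localhost:3000/api/graphql")
        .json(&json!({
            "query": "query { notes(filters: { id: { eq: 1 } }) { nodes { id title content } } }"
        }))
        .send()
        .await?
        .json()
        .await?;
    // Pretty-print the JSON response.
    println!("{body:#}");
    Ok(())
}
```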

View File

@ -0,0 +1,5 @@
pub mod auth;
pub mod files;
pub mod graphql;
pub mod notes;
pub mod user;

View File

@ -0,0 +1,75 @@
#![allow(clippy::missing_errors_doc)]
#![allow(clippy::unnecessary_struct_initialization)]
#![allow(clippy::unused_async)]
use axum::debug_handler;
use loco_rs::prelude::*;
use serde::{Deserialize, Serialize};
use crate::models::_entities::notes::{ActiveModel, Entity, Model};
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct Params {
pub title: Option<String>,
pub content: Option<String>,
}
impl Params {
fn update(&self, item: &mut ActiveModel) {
item.title = Set(self.title.clone());
item.content = Set(self.content.clone());
}
}
async fn load_item(ctx: &AppContext, id: i32) -> Result<Model> {
let item = Entity::find_by_id(id).one(&ctx.db).await?;
item.ok_or_else(|| Error::NotFound)
}
#[debug_handler]
pub async fn list(State(ctx): State<AppContext>) -> Result<Response> {
format::json(Entity::find().all(&ctx.db).await?)
}
#[debug_handler]
pub async fn add(State(ctx): State<AppContext>, Json(params): Json<Params>) -> Result<Response> {
let mut item = ActiveModel {
..Default::default()
};
params.update(&mut item);
let item = item.insert(&ctx.db).await?;
format::json(item)
}
#[debug_handler]
pub async fn update(
Path(id): Path<i32>,
State(ctx): State<AppContext>,
Json(params): Json<Params>,
) -> Result<Response> {
let item = load_item(&ctx, id).await?;
let mut item = item.into_active_model();
params.update(&mut item);
let item = item.update(&ctx.db).await?;
format::json(item)
}
#[debug_handler]
pub async fn remove(Path(id): Path<i32>, State(ctx): State<AppContext>) -> Result<Response> {
load_item(&ctx, id).await?.delete(&ctx.db).await?;
format::empty()
}
#[debug_handler]
pub async fn get_one(Path(id): Path<i32>, State(ctx): State<AppContext>) -> Result<Response> {
format::json(load_item(&ctx, id).await?)
}
pub fn routes() -> Routes {
Routes::new()
.prefix("notes")
.add("/", get(list))
.add("/", post(add))
.add("/:id", get(get_one))
.add("/:id", delete(remove))
.add("/:id", post(update))
}

View File

@ -0,0 +1,14 @@
use axum::debug_handler;
use loco_rs::prelude::*;
use crate::{models::_entities::users, views::user::CurrentResponse};
#[debug_handler]
async fn current(auth: auth::JWT, State(ctx): State<AppContext>) -> Result<Response> {
let user = users::Model::find_by_pid(&ctx.db, &auth.claims.pid).await?;
format::json(CurrentResponse::new(&user))
}
pub fn routes() -> Routes {
Routes::new().prefix("user").add("/current", get(current))
}

View File

@ -0,0 +1,11 @@
---
- id: 1
title: Loco note 1
content: Loco note 1 content
created_at: "2023-11-12T12:34:56.789"
updated_at: "2023-11-12T12:34:56.789"
- id: 2
title: Loco note 2
content: Loco note 2 content
created_at: "2023-11-12T12:34:56.789"
updated_at: "2023-11-12T12:34:56.789"

View File

@ -0,0 +1,17 @@
---
- id: 1
pid: 11111111-1111-1111-1111-111111111111
email: user1@example.com
password: "$argon2id$v=19$m=19456,t=2,p=1$ETQBx4rTgNAZhSaeYZKOZg$eYTdH26CRT6nUJtacLDEboP0li6xUwUF/q5nSlQ8uuc"
api_key: lo-95ec80d7-cb60-4b70-9b4b-9ef74cb88758
name: user1
created_at: "2023-11-12T12:34:56.789"
updated_at: "2023-11-12T12:34:56.789"
- id: 2
pid: 22222222-2222-2222-2222-222222222222
email: user2@example.com
password: "$argon2id$v=19$m=19456,t=2,p=1$ETQBx4rTgNAZhSaeYZKOZg$eYTdH26CRT6nUJtacLDEboP0li6xUwUF/q5nSlQ8uuc"
api_key: lo-153561ca-fa84-4e1b-813a-c62526d0a77e
name: user2
created_at: "2023-11-12T12:34:56.789"
updated_at: "2023-11-12T12:34:56.789"

View File

@ -0,0 +1 @@
pub mod query_root;

View File

@ -0,0 +1,31 @@
use async_graphql::dynamic::*;
use sea_orm::DatabaseConnection;
use seaography::{Builder, BuilderContext};
use crate::models::_entities::*;
lazy_static::lazy_static! { static ref CONTEXT: BuilderContext = BuilderContext::default(); }
pub fn schema(
database: DatabaseConnection,
depth: usize,
complexity: usize,
) -> Result<Schema, SchemaError> {
// Builder of Seaography query root
let mut builder = Builder::new(&CONTEXT, database.clone());
// Register SeaORM entities
seaography::register_entities!(
builder,
// List all models we want to include in the GraphQL endpoint here
[files, notes, users]
);
// Configure async GraphQL limits
let schema = builder
.schema_builder()
// The depth is the number of nesting levels of the field
.limit_depth(depth)
// The complexity is the number of fields in the query
.limit_complexity(complexity);
// Finish up with including SeaORM database connection
schema.data(database).finish()
}

View File

@ -0,0 +1,8 @@
pub mod app;
pub mod controllers;
pub mod graphql;
pub mod mailers;
pub mod models;
pub mod tasks;
pub mod views;
pub mod workers;

View File

@ -0,0 +1,65 @@
// auth mailer
#![allow(non_upper_case_globals)]
use loco_rs::prelude::*;
use serde_json::json;
use crate::models::users;
static welcome: Dir<'_> = include_dir!("src/mailers/auth/welcome");
static forgot: Dir<'_> = include_dir!("src/mailers/auth/forgot");
// #[derive(Mailer)] // -- disabled for faster build speed. it works. but lets
// move on for now.
#[allow(clippy::module_name_repetitions)]
pub struct AuthMailer {}
impl Mailer for AuthMailer {}
impl AuthMailer {
/// Sends a welcome email to the given user
///
/// # Errors
///
/// When sending the email fails
pub async fn send_welcome(ctx: &AppContext, user: &users::Model) -> Result<()> {
Self::mail_template(
ctx,
&welcome,
mailer::Args {
to: user.email.to_string(),
locals: json!({
"name": user.name,
"verifyToken": user.email_verification_token,
"domain": ctx.config.server.full_url()
}),
..Default::default()
},
)
.await?;
Ok(())
}
/// Sending forgot password email
///
/// # Errors
///
/// When sending the email fails
pub async fn forgot_password(ctx: &AppContext, user: &users::Model) -> Result<()> {
Self::mail_template(
ctx,
&forgot,
mailer::Args {
to: user.email.to_string(),
locals: json!({
"name": user.name,
"resetToken": user.reset_token,
"domain": ctx.config.server.full_url()
}),
..Default::default()
},
)
.await?;
Ok(())
}
}

View File

@ -0,0 +1,11 @@
;<html>
<body>
Hey {{name}},
Forgot your password? No worries! You can reset it by clicking the link below:
<a href="http://{{domain}}/reset#{{resetToken}}">Reset Your Password</a>
If you didn't request a password reset, please ignore this email.
Best regards,<br>The Loco Team
</body>
</html>

View File

@ -0,0 +1 @@
Your reset password link

View File

@ -0,0 +1,3 @@
Reset your password with this link:
http://localhost/reset#{{resetToken}}

View File

@ -0,0 +1,13 @@
;<html>
<body>
Dear {{name}},
Welcome to Loco! You can now log in to your account.
Before you get started, please verify your account by clicking the link below:
<a href="http://{{domain}}/verify#{{verifyToken}}">
Verify Your Account
</a>
<p>Best regards,<br>The Loco Team</p>
</body>
</html>

View File

@ -0,0 +1 @@
Welcome {{name}}

View File

@ -0,0 +1,4 @@
Welcome {{name}}, you can now log in.
Verify your account with the link below:
http://localhost/verify#{{verifyToken}}

View File

@ -0,0 +1 @@
pub mod auth;

View File

@ -0,0 +1,39 @@
//! `SeaORM` Entity, @generated by sea-orm-codegen 1.0.0-rc.5
use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "files")]
pub struct Model {
pub created_at: DateTime,
pub updated_at: DateTime,
#[sea_orm(primary_key)]
pub id: i32,
pub notes_id: i32,
pub file_path: String,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(
belongs_to = "super::notes::Entity",
from = "Column::NotesId",
to = "super::notes::Column::Id",
on_update = "NoAction",
on_delete = "NoAction"
)]
Notes,
}
impl Related<super::notes::Entity> for Entity {
fn to() -> RelationDef {
Relation::Notes.def()
}
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelatedEntity)]
pub enum RelatedEntity {
#[sea_orm(entity = "super::notes::Entity")]
Notes,
}

View File

@ -0,0 +1,7 @@
//! `SeaORM` Entity, @generated by sea-orm-codegen 1.0.0-rc.5
pub mod prelude;
pub mod files;
pub mod notes;
pub mod users;

View File

@ -0,0 +1,33 @@
//! `SeaORM` Entity, @generated by sea-orm-codegen 1.0.0-rc.5
use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "notes")]
pub struct Model {
pub created_at: DateTime,
pub updated_at: DateTime,
#[sea_orm(primary_key)]
pub id: i32,
pub title: Option<String>,
pub content: Option<String>,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
#[sea_orm(has_many = "super::files::Entity")]
Files,
}
impl Related<super::files::Entity> for Entity {
fn to() -> RelationDef {
Relation::Files.def()
}
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelatedEntity)]
pub enum RelatedEntity {
#[sea_orm(entity = "super::files::Entity")]
Files,
}

View File

@ -0,0 +1,3 @@
//! `SeaORM` Entity, @generated by sea-orm-codegen 1.0.0-rc.5
pub use super::{files::Entity as Files, notes::Entity as Notes, users::Entity as Users};

View File

@ -0,0 +1,31 @@
//! `SeaORM` Entity, @generated by sea-orm-codegen 1.0.0-rc.5
use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "users")]
pub struct Model {
pub created_at: DateTime,
pub updated_at: DateTime,
#[sea_orm(primary_key)]
pub id: i32,
pub pid: Uuid,
#[sea_orm(unique)]
pub email: String,
pub password: String,
#[sea_orm(unique)]
pub api_key: String,
pub name: String,
pub reset_token: Option<String>,
pub reset_sent_at: Option<DateTime>,
pub email_verification_token: Option<String>,
pub email_verification_sent_at: Option<DateTime>,
pub email_verified_at: Option<DateTime>,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelatedEntity)]
pub enum RelatedEntity {}

View File

@ -0,0 +1,7 @@
use sea_orm::entity::prelude::*;
use super::_entities::files::ActiveModel;
impl ActiveModelBehavior for ActiveModel {
// extend activemodel below (keep comment for generators)
}

View File

@ -0,0 +1,4 @@
pub mod _entities;
pub mod files;
pub mod notes;
pub mod users;

View File

@ -0,0 +1,7 @@
use sea_orm::entity::prelude::*;
use super::_entities::notes::ActiveModel;
impl ActiveModelBehavior for ActiveModel {
// extend activemodel below (keep comment for generators)
}

View File

@ -0,0 +1,267 @@
use async_trait::async_trait;
use chrono::offset::Local;
use loco_rs::{auth::jwt, hash, prelude::*};
use serde::{Deserialize, Serialize};
use uuid::Uuid;
pub use super::_entities::users::{self, ActiveModel, Entity, Model};
#[derive(Debug, Deserialize, Serialize)]
pub struct LoginParams {
pub email: String,
pub password: String,
}
#[derive(Debug, Deserialize, Serialize)]
pub struct RegisterParams {
pub email: String,
pub password: String,
pub name: String,
}
#[derive(Debug, Validate, Deserialize)]
pub struct Validator {
#[validate(length(min = 2, message = "Name must be at least 2 characters long."))]
pub name: String,
#[validate(custom = "validation::is_valid_email")]
pub email: String,
}
impl Validatable for super::_entities::users::ActiveModel {
fn validator(&self) -> Box<dyn Validate> {
Box::new(Validator {
name: self.name.as_ref().to_owned(),
email: self.email.as_ref().to_owned(),
})
}
}
#[async_trait::async_trait]
impl ActiveModelBehavior for super::_entities::users::ActiveModel {
async fn before_save<C>(self, _db: &C, insert: bool) -> Result<Self, DbErr>
where
C: ConnectionTrait,
{
self.validate()?;
if insert {
let mut this = self;
this.pid = ActiveValue::Set(Uuid::new_v4());
this.api_key = ActiveValue::Set(format!("lo-{}", Uuid::new_v4()));
Ok(this)
} else {
Ok(self)
}
}
}
#[async_trait]
impl Authenticable for super::_entities::users::Model {
async fn find_by_api_key(db: &DatabaseConnection, api_key: &str) -> ModelResult<Self> {
let user = users::Entity::find()
.filter(users::Column::ApiKey.eq(api_key))
.one(db)
.await?;
user.ok_or_else(|| ModelError::EntityNotFound)
}
async fn find_by_claims_key(db: &DatabaseConnection, claims_key: &str) -> ModelResult<Self> {
Self::find_by_pid(db, claims_key).await
}
}
impl super::_entities::users::Model {
/// finds a user by the provided email
///
/// # Errors
///
/// When could not find user by the given token or DB query error
pub async fn find_by_email(db: &DatabaseConnection, email: &str) -> ModelResult<Self> {
let user = users::Entity::find()
.filter(users::Column::Email.eq(email))
.one(db)
.await?;
user.ok_or_else(|| ModelError::EntityNotFound)
}
/// finds a user by the provided verification token
///
/// # Errors
///
/// When could not find user by the given token or DB query error
pub async fn find_by_verification_token(
db: &DatabaseConnection,
token: &str,
) -> ModelResult<Self> {
let user = users::Entity::find()
.filter(users::Column::EmailVerificationToken.eq(token))
.one(db)
.await?;
user.ok_or_else(|| ModelError::EntityNotFound)
}
/// finds a user by the provided reset token
///
/// # Errors
///
/// When could not find user by the given token or DB query error
pub async fn find_by_reset_token(db: &DatabaseConnection, token: &str) -> ModelResult<Self> {
let user = users::Entity::find()
.filter(users::Column::ResetToken.eq(token))
.one(db)
.await?;
user.ok_or_else(|| ModelError::EntityNotFound)
}
/// finds a user by the provided pid
///
/// # Errors
///
/// When could not find user or DB query error
pub async fn find_by_pid(db: &DatabaseConnection, pid: &str) -> ModelResult<Self> {
let parse_uuid = Uuid::parse_str(pid).map_err(|e| ModelError::Any(e.into()))?;
let user = users::Entity::find()
.filter(users::Column::Pid.eq(parse_uuid))
.one(db)
.await?;
user.ok_or_else(|| ModelError::EntityNotFound)
}
/// finds a user by the provided api key
///
/// # Errors
///
/// When could not find user by the given token or DB query error
pub async fn find_by_api_key(db: &DatabaseConnection, api_key: &str) -> ModelResult<Self> {
let user = users::Entity::find()
.filter(users::Column::ApiKey.eq(api_key))
.one(db)
.await?;
user.ok_or_else(|| ModelError::EntityNotFound)
}
/// Verifies whether the provided plain password matches the hashed password
///
/// # Errors
///
/// when could not verify password
#[must_use]
pub fn verify_password(&self, password: &str) -> bool {
hash::verify_password(password, &self.password)
}
/// Asynchronously creates a user with a password and saves it to the
/// database.
///
/// # Errors
///
/// When could not save the user into the DB
pub async fn create_with_password(
db: &DatabaseConnection,
params: &RegisterParams,
) -> ModelResult<Self> {
let txn = db.begin().await?;
if users::Entity::find()
.filter(users::Column::Email.eq(&params.email))
.one(&txn)
.await?
.is_some()
{
return Err(ModelError::EntityAlreadyExists {});
}
let password_hash =
hash::hash_password(&params.password).map_err(|e| ModelError::Any(e.into()))?;
let user = users::ActiveModel {
email: ActiveValue::set(params.email.to_string()),
password: ActiveValue::set(password_hash),
name: ActiveValue::set(params.name.to_string()),
..Default::default()
}
.insert(&txn)
.await?;
txn.commit().await?;
Ok(user)
}
/// Creates a JWT
///
/// # Errors
///
/// when could not convert user claims to jwt token
pub fn generate_jwt(&self, secret: &str, expiration: &u64) -> ModelResult<String> {
Ok(jwt::JWT::new(secret).generate_token(expiration, self.pid.to_string())?)
}
}
impl super::_entities::users::ActiveModel {
/// Sets the email verification information for the user and
/// updates it in the database.
///
/// This method is used to record the timestamp when the email verification
/// was sent and generate a unique verification token for the user.
///
/// # Errors
///
/// when has DB query error
pub async fn set_email_verification_sent(
mut self,
db: &DatabaseConnection,
) -> ModelResult<Model> {
self.email_verification_sent_at = ActiveValue::set(Some(Local::now().naive_local()));
self.email_verification_token = ActiveValue::Set(Some(Uuid::new_v4().to_string()));
Ok(self.update(db).await?)
}
/// Sets the information for a reset password request,
/// generates a unique reset password token, and updates it in the
/// database.
///
/// This method records the timestamp when the reset password token is sent
/// and generates a unique token for the user.
///
/// # Arguments
///
/// # Errors
///
/// when has DB query error
pub async fn set_forgot_password_sent(mut self, db: &DatabaseConnection) -> ModelResult<Model> {
self.reset_sent_at = ActiveValue::set(Some(Local::now().naive_local()));
self.reset_token = ActiveValue::Set(Some(Uuid::new_v4().to_string()));
Ok(self.update(db).await?)
}
/// Records the verification time when a user verifies their
/// email and updates it in the database.
///
/// This method sets the timestamp when the user successfully verifies their
/// email.
///
/// # Errors
///
/// when has DB query error
pub async fn verified(mut self, db: &DatabaseConnection) -> ModelResult<Model> {
self.email_verified_at = ActiveValue::set(Some(Local::now().naive_local()));
Ok(self.update(db).await?)
}
/// Resets the current user password with a new password and
/// updates it in the database.
///
/// This method hashes the provided password and sets it as the new password
/// for the user.
/// # Errors
///
/// when there is a DB query error or the given password could not be hashed
pub async fn reset_password(
mut self,
db: &DatabaseConnection,
password: &str,
) -> ModelResult<Model> {
self.password =
ActiveValue::set(hash::hash_password(password).map_err(|e| ModelError::Any(e.into()))?);
Ok(self.update(db).await?)
}
}

View File

@ -0,0 +1 @@
pub mod seed;

View File

@ -0,0 +1,44 @@
//! This task implements data seeding functionality for initializing new
//! development/demo environments.
//!
//! # Example
//!
//! Run the task with the following command:
//! ```sh
//! cargo run task
//! ```
//!
//! To override existing data and reset the data structure, use the following
//! command with the `refresh:true` argument:
//! ```sh
//! cargo run task seed_data refresh:true
//! ```
use std::collections::BTreeMap;
use loco_rs::{db, prelude::*};
use migration::Migrator;
use crate::app::App;
#[allow(clippy::module_name_repetitions)]
pub struct SeedData;
#[async_trait]
impl Task for SeedData {
fn task(&self) -> TaskInfo {
TaskInfo {
name: "seed_data".to_string(),
detail: "Task for seeding data".to_string(),
}
}
async fn run(&self, app_context: &AppContext, vars: &BTreeMap<String, String>) -> Result<()> {
let refresh = vars.get("refresh").is_some_and(|refresh| refresh == "true");
if refresh {
db::reset::<Migrator>(&app_context.db).await?;
}
let path = std::path::Path::new("src/fixtures");
db::run_app_seed::<App>(&app_context.db, path).await?;
Ok(())
}
}

View File

@ -0,0 +1,23 @@
use serde::{Deserialize, Serialize};
use crate::models::_entities::users;
#[derive(Debug, Deserialize, Serialize)]
pub struct LoginResponse {
pub token: String,
pub pid: String,
pub name: String,
pub is_verified: bool,
}
impl LoginResponse {
#[must_use]
pub fn new(user: &users::Model, token: &String) -> Self {
Self {
token: token.to_string(),
pid: user.pid.to_string(),
name: user.name.clone(),
is_verified: user.email_verified_at.is_some(),
}
}
}

View File

@ -0,0 +1,2 @@
pub mod auth;
pub mod user;

View File

@ -0,0 +1,21 @@
use serde::{Deserialize, Serialize};
use crate::models::_entities::users;
#[derive(Debug, Deserialize, Serialize)]
pub struct CurrentResponse {
pub pid: String,
pub name: String,
pub email: String,
}
impl CurrentResponse {
#[must_use]
pub fn new(user: &users::Model) -> Self {
Self {
pid: user.pid.to_string(),
name: user.name.clone(),
email: user.email.clone(),
}
}
}

View File

@ -0,0 +1,43 @@
use std::time::Duration;
use loco_rs::prelude::*;
use serde::{Deserialize, Serialize};
use tokio::time::sleep;
use crate::models::users;
pub struct DownloadWorker {
pub ctx: AppContext,
}
#[derive(Deserialize, Debug, Serialize)]
pub struct DownloadWorkerArgs {
pub user_guid: String,
}
impl worker::AppWorker<DownloadWorkerArgs> for DownloadWorker {
fn build(ctx: &AppContext) -> Self {
Self { ctx: ctx.clone() }
}
}
#[async_trait]
impl worker::Worker<DownloadWorkerArgs> for DownloadWorker {
async fn perform(&self, args: DownloadWorkerArgs) -> worker::Result<()> {
// TODO: Some actual work goes here...
println!("================================================");
println!("Sending payment report to user {}", args.user_guid);
sleep(Duration::from_millis(2000)).await;
let all = users::Entity::find()
.all(&self.ctx.db)
.await
.map_err(Box::from)?;
for user in &all {
println!("user: {}", user.id);
}
println!("================================================");
Ok(())
}
}

View File

@ -0,0 +1 @@
pub mod downloader;

View File

@ -0,0 +1,21 @@
export default {
"extends": [
"eslint:recommended",
"plugin:react/recommended",
"plugin:react/jsx-runtime",
"plugin:react-hooks/recommended",
"prettier"
],
"parser": "@typescript-eslint/parser",
"plugins": ["@typescript-eslint"],
"env": {
"browser": true,
"es2021": true,
"node": true
},
"settings": {
"react": {
"version": "detect"
}
}
}

View File

@ -0,0 +1,24 @@
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
pnpm-debug.log*
lerna-debug.log*
node_modules
dist
dist-ssr
*.local
# Editor directories and files
.vscode/*
!.vscode/extensions.json
.idea
.DS_Store
*.suo
*.ntvs*
*.njsproj
*.sln
*.sw?

View File

@ -0,0 +1,35 @@
# react-admin
## Installation
Install the application dependencies by running:
```sh
npm install
```
## Development
Start the application in development mode by running:
```sh
npm run dev
```
## Production
Build the application in production mode by running:
```sh
npm run build
```
## Authentication
The included auth provider should only be used for development and test purposes.
You'll find a `users.json` file in the `src` directory that includes the users you can use.
You can sign in to the application with the following usernames and password:
- janedoe / password
- johndoe / password

View File

@ -0,0 +1,125 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8" />
<meta
name="viewport"
content="minimum-scale=1, initial-scale=1, width=device-width, shrink-to-fit=no"
/>
<meta name="theme-color" content="#000000" />
<link rel="manifest" href="./manifest.json" />
<link rel="shortcut icon" href="./favicon.ico" />
<title>react-admin</title>
<style>
body {
margin: 0;
padding: 0;
font-family: sans-serif;
}
.loader-container {
display: flex;
align-items: center;
justify-content: center;
flex-direction: column;
position: absolute;
top: 0;
bottom: 0;
left: 0;
right: 0;
background-color: #fafafa;
}
/* CSS Spinner from https://projects.lukehaas.me/css-loaders/ */
.loader,
.loader:before,
.loader:after {
border-radius: 50%;
}
.loader {
color: #283593;
font-size: 11px;
text-indent: -99999em;
margin: 55px auto;
position: relative;
width: 10em;
height: 10em;
box-shadow: inset 0 0 0 1em;
-webkit-transform: translateZ(0);
-ms-transform: translateZ(0);
transform: translateZ(0);
}
.loader:before,
.loader:after {
position: absolute;
content: '';
}
.loader:before {
width: 5.2em;
height: 10.2em;
background: #fafafa;
border-radius: 10.2em 0 0 10.2em;
top: -0.1em;
left: -0.1em;
-webkit-transform-origin: 5.2em 5.1em;
transform-origin: 5.2em 5.1em;
-webkit-animation: load2 2s infinite ease 1.5s;
animation: load2 2s infinite ease 1.5s;
}
.loader:after {
width: 5.2em;
height: 10.2em;
background: #fafafa;
border-radius: 0 10.2em 10.2em 0;
top: -0.1em;
left: 5.1em;
-webkit-transform-origin: 0px 5.1em;
transform-origin: 0px 5.1em;
-webkit-animation: load2 2s infinite ease;
animation: load2 2s infinite ease;
}
@-webkit-keyframes load2 {
0% {
-webkit-transform: rotate(0deg);
transform: rotate(0deg);
}
100% {
-webkit-transform: rotate(360deg);
transform: rotate(360deg);
}
}
@keyframes load2 {
0% {
-webkit-transform: rotate(0deg);
transform: rotate(0deg);
}
100% {
-webkit-transform: rotate(360deg);
transform: rotate(360deg);
}
}
</style>
<link rel="preconnect" href="https://fonts.gstatic.com" />
<link
href="https://fonts.googleapis.com/css2?family=Roboto:wght@300;400;500;700&display=swap"
rel="stylesheet"
/>
</head>
<body>
<noscript> You need to enable JavaScript to run this app. </noscript>
<div id="root">
<div class="loader-container">
<div class="loader">Loading...</div>
</div>
</div>
</body>
<script type="module" src="/src/index.tsx"></script>
</html>

File diff suppressed because it is too large

View File

@ -0,0 +1,34 @@
{
"private": true,
"scripts": {
"dev": "vite",
"build": "vite build",
"serve": "vite preview",
"type-check": "tsc --noEmit",
"lint": "eslint --fix --ext .js,.jsx,.ts,.tsx ./src",
"format": "prettier --write ./src"
},
"dependencies": {
"axios": "^1.7.2",
"ra-data-json-server": "^5.0.5",
"react": "^18.3.0",
"react-admin": "^5.0.0",
"react-dom": "^18.3.0"
},
"devDependencies": {
"@types/node": "^20.10.7",
"@types/react": "^18.3.3",
"@types/react-dom": "^18.3.0",
"@typescript-eslint/eslint-plugin": "^5.60.1",
"@typescript-eslint/parser": "^5.60.1",
"@vitejs/plugin-react": "^4.0.1",
"eslint": "^8.43.0",
"eslint-config-prettier": "^8.8.0",
"eslint-plugin-react": "^7.32.2",
"eslint-plugin-react-hooks": "^4.6.0",
"prettier": "^2.8.8",
"typescript": "^5.1.6",
"vite": "^4.3.9"
},
"name": "react-admin"
}

View File

@ -0,0 +1 @@
module.exports = {}

Binary file not shown. (new binary file, 15 KiB)

View File

@ -0,0 +1,15 @@
{
"short_name": "react-admin",
"name": "{{name}}",
"icons": [
{
"src": "favicon.ico",
"sizes": "64x64 32x32 24x24 16x16",
"type": "image/x-icon"
}
],
"start_url": "./index.html",
"display": "standalone",
"theme_color": "#000000",
"background_color": "#ffffff"
}

View File

@ -0,0 +1,15 @@
import { Admin } from 'react-admin';
import { Layout } from './Layout';
import { authProvider } from './authProvider';
export const App = () => (
<Admin
layout={Layout}
authProvider={authProvider}
>
</Admin>
);

View File

@ -0,0 +1,12 @@
import type { ReactNode } from 'react';
import {
Layout as RALayout,
CheckForApplicationUpdate,
} from "react-admin";
export const Layout = ({ children }: { children: ReactNode }) => (
<RALayout>
{children}
<CheckForApplicationUpdate />
</RALayout>
);

View File

@ -0,0 +1,44 @@
import { AuthProvider, HttpError } from 'react-admin';
import data from './users.json';
/**
* This authProvider is only for test purposes. Don't use it in production.
*/
export const authProvider: AuthProvider = {
login: ({ username, password }) => {
const user = data.users.find(
u => u.username === username && u.password === password
);
if (user) {
// eslint-disable-next-line no-unused-vars, @typescript-eslint/no-unused-vars
let { password, ...userToPersist } = user;
localStorage.setItem('user', JSON.stringify(userToPersist));
return Promise.resolve();
}
return Promise.reject(
new HttpError('Unauthorized', 401, {
message: 'Invalid username or password',
})
);
},
logout: () => {
localStorage.removeItem('user');
return Promise.resolve();
},
checkError: () => Promise.resolve(),
checkAuth: () =>
localStorage.getItem('user') ? Promise.resolve() : Promise.reject(),
getPermissions: () => {
return Promise.resolve(undefined);
},
getIdentity: () => {
const persistedUser = localStorage.getItem('user');
const user = persistedUser ? JSON.parse(persistedUser) : null;
return Promise.resolve(user);
},
};
export default authProvider;

View File

@ -0,0 +1,87 @@
import { DataProvider } from "react-admin";
import axios from 'axios';
const apiUrl = 'http://localhost:3000/api/graphql';
export const dataProvider: DataProvider = {
// Fetch data for post listing
getList: (resource, params) => {
// Paginator status
const { page, perPage } = params.pagination;
// Sorter status
const { field, order } = params.sort;
// POST request to GraphQL endpoint
return axios.post(apiUrl, {
query: `
query {
notes (
orderBy: { ${field}: ${order} },
pagination: { page: { limit: ${perPage}, page: ${page - 1} }}
) {
nodes {
id
title
content
}
paginationInfo {
pages
current
offset
total
}
}
}
`
})
.then((response) => {
// Unwrap the response
const { nodes, paginationInfo } = response.data.data.notes;
// Return the data array and total number of pages
return {
data: nodes,
total: paginationInfo.total,
};
});
},
// Fetch data for a single post
getOne: (resource, params) => {
// POST request to GraphQL endpoint
return axios.post(apiUrl, {
query: `
query {
notes(filters: {id: {eq: ${params.id}}}) {
nodes {
id
title
content
}
}
}
`
})
.then((response) => {
// Unwrap the response
const { nodes } = response.data.data.notes;
// Return the one and only data
return {
data: nodes[0],
};
});
},
getMany: (resource, params) => { },
getManyReference: (resource, params) => { },
update: (resource, params) => { },
updateMany: (resource, params) => { },
create: (resource, params) => { },
delete: (resource, params) => { },
deleteMany: (resource, params) => { },
};

View File

@ -0,0 +1,29 @@
import ReactDOM from 'react-dom/client';
import { Admin, Resource, List, Datagrid, TextField, Show, SimpleShowLayout } from 'react-admin';
import { dataProvider } from './dataProvider';
const PostList = () => (
<List>
<Datagrid bulkActionButtons={false}>
<TextField source="id" />
<TextField source="title" />
<TextField source="content" />
</Datagrid>
</List>
);
const PostShow = () => (
<Show>
<SimpleShowLayout>
<TextField source="id" />
<TextField source="title" />
<TextField source="content" />
</SimpleShowLayout>
</Show>
);
ReactDOM.createRoot(document.getElementById('root')!).render(
<Admin dataProvider={dataProvider}>
<Resource name="posts" list={PostList} show={PostShow} />
</Admin>
);

File diff suppressed because one or more lines are too long

View File

@ -0,0 +1 @@
/// <reference types="vite/client" />

View File

@ -0,0 +1,26 @@
{
"compilerOptions": {
"target": "es5",
"lib": [
"dom",
"dom.iterable",
"esnext"
],
"allowJs": true,
"skipLibCheck": true,
"esModuleInterop": true,
"allowSyntheticDefaultImports": true,
"strict": true,
"forceConsistentCasingInFileNames": true,
"noFallthroughCasesInSwitch": true,
"module": "esnext",
"moduleResolution": "node",
"resolveJsonModule": true,
"isolatedModules": true,
"noEmit": true,
"jsx": "react-jsx"
},
"include": [
"src"
]
}

View File

@ -0,0 +1,14 @@
import { defineConfig } from 'vite';
import react from '@vitejs/plugin-react';
// https://vitejs.dev/config/
export default defineConfig({
plugins: [react()],
define: {
'process.env': process.env,
},
server: {
host: true,
},
base: './',
});

View File

@ -25,7 +25,7 @@ cargo run
## Install Seaography
```sh
cargo install seaography-cli@^1.0.0-rc.3
cargo install seaography-cli@^1.0.0
```
## Generate GraphQL project

View File

@ -17,7 +17,7 @@ tracing-subscriber = { version = "0.3.17" }
lazy_static = { version = "1.4.0" }
[dependencies.seaography]
version = "1.0.0-rc.3" # seaography version
version = "1.0.0" # seaography version
features = ["with-decimal", "with-chrono"]
[dev-dependencies]