BLOG-43 Post-related API endpoints #55

Merged
squid merged 9 commits from BLOG-43_post_crud_api into main 2025-06-07 21:26:10 +08:00
44 changed files with 3654 additions and 1 deletion

.vscode/settings.json

@ -0,0 +1,12 @@
{
"cSpell.words": [
"actix",
"chrono",
"dotenv",
"rustls",
"serde",
"sqlx",
"squidspirit"
]
}


@ -3,7 +3,7 @@
## Development
- Frontend: Next.js
- Backend: Go (gin)
- Backend: Rust actix-web
Despite Next.js being a full-stack framework, I still decided to adopt a separate front-end and back-end architecture for this blog project. I believe that this separation makes the project cleaner, reduces coupling, and aligns with modern development practices. Furthermore, I wanted to practice developing a purely back-end API.

backend/.gitignore

@ -0,0 +1,3 @@
.env
/.sqlx
/target

backend/Cargo.lock (generated)

File diff suppressed because it is too large.

backend/Cargo.toml

@ -0,0 +1,27 @@
[workspace]
members = ["feature/post", "server"]
resolver = "2"
[workspace.package]
version = "0.1.1"
edition = "2024"
[workspace.dependencies]
actix-web = "4.10.2"
async-trait = "0.1.88"
chrono = "0.4.41"
dotenv = "0.15.0"
env_logger = "0.11.8"
futures = "0.3.31"
log = "0.4.27"
serde = { version = "1.0.219", features = ["derive"] }
sqlx = { version = "0.8.5", features = [
"chrono",
"macros",
"postgres",
"runtime-tokio-rustls",
] }
tokio = { version = "1.45.0", features = ["full"] }
server.path = "server"
post.path = "feature/post"

backend/README.md

@ -0,0 +1,42 @@
# Backend
## Development
### SQL Migration
1. Install `sqlx-cli`
```bash
cargo install sqlx-cli
```
2. Run the migrations (requires `DATABASE_URL`; see the sketch after this list)
```bash
sqlx migrate run
```
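Both `sqlx-cli` and the server read the connection string from the `DATABASE_URL` environment variable (or a `.env` file, which is git-ignored in this repo); the server falls back to `postgres://postgres@localhost:5432/postgres` when the variable is unset. A minimal sketch, assuming a local Postgres with the default `postgres` user and database:
```bash
# Hypothetical .env for local development; adjust user, password, and database
# to match your setup.
echo 'DATABASE_URL=postgres://postgres@localhost:5432/postgres' > .env

# Alternatively, export it only for the current shell session.
export DATABASE_URL=postgres://postgres@localhost:5432/postgres
```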
### Run Project
1. Prepare the SQLx offline query metadata (written to `.sqlx`)
```bash
cargo sqlx prepare --workspace
```
2. Run the server (sample requests are sketched after this list)
```bash
RUST_LOG=debug cargo run
```
3. (Optional) Hot restart
1. Install `watchexec`
2. Run the server with `watchexec`
```bash
RUST_LOG=debug watchexec -e rs -r 'cargo run'
```
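With the server listening on `127.0.0.1:8080`, the two endpoints added in this PR can be exercised with `curl`. A rough sketch (the post id `1` is only an assumption; `is_published_only` defaults to `true` when omitted):
```bash
# List post infos, including unpublished drafts.
curl 'http://127.0.0.1:8080/post_info?is_published_only=false'

# Fetch the full post with id 1 (404 if no such post exists).
curl 'http://127.0.0.1:8080/post/1'
```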

backend/build.rs

@ -0,0 +1,5 @@
// generated by `sqlx migrate build-script`
fn main() {
// trigger recompilation when a new migration is added
println!("cargo:rerun-if-changed=migrations");
}


@ -0,0 +1,12 @@
[package]
name = "post"
version.workspace = true
edition.workspace = true
[dependencies]
actix-web.workspace = true
async-trait.workspace = true
chrono.workspace = true
log.workspace = true
serde.workspace = true
sqlx.workspace = true


@ -0,0 +1,2 @@
pub mod delivery;
pub mod gateway;


@ -0,0 +1,5 @@
pub mod label_response_dto;
pub mod post_controller;
pub mod post_info_query_dto;
pub mod post_info_response_dto;
pub mod post_response_dto;


@ -0,0 +1,20 @@
use serde::Serialize;
use crate::domain::entity::label::Label;
#[derive(Serialize)]
pub struct LabelResponseDto {
pub id: i32,
pub name: String,
pub color: String,
}
impl From<Label> for LabelResponseDto {
fn from(entity: Label) -> Self {
Self {
id: entity.id,
name: entity.name,
color: format!("#{:08X}", entity.color),
}
}
}


@ -0,0 +1,65 @@
use std::sync::Arc;
use async_trait::async_trait;
use crate::application::{
error::post_error::PostError,
use_case::{
get_all_post_info_use_case::GetAllPostInfoUseCase,
get_full_post_use_case::GetFullPostUseCase,
},
};
use super::{post_info_response_dto::PostInfoResponseDto, post_response_dto::PostResponseDto};
#[async_trait]
pub trait PostController: Send + Sync {
async fn get_all_post_info(
&self,
is_published_only: bool,
) -> Result<Vec<PostInfoResponseDto>, PostError>;
async fn get_full_post(&self, id: i32) -> Result<PostResponseDto, PostError>;
}
pub struct PostControllerImpl {
get_all_post_info_use_case: Arc<dyn GetAllPostInfoUseCase>,
get_full_post_use_case: Arc<dyn GetFullPostUseCase>,
}
impl PostControllerImpl {
pub fn new(
get_all_post_info_use_case: Arc<dyn GetAllPostInfoUseCase>,
get_full_post_use_case: Arc<dyn GetFullPostUseCase>,
) -> Self {
Self {
get_all_post_info_use_case,
get_full_post_use_case,
}
}
}
#[async_trait]
impl PostController for PostControllerImpl {
async fn get_all_post_info(
&self,
is_published_only: bool,
) -> Result<Vec<PostInfoResponseDto>, PostError> {
        self.get_all_post_info_use_case
            .execute(is_published_only)
            .await
            .map(|post_info_list| {
                post_info_list
                    .into_iter()
                    .map(PostInfoResponseDto::from)
                    .collect()
            })
}
async fn get_full_post(&self, id: i32) -> Result<PostResponseDto, PostError> {
let result = self.get_full_post_use_case.execute(id).await;
result.map(PostResponseDto::from)
}
}


@ -0,0 +1,6 @@
use serde::Deserialize;
#[derive(Deserialize)]
pub struct PostQueryDto {
pub is_published_only: Option<bool>,
}


@ -0,0 +1,34 @@
use serde::Serialize;
use crate::domain::entity::post_info::PostInfo;
use super::label_response_dto::LabelResponseDto;
#[derive(Serialize)]
pub struct PostInfoResponseDto {
pub id: i32,
pub title: String,
pub description: String,
pub preview_image_url: String,
pub labels: Vec<LabelResponseDto>,
pub published_time: Option<i64>,
}
impl From<PostInfo> for PostInfoResponseDto {
fn from(entity: PostInfo) -> Self {
Self {
id: entity.id,
title: entity.title,
description: entity.description,
preview_image_url: entity.preview_image_url,
labels: entity
.labels
.into_iter()
.map(LabelResponseDto::from)
.collect(),
published_time: entity
.published_time
.map(|datetime| datetime.timestamp_micros()),
}
}
}


@ -0,0 +1,22 @@
use serde::Serialize;
use crate::domain::entity::post::Post;
use super::post_info_response_dto::PostInfoResponseDto;
#[derive(Serialize)]
pub struct PostResponseDto {
pub id: i32,
pub info: PostInfoResponseDto,
pub content: String,
}
impl From<Post> for PostResponseDto {
fn from(entity: Post) -> Self {
Self {
id: entity.id,
info: PostInfoResponseDto::from(entity.info),
content: entity.content,
}
}
}


@ -0,0 +1,2 @@
pub mod post_db_service;
pub mod post_repository_impl;


@ -0,0 +1,12 @@
use async_trait::async_trait;
use crate::{
application::error::post_error::PostError,
domain::entity::{post::Post, post_info::PostInfo},
};
#[async_trait]
pub trait PostDbService: Send + Sync {
async fn get_all_post_info(&self, is_published_only: bool) -> Result<Vec<PostInfo>, PostError>;
async fn get_full_post(&self, id: i32) -> Result<Post, PostError>;
}


@ -0,0 +1,31 @@
use std::sync::Arc;
use async_trait::async_trait;
use crate::{
application::{error::post_error::PostError, gateway::post_repository::PostRepository},
domain::entity::{post::Post, post_info::PostInfo},
};
use super::post_db_service::PostDbService;
pub struct PostRepositoryImpl {
post_db_service: Arc<dyn PostDbService>,
}
impl PostRepositoryImpl {
pub fn new(post_db_service: Arc<dyn PostDbService>) -> Self {
Self { post_db_service }
}
}
#[async_trait]
impl PostRepository for PostRepositoryImpl {
async fn get_all_post_info(&self, is_published_only: bool) -> Result<Vec<PostInfo>, PostError> {
self.post_db_service.get_all_post_info(is_published_only).await
}
async fn get_full_post(&self, id: i32) -> Result<Post, PostError> {
self.post_db_service.get_full_post(id).await
}
}


@ -0,0 +1,3 @@
pub mod error;
pub mod gateway;
pub mod use_case;


@ -0,0 +1 @@
pub mod post_error;


@ -0,0 +1,5 @@
#[derive(Debug, PartialEq)]
pub enum PostError {
DatabaseError(String),
NotFound,
}


@ -0,0 +1 @@
pub mod post_repository;


@ -0,0 +1,12 @@
use async_trait::async_trait;
use crate::{
application::error::post_error::PostError,
domain::entity::{post::Post, post_info::PostInfo},
};
#[async_trait]
pub trait PostRepository: Send + Sync {
async fn get_all_post_info(&self, is_published_only: bool) -> Result<Vec<PostInfo>, PostError>;
async fn get_full_post(&self, id: i32) -> Result<Post, PostError>;
}


@ -0,0 +1,2 @@
pub mod get_all_post_info_use_case;
pub mod get_full_post_use_case;


@ -0,0 +1,30 @@
use std::sync::Arc;
use async_trait::async_trait;
use crate::{
application::{error::post_error::PostError, gateway::post_repository::PostRepository},
domain::entity::post_info::PostInfo,
};
#[async_trait]
pub trait GetAllPostInfoUseCase: Send + Sync {
async fn execute(&self, is_published_only: bool) -> Result<Vec<PostInfo>, PostError>;
}
pub struct GetAllPostInfoUseCaseImpl {
post_repository: Arc<dyn PostRepository>,
}
impl GetAllPostInfoUseCaseImpl {
pub fn new(post_repository: Arc<dyn PostRepository>) -> Self {
Self { post_repository }
}
}
#[async_trait]
impl GetAllPostInfoUseCase for GetAllPostInfoUseCaseImpl {
async fn execute(&self, is_published_only: bool) -> Result<Vec<PostInfo>, PostError> {
self.post_repository.get_all_post_info(is_published_only).await
}
}


@ -0,0 +1,30 @@
use std::sync::Arc;
use async_trait::async_trait;
use crate::{
application::{error::post_error::PostError, gateway::post_repository::PostRepository},
domain::entity::post::Post,
};
#[async_trait]
pub trait GetFullPostUseCase: Send + Sync {
async fn execute(&self, id: i32) -> Result<Post, PostError>;
}
pub struct GetFullPostUseCaseImpl {
post_repository: Arc<dyn PostRepository>,
}
impl GetFullPostUseCaseImpl {
pub fn new(post_repository: Arc<dyn PostRepository>) -> Self {
Self { post_repository }
}
}
#[async_trait]
impl GetFullPostUseCase for GetFullPostUseCaseImpl {
async fn execute(&self, id: i32) -> Result<Post, PostError> {
self.post_repository.get_full_post(id).await
}
}


@ -0,0 +1 @@
pub mod entity;


@ -0,0 +1,3 @@
pub mod label;
pub mod post_info;
pub mod post;


@ -0,0 +1,5 @@
pub struct Label {
pub id: i32,
pub name: String,
pub color: u32,
}


@ -0,0 +1,7 @@
use super::post_info::PostInfo;
pub struct Post {
pub id: i32,
pub info: PostInfo,
pub content: String,
}


@ -0,0 +1,12 @@
use chrono::{DateTime, Utc};
use super::label::Label;
pub struct PostInfo {
pub id: i32,
pub title: String,
pub description: String,
pub preview_image_url: String,
pub labels: Vec<Label>,
pub published_time: Option<DateTime<Utc>>,
}


@ -0,0 +1,2 @@
pub mod db;
pub mod web;


@ -0,0 +1,4 @@
pub mod post_db_service_impl;
mod post_info_with_label_record;
mod post_with_label_record;


@ -0,0 +1,168 @@
use std::{collections::HashMap, sync::Arc};
use async_trait::async_trait;
use chrono::{DateTime, Utc};
use sqlx::{Pool, Postgres};
use crate::{
adapter::gateway::post_db_service::PostDbService,
application::error::post_error::PostError,
domain::entity::{label::Label, post::Post, post_info::PostInfo},
};
use super::{
post_info_with_label_record::PostInfoWithLabelRecord,
post_with_label_record::PostWithLabelRecord,
};
pub struct PostDbServiceImpl {
db_pool: Arc<Pool<Postgres>>,
}
impl PostDbServiceImpl {
pub fn new(db_pool: Arc<Pool<Postgres>>) -> Self {
Self { db_pool }
}
}
#[async_trait]
impl PostDbService for PostDbServiceImpl {
async fn get_all_post_info(&self, is_published_only: bool) -> Result<Vec<PostInfo>, PostError> {
let mut query_builder = sqlx::QueryBuilder::new(
r#"
SELECT
p.id AS post_id,
p.title,
p.description,
p.preview_image_url,
p.published_time,
l.id AS label_id,
l.name AS label_name,
l.color AS label_color
FROM
post p
LEFT JOIN
post_label pl ON p.id = pl.post_id
LEFT JOIN
label l ON pl.label_id = l.id AND l.deleted_time IS NULL
WHERE
p.deleted_time IS NULL
"#,
);
if is_published_only {
query_builder.push(r#" AND p.published_time IS NOT NULL"#);
}
query_builder.push(r#" ORDER BY p.id"#);
let records = query_builder
.build_query_as::<PostInfoWithLabelRecord>()
.fetch_all(&*self.db_pool)
.await
.map_err(|err| PostError::DatabaseError(err.to_string()))?;
let mut post_info_map = HashMap::<i32, PostInfo>::new();
for record in records {
let post_info = post_info_map
.entry(record.post_id)
.or_insert_with(|| PostInfo {
id: record.post_id,
title: record.title,
description: record.description,
preview_image_url: record.preview_image_url,
labels: Vec::new(),
published_time: record
.published_time
.map(|dt| DateTime::<Utc>::from_naive_utc_and_offset(dt, Utc)),
});
if let (Some(label_id), Some(label_name), Some(label_color)) =
(record.label_id, record.label_name, record.label_color)
{
post_info.labels.push(Label {
id: label_id,
name: label_name,
color: label_color as u32,
});
}
}
Ok(post_info_map.into_values().collect())
}
async fn get_full_post(&self, id: i32) -> Result<Post, PostError> {
let mut query_builder = sqlx::QueryBuilder::new(
r#"
SELECT
p.id AS post_id,
p.title,
p.description,
p.preview_image_url,
p.content,
p.published_time,
l.id AS label_id,
l.name AS label_name,
l.color AS label_color
FROM
post p
LEFT JOIN
post_label pl ON p.id = pl.post_id
LEFT JOIN
label l ON pl.label_id = l.id AND l.deleted_time IS NULL
WHERE
p.deleted_time IS NULL AND p.id =
"#,
);
query_builder.push_bind(id);
query_builder.push(r#" ORDER BY p.id"#);
let records = query_builder
.build_query_as::<PostWithLabelRecord>()
.fetch_all(&*self.db_pool)
.await
.map_err(|err| PostError::DatabaseError(err.to_string()))?;
if records.is_empty() {
return Err(PostError::NotFound);
}
let mut post_map = HashMap::<i32, Post>::new();
for record in records {
let post = post_map.entry(record.post_id).or_insert_with(|| Post {
id: record.post_id,
info: PostInfo {
id: record.post_id,
title: record.title,
description: record.description,
preview_image_url: record.preview_image_url,
labels: Vec::new(),
published_time: record
.published_time
.map(|dt| DateTime::<Utc>::from_naive_utc_and_offset(dt, Utc)),
},
content: record.content,
});
if let (Some(label_id), Some(label_name), Some(label_color)) =
(record.label_id, record.label_name, record.label_color)
{
post.info.labels.push(Label {
id: label_id,
name: label_name,
color: label_color as u32,
});
}
}
        post_map.into_values().next().ok_or(PostError::NotFound)
}
}


@ -0,0 +1,14 @@
use chrono::NaiveDateTime;
#[derive(sqlx::FromRow)]
pub struct PostInfoWithLabelRecord {
pub post_id: i32,
pub title: String,
pub description: String,
pub preview_image_url: String,
pub published_time: Option<NaiveDateTime>,
pub label_id: Option<i32>,
pub label_name: Option<String>,
pub label_color: Option<i64>,
}


@ -0,0 +1,15 @@
use chrono::NaiveDateTime;
#[derive(sqlx::FromRow, Debug)]
pub struct PostWithLabelRecord {
pub post_id: i32,
pub title: String,
pub description: String,
pub preview_image_url: String,
pub content: String,
pub published_time: Option<NaiveDateTime>,
pub label_id: Option<i32>,
pub label_name: Option<String>,
pub label_color: Option<i64>,
}


@ -0,0 +1 @@
pub mod post_web_routes;


@ -0,0 +1,49 @@
use std::sync::Arc;
use actix_web::{HttpResponse, Responder, web};
use crate::{
adapter::delivery::{post_controller::PostController, post_info_query_dto::PostQueryDto},
application::error::post_error::PostError,
};
pub fn configure_post_routes(cfg: &mut web::ServiceConfig) {
cfg.service(web::resource("/post_info").route(web::get().to(get_all_post_info)));
cfg.service(web::resource("/post/{id}").route(web::get().to(get_full_post)));
}
async fn get_all_post_info(
post_controller: web::Data<Arc<dyn PostController>>,
query: web::Query<PostQueryDto>,
) -> impl Responder {
    let is_published_only = query.is_published_only.unwrap_or(true);
let result = post_controller.get_all_post_info(is_published_only).await;
match result {
Ok(post_info_list) => HttpResponse::Ok().json(post_info_list),
Err(e) => {
log::error!("{e:?}");
HttpResponse::InternalServerError().finish()
}
}
}
async fn get_full_post(
post_controller: web::Data<Arc<dyn PostController>>,
path: web::Path<i32>,
) -> impl Responder {
let id = path.into_inner();
let result = post_controller.get_full_post(id).await;
match result {
Ok(post) => HttpResponse::Ok().json(post),
Err(e) => {
if e == PostError::NotFound {
HttpResponse::NotFound().finish()
} else {
log::error!("{e:?}");
HttpResponse::InternalServerError().finish()
}
}
}
}


@ -0,0 +1,4 @@
pub mod adapter;
pub mod application;
pub mod domain;
pub mod framework;


@ -0,0 +1,51 @@
-- Add migration script here
CREATE TABLE "post" (
"id" SERIAL PRIMARY KEY NOT NULL,
"title" TEXT NOT NULL,
"description" TEXT NOT NULL,
"preview_image_url" TEXT NOT NULL,
"content" TEXT NOT NULL,
"published_time" TIMESTAMP,
"deleted_time" TIMESTAMP,
"created_time" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updated_time" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
);
CREATE TABLE "label" (
"id" SERIAL PRIMARY KEY NOT NULL,
"name" TEXT NOT NULL,
"color" BIGINT NOT NULL CHECK ("color" >= 0 AND "color" <= 4294967295),
"deleted_time" TIMESTAMP,
"created_time" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updated_time" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
);
CREATE TABLE "post_label" (
"post_id" INTEGER NOT NULL,
"label_id" INTEGER NOT NULL,
PRIMARY KEY ("post_id", "label_id"),
FOREIGN KEY ("post_id") REFERENCES "post" ("id") ON DELETE CASCADE,
FOREIGN KEY ("label_id") REFERENCES "label" ("id") ON DELETE CASCADE
);
-- Auto update `updated_time` trigger
CREATE FUNCTION update_updated_time_column() RETURNS TRIGGER AS $$
BEGIN
NEW.updated_time = CURRENT_TIMESTAMP;
    RETURN NEW;
END;
$$ LANGUAGE plpgsql;
CREATE TRIGGER "update_post_updated_time"
BEFORE UPDATE ON "post"
FOR EACH ROW
EXECUTE FUNCTION update_updated_time_column();
CREATE TRIGGER "update_label_updated_time"
BEFORE UPDATE ON "label"
FOR EACH ROW
EXECUTE FUNCTION update_updated_time_column();
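For manually exercising the endpoints it helps to seed a row or two; a hypothetical example using `psql` against the schema above (all names and values are made up):
```bash
psql "$DATABASE_URL" <<'SQL'
INSERT INTO "post" ("title", "description", "preview_image_url", "content", "published_time")
VALUES ('Hello world', 'First post', 'https://example.com/preview.png', '# Hello', CURRENT_TIMESTAMP);

INSERT INTO "label" ("name", "color")
VALUES ('rust', 4294901760); -- 0xFFFF0000, within the CHECK range

INSERT INTO "post_label" ("post_id", "label_id")
SELECT p."id", l."id"
FROM "post" p, "label" l
WHERE p."title" = 'Hello world' AND l."name" = 'rust';
SQL
```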

backend/server/Cargo.toml

@ -0,0 +1,12 @@
[package]
name = "server"
version.workspace = true
edition.workspace = true
[dependencies]
actix-web.workspace = true
dotenv.workspace = true
env_logger.workspace = true
sqlx.workspace = true
post.workspace = true


@ -0,0 +1,37 @@
use std::sync::Arc;
use post::{
adapter::{
delivery::post_controller::{PostController, PostControllerImpl},
gateway::post_repository_impl::PostRepositoryImpl,
},
application::use_case::{
get_all_post_info_use_case::GetAllPostInfoUseCaseImpl,
get_full_post_use_case::GetFullPostUseCaseImpl,
},
framework::db::post_db_service_impl::PostDbServiceImpl,
};
use sqlx::{Pool, Postgres};
pub struct Container {
pub post_controller: Arc<dyn PostController>,
}
impl Container {
pub fn new(db_pool: Arc<Pool<Postgres>>) -> Self {
let post_db_service = Arc::new(PostDbServiceImpl::new(db_pool));
let post_repository = Arc::new(PostRepositoryImpl::new(post_db_service.clone()));
let get_all_post_info_use_case =
Arc::new(GetAllPostInfoUseCaseImpl::new(post_repository.clone()));
let get_full_post_use_case = Arc::new(GetFullPostUseCaseImpl::new(post_repository.clone()));
let post_controller = Arc::new(PostControllerImpl::new(
get_all_post_info_use_case,
get_full_post_use_case,
));
Self { post_controller }
}
}


@ -0,0 +1 @@
pub mod container;


@ -0,0 +1,60 @@
use actix_web::{
App, Error, HttpServer,
body::MessageBody,
dev::{ServiceFactory, ServiceRequest, ServiceResponse},
web,
};
use post::framework::web::post_web_routes::configure_post_routes;
use server::container::Container;
use sqlx::{Pool, Postgres, postgres::PgPoolOptions};
use std::{env, sync::Arc};
#[actix_web::main]
async fn main() -> std::io::Result<()> {
dotenv::dotenv().ok();
env_logger::init();
let db_pool = init_database().await;
HttpServer::new(move || create_app(db_pool.clone()))
.bind(("127.0.0.1", 8080))?
.run()
.await
}
async fn init_database() -> Arc<Pool<Postgres>> {
let database_url = env::var("DATABASE_URL")
.unwrap_or_else(|_| "postgres://postgres@localhost:5432/postgres".to_string());
let db_pool = PgPoolOptions::new()
.max_connections(5)
.connect(&database_url)
.await
.expect("Failed to create database connection pool");
sqlx::migrate!("../migrations")
.run(&db_pool)
.await
.expect("Failed to run database migrations");
Arc::new(db_pool)
}
fn create_app(
db_pool: Arc<Pool<Postgres>>,
) -> App<
impl ServiceFactory<
ServiceRequest,
Response = ServiceResponse<impl MessageBody>,
Config = (),
InitError = (),
Error = Error,
>,
> {
let container = Container::new(db_pool.clone());
App::new()
.app_data(web::Data::new(db_pool))
.app_data(web::Data::new(container.post_controller))
.configure(configure_post_routes)
}