Compare commits

...

2 Commits

Author SHA1 Message Date
003db3cc75 BLOG-59 feat: enhance deployment workflow and backend server configuration
All checks were successful
PR Title Check / pr-title-check (pull_request) Successful in 13s
Frontend CI / build (push) Successful in 1m37s
- Updated deployment.yaml to define separate build-and-push steps for the frontend and backend images.
- Added a Dockerfile for the backend service to define its build process.
- Modified main.rs to bind the server to all network interfaces (0.0.0.0) instead of localhost, so the service is reachable through the container's published port.
2025-07-23 00:57:12 +08:00
d74107a0f9 BLOG-56 Align clean architecture (#57)
All checks were successful
Frontend CI / build (push) Successful in 1m53s
### Description

- As described in the issue (see the sketch after the quote below):

  > - ~~Use case should be stateless~~
  >   > The value unwrapped from `web::Data` must be `Arc` type
  > - Initializing shouldn't be done in Container
  > - Rename the functions as xxx_handler in routes
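For context, a minimal self-contained sketch of the actix-web pattern the quoted issue calls for; it is not part of this PR, and `Greeter`, `EnglishGreeter`, and `greet_handler` are hypothetical stand-ins for `PostController` and its handlers. The trait object is registered with `web::Data::from(Arc<dyn Trait>)` and extracted as `web::Data<dyn Trait>`; `web::Data::new` would wrap the `Arc` a second time and the extractor would fail to resolve.

```rust
use std::sync::Arc;

use actix_web::{App, HttpResponse, HttpServer, Responder, web};

// Hypothetical trait standing in for PostController.
trait Greeter: Send + Sync {
    fn greet(&self) -> String;
}

struct EnglishGreeter;

impl Greeter for EnglishGreeter {
    fn greet(&self) -> String {
        "hello".to_string()
    }
}

// The extractor names the trait object directly; actix-web hands back the
// same Arc that was registered below.
async fn greet_handler(greeter: web::Data<dyn Greeter>) -> impl Responder {
    HttpResponse::Ok().body(greeter.greet())
}

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    let greeter: Arc<dyn Greeter> = Arc::new(EnglishGreeter);

    HttpServer::new(move || {
        App::new()
            // Data::from reuses the existing Arc; Data::new would add a second
            // Arc and web::Data<dyn Greeter> would no longer match.
            .app_data(web::Data::from(greeter.clone()))
            .route("/greet", web::get().to(greet_handler))
    })
    .bind(("127.0.0.1", 8080))?
    .run()
    .await
}
```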

### Package Changes

_No response_

### Screenshots

_No response_

### Reference

Resolves #56

### Checklist

- [x] A milestone is set
- [x] The related issue has been linked to this branch

Co-authored-by: Yu Squire[ Yu, Tsung-Ying ] <squire.yu@linecorp.com>
Reviewed-on: #57
2025-07-22 23:35:54 +08:00
6 changed files with 49 additions and 24 deletions

View File

@@ -24,7 +24,7 @@ jobs:
           username: ${{ vars.REGISTRY_USERNAME }}
           password: ${{ secrets.REGISTRY_PASSWORD }}
-      - name: Build and push
+      - name: Build and push (Frontend)
        uses: docker/build-push-action@v6
        with:
          push: true
@@ -33,3 +33,13 @@ jobs:
          tags: |
            ${{ vars.REGISTRY }}/${{ vars.IMAGE_REPO_FRONTEND }}:latest
            ${{ vars.REGISTRY }}/${{ vars.IMAGE_REPO_FRONTEND }}:${{ gitea.event.release.tag_name }}
+      - name: Build and push (Backend)
+        uses: docker/build-push-action@v6
+        with:
+          push: true
+          provenance: false
+          context: ./backend
+          tags: |
+            ${{ vars.REGISTRY }}/${{ vars.IMAGE_REPO_BACKEND }}:latest
+            ${{ vars.REGISTRY }}/${{ vars.IMAGE_REPO_BACKEND }}:${{ gitea.event.release.tag_name }}

backend/Dockerfile (new file, 15 additions)
View File

@@ -0,0 +1,15 @@
+FROM rust:1-slim AS base
+RUN apt update -qq && apt install -y -qq --no-install-recommends musl-tools
+RUN rustup target add x86_64-unknown-linux-musl
+
+FROM base AS builder
+WORKDIR /app
+COPY . .
+RUN cargo build --release --target x86_64-unknown-linux-musl
+
+FROM alpine:latest AS runner
+WORKDIR /app
+COPY --from=builder /app/target/x86_64-unknown-linux-musl/release/server .
+EXPOSE 8080
+ENV DATABASE_URL=postgres://postgres@localhost:5432/postgres
+CMD ["./server"]

View File

@@ -1,4 +1,4 @@
-use std::{collections::HashMap, sync::Arc};
+use std::collections::HashMap;

 use async_trait::async_trait;
 use chrono::{DateTime, Utc};
@@ -16,11 +16,11 @@ use super::{
 };

 pub struct PostDbServiceImpl {
-    db_pool: Arc<Pool<Postgres>>,
+    db_pool: Pool<Postgres>,
 }

 impl PostDbServiceImpl {
-    pub fn new(db_pool: Arc<Pool<Postgres>>) -> Self {
+    pub fn new(db_pool: Pool<Postgres>) -> Self {
         Self { db_pool }
     }
 }
@@ -58,7 +58,7 @@ impl PostDbService for PostDbServiceImpl {
         let records = query_builder
             .build_query_as::<PostInfoWithLabelRecord>()
-            .fetch_all(&*self.db_pool)
+            .fetch_all(&self.db_pool)
             .await
             .map_err(|err| PostError::DatabaseError(err.to_string()))?;
@@ -121,7 +121,7 @@ impl PostDbService for PostDbServiceImpl {
         let records = query_builder
             .build_query_as::<PostWithLabelRecord>()
-            .fetch_all(&*self.db_pool)
+            .fetch_all(&self.db_pool)
             .await
             .map_err(|err| PostError::DatabaseError(err.to_string()))?;
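A note on the change from `Arc<Pool<Postgres>>` to `Pool<Postgres>`: sqlx's `Pool` is already a reference-counted handle to shared state, so cloning it is cheap and an extra `Arc` layer adds nothing. A minimal sketch assuming sqlx with the `postgres` feature; the helper name is hypothetical:

```rust
use sqlx::{Pool, Postgres, postgres::PgPoolOptions};

// Hypothetical helper: cloning a Pool duplicates the handle, not the
// underlying connections, so the old Arc wrapper was redundant.
async fn connect_and_share(url: &str) -> Result<(Pool<Postgres>, Pool<Postgres>), sqlx::Error> {
    let pool = PgPoolOptions::new()
        .max_connections(5)
        .connect(url)
        .await?;
    let handle_for_service = pool.clone(); // cheap handle clone
    Ok((pool, handle_for_service))
}
```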

View File

@@ -1,5 +1,3 @@
-use std::sync::Arc;
-
 use actix_web::{HttpResponse, Responder, web};

 use crate::{
@@ -8,12 +6,15 @@ use crate::{
 };

 pub fn configure_post_routes(cfg: &mut web::ServiceConfig) {
-    cfg.service(web::resource("/post_info").route(web::get().to(get_all_post_info)));
-    cfg.service(web::resource("/post/{id}").route(web::get().to(get_full_post)));
+    cfg.service(
+        web::scope("/post")
+            .route("/all", web::get().to(get_all_post_info_handler))
+            .route("/{id}", web::get().to(get_full_post_handler)),
+    );
 }

-async fn get_all_post_info(
-    post_controller: web::Data<Arc<dyn PostController>>,
+async fn get_all_post_info_handler(
+    post_controller: web::Data<dyn PostController>,
     query: web::Query<PostQueryDto>,
 ) -> impl Responder {
     let is_published_only = query.is_published_only.unwrap_or_else(|| true);
@@ -28,8 +29,8 @@ async fn get_all_post_info(
     }
 }

-async fn get_full_post(
-    post_controller: web::Data<Arc<dyn PostController>>,
+async fn get_full_post_handler(
+    post_controller: web::Data<dyn PostController>,
     path: web::Path<i32>,
 ) -> impl Responder {
     let id = path.into_inner();
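With the scoped registration above, the service now answers `GET /post/all` and `GET /post/{id}` (previously `GET /post_info` and `GET /post/{id}`). A minimal sketch, with hypothetical stand-in handlers, of how `web::scope` composes the path prefix:

```rust
use actix_web::{HttpResponse, Responder, web};

// Hypothetical stand-in handlers; only the routing layout matters here.
async fn list_handler() -> impl Responder {
    HttpResponse::Ok().body("all post info")
}

async fn detail_handler(path: web::Path<i32>) -> impl Responder {
    HttpResponse::Ok().body(format!("post {}", path.into_inner()))
}

// web::scope("/post") prefixes every route registered under it, so this
// configuration serves GET /post/all and GET /post/{id}.
pub fn configure(cfg: &mut web::ServiceConfig) {
    cfg.service(
        web::scope("/post")
            .route("/all", web::get().to(list_handler))
            .route("/{id}", web::get().to(detail_handler)),
    );
}
```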

View File

@@ -18,8 +18,8 @@ pub struct Container {
 }

 impl Container {
-    pub fn new(db_pool: Arc<Pool<Postgres>>) -> Self {
-        let post_db_service = Arc::new(PostDbServiceImpl::new(db_pool));
+    pub fn new(db_pool: Pool<Postgres>) -> Self {
+        let post_db_service = Arc::new(PostDbServiceImpl::new(db_pool.clone()));
         let post_repository = Arc::new(PostRepositoryImpl::new(post_db_service.clone()));

View File

@@ -7,7 +7,7 @@ use actix_web::{
 use post::framework::web::post_web_routes::configure_post_routes;
 use server::container::Container;
 use sqlx::{Pool, Postgres, postgres::PgPoolOptions};
-use std::{env, sync::Arc};
+use std::env;

 #[actix_web::main]
 async fn main() -> std::io::Result<()> {
@@ -17,12 +17,12 @@ async fn main() -> std::io::Result<()> {
     let db_pool = init_database().await;

     HttpServer::new(move || create_app(db_pool.clone()))
-        .bind(("127.0.0.1", 8080))?
+        .bind(("0.0.0.0", 8080))?
         .run()
         .await
 }

-async fn init_database() -> Arc<Pool<Postgres>> {
+async fn init_database() -> Pool<Postgres> {
     let database_url = env::var("DATABASE_URL")
         .unwrap_or_else(|_| "postgres://postgres@localhost:5432/postgres".to_string());
@@ -37,11 +37,11 @@ async fn init_database() -> Arc<Pool<Postgres>> {
         .await
         .expect("Failed to run database migrations");

-    Arc::new(db_pool)
+    db_pool
 }

 fn create_app(
-    db_pool: Arc<Pool<Postgres>>,
+    db_pool: Pool<Postgres>,
 ) -> App<
     impl ServiceFactory<
         ServiceRequest,
@@ -51,10 +51,9 @@ fn create_app(
         Error = Error,
     >,
 > {
-    let container = Container::new(db_pool.clone());
+    let container = Container::new(db_pool);

     App::new()
-        .app_data(web::Data::new(db_pool))
-        .app_data(web::Data::new(container.post_controller))
+        .app_data(web::Data::from(container.post_controller))
         .configure(configure_post_routes)
 }