diff --git a/.env.example b/.env.example
index 8b33639..8e3712d 100644
--- a/.env.example
+++ b/.env.example
@@ -20,3 +20,14 @@ APP_HOST=localhost
# Service Port (Removed)
BACKEND_PORT=8000
FRONTEND_PORT=3000
+
+# DB
+MYSQL_USER=your_mysql_user
+MYSQL_PASSWORD=your_mysql_password
+MYSQL_HOST=localhost
+MYSQL_DATABASE=jobpt
+
+# JWT secret used to sign access tokens (required)
+# Generate a random value with the command below and use it:
+# python -c "import secrets; print(secrets.token_hex(32))"
+SECRET_KEY=change_me!
\ No newline at end of file
diff --git a/README.md b/README.md
index cfeec81..0a54c3f 100644
--- a/README.md
+++ b/README.md
@@ -97,6 +97,18 @@ docker compose up [-d] [--build]
### 로컬 환경에서 직접 실행
+Run the DB (standalone Docker container)
+```bash
+sh backend/db/run.sh
+```
+
+Run DB migrations (create and update tables)
+```bash
+cd backend
+alembic upgrade head
+```
+
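+If the models in `backend/db/models.py` change, a new revision can be generated with Alembic autogenerate (env.py already points `target_metadata` at `Base.metadata`); the `-m` message is just a label:
+```bash
+cd backend
+alembic revision --autogenerate -m "describe the change"
+alembic upgrade head
+```
+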
API 실행
```bash
diff --git a/assets/DB_Schema.drawio b/assets/DB_Schema.drawio
new file mode 100644
index 0000000..56b8829
--- /dev/null
+++ b/assets/DB_Schema.drawio
@@ -0,0 +1,58 @@
+<!-- DB_Schema.drawio: draw.io XML for the DB schema diagram (58 lines) not reproduced here -->
diff --git a/backend/alembic.ini b/backend/alembic.ini
new file mode 100644
index 0000000..1b03b05
--- /dev/null
+++ b/backend/alembic.ini
@@ -0,0 +1,147 @@
+# A generic, single database configuration.
+
+[alembic]
+# path to migration scripts.
+# this is typically a path given in POSIX (e.g. forward slashes)
+# format, relative to the token %(here)s which refers to the location of this
+# ini file
+script_location = %(here)s/alembic
+
+# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
+# Uncomment the line below if you want the files to be prepended with date and time
+# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
+# for all available tokens
+# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
+
+# sys.path path, will be prepended to sys.path if present.
+# defaults to the current working directory. for multiple paths, the path separator
+# is defined by "path_separator" below.
+prepend_sys_path = .
+
+
+# timezone to use when rendering the date within the migration file
+# as well as the filename.
+# If specified, requires the tzdata library which can be installed by adding
+# `alembic[tz]` to the pip requirements.
+# string value is passed to ZoneInfo()
+# leave blank for localtime
+# timezone =
+
+# max length of characters to apply to the "slug" field
+# truncate_slug_length = 40
+
+# set to 'true' to run the environment during
+# the 'revision' command, regardless of autogenerate
+# revision_environment = false
+
+# set to 'true' to allow .pyc and .pyo files without
+# a source .py file to be detected as revisions in the
+# versions/ directory
+# sourceless = false
+
+# version location specification; This defaults
+# to <script_location>/versions. When using multiple version
+# directories, initial revisions must be specified with --version-path.
+# The path separator used here should be the separator specified by "path_separator"
+# below.
+# version_locations = %(here)s/bar:%(here)s/bat:%(here)s/alembic/versions
+
+# path_separator; This indicates what character is used to split lists of file
+# paths, including version_locations and prepend_sys_path within configparser
+# files such as alembic.ini.
+# The default rendered in new alembic.ini files is "os", which uses os.pathsep
+# to provide os-dependent path splitting.
+#
+# Note that in order to support legacy alembic.ini files, this default does NOT
+# take place if path_separator is not present in alembic.ini. If this
+# option is omitted entirely, fallback logic is as follows:
+#
+# 1. Parsing of the version_locations option falls back to using the legacy
+# "version_path_separator" key, which if absent then falls back to the legacy
+# behavior of splitting on spaces and/or commas.
+# 2. Parsing of the prepend_sys_path option falls back to the legacy
+# behavior of splitting on spaces, commas, or colons.
+#
+# Valid values for path_separator are:
+#
+# path_separator = :
+# path_separator = ;
+# path_separator = space
+# path_separator = newline
+#
+# Use os.pathsep. Default configuration used for new projects.
+path_separator = os
+
+# set to 'true' to search source files recursively
+# in each "version_locations" directory
+# new in Alembic version 1.10
+# recursive_version_locations = false
+
+# the output encoding used when revision files
+# are written from script.py.mako
+# output_encoding = utf-8
+
+# database URL. This is consumed by the user-maintained env.py script only.
+# other means of configuring database URLs may be customized within the env.py
+# file.
+sqlalchemy.url = driver://user:pass@localhost/dbname
+
+
+[post_write_hooks]
+# post_write_hooks defines scripts or Python functions that are run
+# on newly generated revision scripts. See the documentation for further
+# detail and examples
+
+# format using "black" - use the console_scripts runner, against the "black" entrypoint
+# hooks = black
+# black.type = console_scripts
+# black.entrypoint = black
+# black.options = -l 79 REVISION_SCRIPT_FILENAME
+
+# lint with attempts to fix using "ruff" - use the module runner, against the "ruff" module
+# hooks = ruff
+# ruff.type = module
+# ruff.module = ruff
+# ruff.options = check --fix REVISION_SCRIPT_FILENAME
+
+# Alternatively, use the exec runner to execute a binary found on your PATH
+# hooks = ruff
+# ruff.type = exec
+# ruff.executable = ruff
+# ruff.options = check --fix REVISION_SCRIPT_FILENAME
+
+# Logging configuration. This is also consumed by the user-maintained
+# env.py script only.
+[loggers]
+keys = root,sqlalchemy,alembic
+
+[handlers]
+keys = console
+
+[formatters]
+keys = generic
+
+[logger_root]
+level = WARNING
+handlers = console
+qualname =
+
+[logger_sqlalchemy]
+level = WARNING
+handlers =
+qualname = sqlalchemy.engine
+
+[logger_alembic]
+level = INFO
+handlers =
+qualname = alembic
+
+[handler_console]
+class = StreamHandler
+args = (sys.stderr,)
+level = NOTSET
+formatter = generic
+
+[formatter_generic]
+format = %(levelname)-5.5s [%(name)s] %(message)s
+datefmt = %H:%M:%S
diff --git a/backend/alembic/README b/backend/alembic/README
new file mode 100644
index 0000000..98e4f9c
--- /dev/null
+++ b/backend/alembic/README
@@ -0,0 +1 @@
+Generic single-database configuration.
\ No newline at end of file
diff --git a/backend/alembic/env.py b/backend/alembic/env.py
new file mode 100644
index 0000000..1bc5f78
--- /dev/null
+++ b/backend/alembic/env.py
@@ -0,0 +1,87 @@
+import os
+import sys
+from logging.config import fileConfig
+
+from sqlalchemy import engine_from_config
+from sqlalchemy import pool
+
+from alembic import context
+
+# Add the current working directory to sys.path so `db` can be imported
+# (Alembic is expected to be run from the backend/ directory)
+sys.path.append(os.getcwd())
+
+from db.database import Base, SQLALCHEMY_DATABASE_URL
+from db.models import User, Resume # Import all models here
+
+# this is the Alembic Config object, which provides
+# access to the values within the .ini file in use.
+config = context.config
+
+# Interpret the config file for Python logging.
+# This line sets up loggers basically.
+if config.config_file_name is not None:
+ fileConfig(config.config_file_name)
+
+# Set the sqlalchemy.url in the config object
+config.set_main_option("sqlalchemy.url", SQLALCHEMY_DATABASE_URL)
+
+# add your model's MetaData object here
+# for 'autogenerate' support
+target_metadata = Base.metadata
+
+# other values from the config, defined by the needs of env.py,
+# can be acquired:
+# my_important_option = config.get_main_option("my_important_option")
+# ... etc.
+
+
+def run_migrations_offline() -> None:
+ """Run migrations in 'offline' mode.
+
+ This configures the context with just a URL
+ and not an Engine, though an Engine is acceptable
+ here as well. By skipping the Engine creation
+ we don't even need a DBAPI to be available.
+
+ Calls to context.execute() here emit the given string to the
+ script output.
+
+ """
+ url = config.get_main_option("sqlalchemy.url")
+ context.configure(
+ url=url,
+ target_metadata=target_metadata,
+ literal_binds=True,
+ dialect_opts={"paramstyle": "named"},
+ )
+
+ with context.begin_transaction():
+ context.run_migrations()
+
+
+def run_migrations_online() -> None:
+ """Run migrations in 'online' mode.
+
+ In this scenario we need to create an Engine
+ and associate a connection with the context.
+
+ """
+ connectable = engine_from_config(
+ config.get_section(config.config_ini_section, {}),
+ prefix="sqlalchemy.",
+ poolclass=pool.NullPool,
+ )
+
+ with connectable.connect() as connection:
+ context.configure(
+ connection=connection, target_metadata=target_metadata
+ )
+
+ with context.begin_transaction():
+ context.run_migrations()
+
+
+if context.is_offline_mode():
+ run_migrations_offline()
+else:
+ run_migrations_online()
diff --git a/backend/alembic/script.py.mako b/backend/alembic/script.py.mako
new file mode 100644
index 0000000..1101630
--- /dev/null
+++ b/backend/alembic/script.py.mako
@@ -0,0 +1,28 @@
+"""${message}
+
+Revision ID: ${up_revision}
+Revises: ${down_revision | comma,n}
+Create Date: ${create_date}
+
+"""
+from typing import Sequence, Union
+
+from alembic import op
+import sqlalchemy as sa
+${imports if imports else ""}
+
+# revision identifiers, used by Alembic.
+revision: str = ${repr(up_revision)}
+down_revision: Union[str, Sequence[str], None] = ${repr(down_revision)}
+branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
+depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
+
+
+def upgrade() -> None:
+ """Upgrade schema."""
+ ${upgrades if upgrades else "pass"}
+
+
+def downgrade() -> None:
+ """Downgrade schema."""
+ ${downgrades if downgrades else "pass"}
diff --git a/backend/alembic/versions/e9dbfc41c641_initial_migration.py b/backend/alembic/versions/e9dbfc41c641_initial_migration.py
new file mode 100644
index 0000000..1eec9cd
--- /dev/null
+++ b/backend/alembic/versions/e9dbfc41c641_initial_migration.py
@@ -0,0 +1,32 @@
+"""Initial migraion
+
+Revision ID: e9dbfc41c641
+Revises:
+Create Date: 2025-11-27 21:08:54.018013
+
+"""
+from typing import Sequence, Union
+
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision: str = 'e9dbfc41c641'
+down_revision: Union[str, Sequence[str], None] = None
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+ """Upgrade schema."""
+ # ### commands auto generated by Alembic - please adjust! ###
+ pass
+ # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+ """Downgrade schema."""
+ # ### commands auto generated by Alembic - please adjust! ###
+ pass
+ # ### end Alembic commands ###
diff --git a/backend/db/__init__.py b/backend/db/__init__.py
new file mode 100644
index 0000000..05ef07d
--- /dev/null
+++ b/backend/db/__init__.py
@@ -0,0 +1,10 @@
+from .database import engine, Base, get_db
+from .models import User, Resume
+
+__all__ = [
+ "engine",
+ "Base",
+ "get_db",
+ "User",
+ "Resume",
+]
diff --git a/backend/db/database.py b/backend/db/database.py
new file mode 100644
index 0000000..cc2078b
--- /dev/null
+++ b/backend/db/database.py
@@ -0,0 +1,23 @@
+from sqlalchemy import create_engine
+from sqlalchemy.orm import declarative_base, sessionmaker
+import os
+
+MYSQL_USER = os.getenv("MYSQL_USER", "root")
+MYSQL_PASSWORD = os.getenv("MYSQL_PASSWORD", "root")
+MYSQL_HOST = os.getenv("MYSQL_HOST", "localhost")
+MYSQL_DATABASE = os.getenv("MYSQL_DATABASE", "jobpt")
+
+SQLALCHEMY_DATABASE_URL = f"mysql+pymysql://{MYSQL_USER}:{MYSQL_PASSWORD}@{MYSQL_HOST}/{MYSQL_DATABASE}"
+
+engine = create_engine(SQLALCHEMY_DATABASE_URL)
+SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
+
+Base = declarative_base()
+
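+# FastAPI dependency: yields a database session per request and closes it when the request finishes.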
+def get_db():
+ db = SessionLocal()
+ try:
+ yield db
+ finally:
+ db.close()
diff --git a/backend/db/models.py b/backend/db/models.py
new file mode 100644
index 0000000..fd3b2fc
--- /dev/null
+++ b/backend/db/models.py
@@ -0,0 +1,34 @@
+from sqlalchemy import Integer, String, DateTime, ForeignKey
+from sqlalchemy.orm import relationship, mapped_column, Mapped
+from datetime import datetime
+from zoneinfo import ZoneInfo
+
+from .database import Base
+
+class User(Base):
+ __tablename__ = "users"
+
+ id: Mapped[int] = mapped_column(Integer, primary_key=True, index=True)
+ user_id: Mapped[str] = mapped_column(String(255), unique=True, index=True, nullable=False)
+ password: Mapped[str] = mapped_column(String(255), nullable=False)
+ created_at: Mapped[datetime] = mapped_column(
+ DateTime(timezone=True),
+ default=lambda: datetime.now(ZoneInfo("Asia/Seoul")),
+ nullable=False
+ )
+
+ resumes: Mapped[list["Resume"]] = relationship("Resume", back_populates="user")
+
+class Resume(Base):
+ __tablename__ = "resumes"
+
+ id: Mapped[int] = mapped_column(Integer, primary_key=True, index=True)
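+    # Note: the FK references the string login ID (users.user_id), not the integer primary key.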
+ user_id: Mapped[str] = mapped_column(String(255), ForeignKey("users.user_id"))
+ pdf_url: Mapped[str | None] = mapped_column(String(500), nullable=True)
+ created_at: Mapped[datetime] = mapped_column(
+ DateTime(timezone=True),
+ default=lambda: datetime.now(ZoneInfo("Asia/Seoul")),
+ nullable=False
+ )
+
+ user: Mapped["User"] = relationship("User", back_populates="resumes")
diff --git a/backend/db/run.sh b/backend/db/run.sh
new file mode 100644
index 0000000..af4eea8
--- /dev/null
+++ b/backend/db/run.sh
@@ -0,0 +1,2 @@
+#!/bin/bash
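+# Start a standalone MySQL container for local development (root password "root", port 3306, utf8mb4 charset).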
+docker run --name jobpt-mysql -e LC_ALL=C.UTF-8 -e MYSQL_ROOT_PASSWORD=root -e MYSQL_DATABASE=jobpt -d -p 3306:3306 mysql:latest --character-set-server=utf8mb4 --collation-server=utf8mb4_unicode_ci
\ No newline at end of file
diff --git a/backend/main.py b/backend/main.py
index cca0747..7573bd9 100644
--- a/backend/main.py
+++ b/backend/main.py
@@ -27,6 +27,13 @@
from ATS_agent.ats_analyzer_improved import ATSAnalyzer
+from db.database import engine, Base
+from db import models
+from routers import auth
+
+# Create database tables
+models.Base.metadata.create_all(bind=engine)
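+# (create_all only creates tables that do not exist yet; schema changes are handled via Alembic migrations)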
+
# 캐시 저장소
resume_cache = {}
@@ -48,13 +55,24 @@
docs_url=None if is_prod else "/api/docs",
redoc_url=None if is_prod else "/api/redoc",
openapi_url=None if is_prod else "/api/openapi.json",
- servers=[{"url": "/api"}], # Swagger에서 /api prefix 붙여 호출
)
+# # Middleware to strip /api prefix for local development
+# @app.middleware("http")
+# async def strip_api_prefix(request: Request, call_next):
+# if request.url.path.startswith("/api"):
+# request.scope["path"] = request.url.path[4:]
+# response = await call_next(request)
+# return response
+
# /api prefix를 모든 라우트에 추가
from fastapi import APIRouter
api_router = APIRouter(prefix="/api")
+# Register the API router on the app
+app.include_router(api_router)
+app.include_router(auth.router)
+
# 로거 설정
logger = logging.getLogger("jobpt")
if not logger.handlers:
@@ -302,9 +320,6 @@ async def evaluate(request: EvaluateRequest):
raise HTTPException(status_code=500, detail=str(e))
-# API router를 앱에 등록
-app.include_router(api_router)
-
# 개발용 실행 명령
if __name__ == "__main__":
diff --git a/backend/requirements.txt b/backend/requirements.txt
index 4fa87a8..e5ada55 100644
--- a/backend/requirements.txt
+++ b/backend/requirements.txt
@@ -40,3 +40,11 @@ python-dotenv>=1.0.1
openai>=1.0.0
requests>=2.28.0
langfuse>=2.60.4
+
+# database
+sqlalchemy==2.0.44
+pymysql==1.1.2
+bcrypt==4.0.1
+passlib==1.7.4
+python-jose[cryptography]==3.5.0
+alembic==1.17.2
diff --git a/backend/routers/auth.py b/backend/routers/auth.py
new file mode 100644
index 0000000..023eed4
--- /dev/null
+++ b/backend/routers/auth.py
@@ -0,0 +1,114 @@
+from datetime import datetime, timedelta, timezone
+from typing import Optional
+from fastapi import APIRouter, Depends, HTTPException, status
+from fastapi.security import OAuth2PasswordBearer, OAuth2PasswordRequestForm
+from sqlalchemy.orm import Session
+from passlib.context import CryptContext
+from jose import jwt
+
+from db.database import get_db
+from db.models import User
+from pydantic import BaseModel
+import os
+
+# Configuration
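+# SECRET_KEY must be set in the environment (see .env.example); token signing fails without it.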
+SECRET_KEY = os.getenv("SECRET_KEY")
+ALGORITHM = "HS256"
+ACCESS_TOKEN_EXPIRE_MINUTES = 60 * 24  # tokens stay valid for one day
+
+# Password hashing
+bcrypt_context = CryptContext(schemes=["bcrypt"], deprecated="auto")
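+# Extracts the Bearer token from the Authorization header on protected endpoints (not used by any route in this file yet).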
+oauth2_scheme = OAuth2PasswordBearer(tokenUrl="auth/login")
+
+def get_password_hash(password: str) -> str:
+ return bcrypt_context.hash(password)
+
+def verify_password(plain_password: str, hashed_password: str) -> bool:
+ return bcrypt_context.verify(plain_password, hashed_password)
+
+def create_access_token(data: dict, expires_delta: Optional[timedelta] = None):
+ to_encode = data.copy()
+ if expires_delta:
+        expire = datetime.now(timezone.utc) + expires_delta
+    else:
+        expire = datetime.now(timezone.utc) + timedelta(minutes=15)
+ to_encode.update({"exp": expire})
+ encoded_jwt = jwt.encode(to_encode, SECRET_KEY, algorithm=ALGORITHM)
+ return encoded_jwt
+
+# Router
+router = APIRouter(
+ prefix="/auth",
+ tags=["auth"],
+)
+
+# Request/Response models
+class SignupRequest(BaseModel):
+ user_id: str
+ password: str
+
+class SignupResponse(BaseModel):
+ id: int
+ user_id: str
+ message: str
+
+ class Config:
+ from_attributes = True
+
+class Token(BaseModel):
+ access_token: str
+ token_type: str
+
+# Signup endpoint
+@router.post("/sign-up", response_model=SignupResponse, status_code=status.HTTP_201_CREATED)
+def signup(request: SignupRequest, db: Session = Depends(get_db)):
+ """
+    Sign-up API
+    - user_id: ID used for login
+    - password: password used for login (hashed with bcrypt before storage)
+ """
+ # Check if user already exists
+ existing_user = db.query(User).filter(User.user_id == request.user_id).first()
+ if existing_user:
+ raise HTTPException(
+ status_code=status.HTTP_400_BAD_REQUEST,
+ detail="User ID already exists"
+ )
+
+ # Create new user
+ hashed_password = get_password_hash(request.password)
+ new_user = User(
+ user_id=request.user_id,
+ password=hashed_password
+ )
+
+ db.add(new_user)
+ db.commit()
+ db.refresh(new_user)
+
+ return SignupResponse(
+ id=new_user.id,
+ user_id=new_user.user_id,
+ message="User created successfully"
+ )
+
+@router.post("/login", response_model=Token)
+def login(form_data: OAuth2PasswordRequestForm = Depends(), db: Session = Depends(get_db)):
+ """
+    Login API
+ - username: user_id
+ - password: password
+ """
+ user = db.query(User).filter(User.user_id == form_data.username).first()
+ if not user or not verify_password(form_data.password, user.password):
+ raise HTTPException(
+ status_code=status.HTTP_401_UNAUTHORIZED,
+ detail="Incorrect username or password",
+ headers={"WWW-Authenticate": "Bearer"},
+ )
+
+ access_token_expires = timedelta(minutes=ACCESS_TOKEN_EXPIRE_MINUTES)
+ access_token = create_access_token(
+ data={"sub": user.user_id}, expires_delta=access_token_expires
+ )
+ return Token(access_token=access_token, token_type="bearer")