feat(tkuser): 입사일 자동표시 + 퇴사자 목록 분리 + 퇴사일 관리

- 사용자 추가 시 hire_date 전송 (서울 오늘날짜 기본값)
- resigned_date 컬럼 마이그레이션 + CRUD 지원
- 비활성화(삭제) 시 resigned_date 자동 설정 (COALESCE)
- 활성/비활성 사용자 목록 분리, 퇴사자 접기/펼치기
- 퇴사자 재활성화 기능 (resigned_date 초기화)
- 편집 모달에 퇴사일 필드 추가

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
Hyungi Ahn
2026-03-23 15:47:14 +09:00
parent 1f3eb14128
commit f09aa0875a
8 changed files with 120 additions and 27 deletions

View File

@@ -24,7 +24,7 @@ async function getUsers(req, res, next) {
*/
// Creates an in-house user. NOTE(review): this is a rendered diff fragment —
// adjacent lines below are the before/after versions of the same statement,
// and lines between the two @@ hunks are elided from this view.
async function createUser(req, res, next) {
try {
// OLD line (pre-change) followed by NEW line: the change adds hire_date
// to the destructured request body.
const { username, password, name, full_name, department, department_id, role } = req.body;
const { username, password, name, full_name, department, department_id, role, hire_date } = req.body;
if (!username || !password) {
return res.status(400).json({ success: false, error: '사용자명과 비밀번호는 필수입니다' });
}
@@ -41,7 +41,8 @@ async function createUser(req, res, next) {
name: name || full_name,
department,
department_id: department_id || null,
// OLD line, then NEW lines: `role` gains a trailing comma and hire_date
// (defaulting to null) is passed through to the model's create().
role
role,
hire_date: hire_date || null
});
// 201 Created with the freshly created user row.
res.status(201).json({ success: true, data: user });
} catch (err) {

View File

@@ -90,8 +90,9 @@ app.use((err, req, res, next) => {
// Startup: run migrations to completion before starting the server.
// NOTE(review): diff fragment — the OLD require line is immediately followed
// by the NEW one that also pulls in runGenericMigration; the rest of start()
// is outside this hunk.
async function start() {
try {
const { runMigration } = require('./models/vacationSettingsModel');
const { runMigration, runGenericMigration } = require('./models/vacationSettingsModel');
await runMigration();
// NEW: apply the resigned_date column migration at boot (idempotent —
// duplicate-field/table/key errors are treated as "already applied").
await runGenericMigration('20260323_add_resigned_date.sql');
} catch (err) {
// Only abort on errors that are NOT the expected "already migrated" codes.
if (!['ER_DUP_FIELDNAME', 'ER_TABLE_EXISTS_ERROR', 'ER_DUP_KEYNAME'].includes(err.code)) {
console.error('Fatal migration error:', err.message);

View File

@@ -79,18 +79,18 @@ async function findById(userId) {
// Returns every in-house user (partner_company_id IS NULL), ordered by user_id.
// NOTE(review): diff fragment — the two adjacent SELECT strings are the
// before/after versions of one line; the NEW (second) one adds resigned_date
// to the column list. As literal JS the pair would be a syntax error.
async function findAll() {
const db = getPool();
const [rows] = await db.query(
'SELECT user_id, username, name, department, department_id, role, system1_access, system2_access, system3_access, is_active, last_login, created_at, hire_date FROM sso_users WHERE partner_company_id IS NULL ORDER BY user_id'
'SELECT user_id, username, name, department, department_id, role, system1_access, system2_access, system3_access, is_active, last_login, created_at, hire_date, resigned_date FROM sso_users WHERE partner_company_id IS NULL ORDER BY user_id'
);
return rows;
}
// Inserts a new sso_users row and returns the created record via findById.
// NOTE(review): diff fragment — each OLD line is immediately followed by its
// NEW replacement. The change threads hire_date from the signature into the
// INSERT column list, placeholder list, and value array (null when absent).
async function create({ username, password, name, department, department_id, role }) {
async function create({ username, password, name, department, department_id, role, hire_date }) {
const db = getPool();
// Password is hashed before storage — only password_hash is persisted.
const password_hash = await hashPassword(password);
const [result] = await db.query(
`INSERT INTO sso_users (username, password_hash, name, department, department_id, role)
VALUES (?, ?, ?, ?, ?, ?)`,
[username, password_hash, name || null, department || null, department_id || null, role || 'user']
`INSERT INTO sso_users (username, password_hash, name, department, department_id, role, hire_date)
VALUES (?, ?, ?, ?, ?, ?, ?)`,
[username, password_hash, name || null, department || null, department_id || null, role || 'user', hire_date || null]
);
// Re-read the row so the caller gets the full persisted record.
return findById(result.insertId);
}
@@ -109,6 +109,7 @@ async function update(userId, data) {
if (data.system3_access !== undefined) { fields.push('system3_access = ?'); values.push(data.system3_access); }
if (data.is_active !== undefined) { fields.push('is_active = ?'); values.push(data.is_active); }
if (data.hire_date !== undefined) { fields.push('hire_date = ?'); values.push(data.hire_date || null); }
if (data.resigned_date !== undefined) { fields.push('resigned_date = ?'); values.push(data.resigned_date || null); }
if (data.password) {
fields.push('password_hash = ?');
values.push(await hashPassword(data.password));
@@ -126,7 +127,7 @@ async function update(userId, data) {
// Soft-deletes a user by flipping is_active to FALSE.
// NOTE(review): diff fragment — OLD UPDATE followed by NEW UPDATE. The NEW
// version also stamps resigned_date with today's date, but only if it is not
// already set (COALESCE preserves an existing resignation date).
async function deleteUser(userId) {
const db = getPool();
await db.query('UPDATE sso_users SET is_active = FALSE WHERE user_id = ?', [userId]);
await db.query('UPDATE sso_users SET is_active = FALSE, resigned_date = COALESCE(resigned_date, CURDATE()) WHERE user_id = ?', [userId]);
}
module.exports = {

View File

@@ -59,4 +59,25 @@ async function runMigration() {
console.log('[tkuser] Sprint 001 migration completed');
}
module.exports = { getAll, getByKey, updateSettings, loadAsObject, runMigration };
/**
 * Runs a one-off SQL migration file from the project's migrations/ directory.
 *
 * The file is read synchronously, split into individual statements on ';',
 * and each statement is executed against the pool. Errors whose MySQL code
 * indicates the migration was already applied (duplicate column, table, or
 * key) are swallowed so repeated startups stay idempotent; any other DB
 * error is rethrown to abort startup.
 *
 * @param {string} filename - migration file name, e.g. '20260323_add_resigned_date.sql'
 * @throws any DB error whose code is not an "already migrated" duplicate code
 */
async function runGenericMigration(filename) {
  const db = getPool();
  const fs = require('fs');
  const path = require('path');
  const sqlFile = path.join(__dirname, '..', '..', 'migrations', filename);
  const sql = fs.readFileSync(sqlFile, 'utf8');
  // Naive split on ';' — adequate for simple DDL, but would break on ';'
  // inside string literals or stored-procedure bodies.
  const statements = sql.split(';').map(s => s.trim()).filter(s => s.length > 0);
  for (const stmt of statements) {
    try {
      await db.query(stmt);
    } catch (err) {
      if (['ER_DUP_FIELDNAME', 'ER_TABLE_EXISTS_ERROR', 'ER_DUP_KEYNAME'].includes(err.code)) {
        // Already migrated — safe to ignore
      } else {
        throw err;
      }
    }
  }
  // BUG FIX: original logged the literal text "$(unknown)" — JS template
  // literals interpolate with ${...}; log the actual migration filename.
  console.log(`[tkuser] Migration ${filename} completed`);
}
module.exports = { getAll, getByKey, updateSettings, loadAsObject, runMigration, runGenericMigration };