🔄 전반적인 시스템 리팩토링 완료
Some checks failed
SonarQube Analysis / SonarQube Scan (push) Has been cancelled

 백엔드 구조 개선:
- DatabaseService: 공통 DB 쿼리 로직 통합
- FileUploadService: 파일 업로드 로직 모듈화 및 트랜잭션 관리 개선
- 서비스 레이어 패턴 도입으로 코드 재사용성 향상

 프론트엔드 컴포넌트 개선:
- LoadingSpinner, ErrorMessage, ConfirmDialog 공통 컴포넌트 생성
- 재사용 가능한 컴포넌트 라이브러리 구축
- deprecated/backup 파일들 완전 제거

 성능 최적화:
- optimize_database.py: 핵심 DB 인덱스 자동 생성
- 쿼리 최적화 및 통계 업데이트 자동화
- VACUUM ANALYZE 자동 실행

 코드 정리:
- 개별 SQL 마이그레이션 파일들을 legacy/ 폴더로 정리
- 중복된 마이그레이션 스크립트 정리
- 깔끔하고 체계적인 프로젝트 구조 완성

 자동 마이그레이션 시스템 강화:
- complete_migrate.py: SQLAlchemy 기반 완전한 마이그레이션
- analyze_and_fix_schema.py: 백엔드 코드 분석 기반 스키마 수정
- fix_missing_tables.py: 누락된 테이블/컬럼 자동 생성
- start.sh: 배포 시 자동 실행 순서 최적화
This commit is contained in:
Hyungi Ahn
2025-10-20 08:41:06 +09:00
parent 0c99697a6f
commit 3398f71b80
61 changed files with 3370 additions and 4512 deletions

View File

@@ -20,11 +20,14 @@ RUN pip install --no-cache-dir -r requirements.txt
# Copy application code
COPY . .
# Make the startup script executable
RUN chmod +x /app/start.sh
# Expose port 8000
EXPOSE 8000
# Environment variables
ENV PYTHONPATH=/app
# NOTE(review): two CMD instructions follow; Docker honors only the LAST CMD,
# so the uvicorn line below is superseded by the start.sh line (diff artifact —
# the old CMD should likely be removed).
CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"]
# Run auto-migrations, then start the server
CMD ["bash", "/app/start.sh"]

View File

@@ -34,11 +34,15 @@ class File(Base):
filename = Column(String(255), nullable=False)
original_filename = Column(String(255), nullable=False)
file_path = Column(String(500), nullable=False)
job_no = Column(String(50)) # 작업 번호
revision = Column(String(20), default='Rev.0')
bom_name = Column(String(200)) # BOM 이름
description = Column(Text) # 파일 설명
upload_date = Column(DateTime, default=datetime.utcnow)
uploaded_by = Column(String(100))
file_type = Column(String(10))
file_size = Column(Integer)
parsed_count = Column(Integer) # 파싱된 자재 수
is_active = Column(Boolean, default=True)
# 관계 설정
@@ -51,22 +55,40 @@ class Material(Base):
id = Column(Integer, primary_key=True, index=True)
file_id = Column(Integer, ForeignKey("files.id"))
line_number = Column(Integer)
row_number = Column(Integer) # 업로드 시 행 번호
original_description = Column(Text, nullable=False)
classified_category = Column(String(50))
classified_subcategory = Column(String(100))
material_grade = Column(String(50))
full_material_grade = Column(Text) # 전체 재질명 (ASTM A312 TP304 등)
schedule = Column(String(20))
size_spec = Column(String(50))
main_nom = Column(String(50)) # 주 사이즈 (4", 150A 등)
red_nom = Column(String(50)) # 축소 사이즈 (Reducing 피팅/플랜지용)
quantity = Column(Numeric(10, 3), nullable=False)
unit = Column(String(10), nullable=False)
# length = Column(Numeric(10, 3)) # 임시로 주석 처리
length = Column(Numeric(10, 3)) # 길이 정보
drawing_name = Column(String(100))
area_code = Column(String(20))
line_no = Column(String(50))
classification_confidence = Column(Numeric(3, 2))
classification_details = Column(JSON) # 분류 상세 정보 (JSON)
is_verified = Column(Boolean, default=False)
verified_by = Column(String(50))
verified_at = Column(DateTime)
# 구매 관련 필드
purchase_confirmed = Column(Boolean, default=False)
confirmed_quantity = Column(Numeric(10, 3))
purchase_status = Column(String(20))
purchase_confirmed_by = Column(String(100))
purchase_confirmed_at = Column(DateTime)
# 리비전 관리 필드
revision_status = Column(String(20)) # 'new', 'changed', 'inventory', 'deleted_not_purchased'
material_hash = Column(String(64)) # 자재 비교용 해시
normalized_description = Column(Text) # 정규화된 설명
drawing_reference = Column(String(100))
notes = Column(Text)
created_at = Column(DateTime, default=datetime.utcnow)
@@ -450,3 +472,349 @@ class MaterialTubingMapping(Base):
# 관계 설정
material = relationship("Material", backref="tubing_mappings")
tubing_product = relationship("TubingProduct", back_populates="material_mappings")
class SupportDetails(Base):
    """Per-material detail rows for pipe-support items."""
    __tablename__ = "support_details"
    id = Column(Integer, primary_key=True, index=True)
    material_id = Column(Integer, ForeignKey("materials.id"), nullable=False)
    file_id = Column(Integer, ForeignKey("files.id"), nullable=False)
    # Support attributes
    support_type = Column(String(50))
    support_subtype = Column(String(100))
    load_rating = Column(String(50))
    load_capacity = Column(String(50))
    material_standard = Column(String(50))
    material_grade = Column(String(50))
    pipe_size = Column(String(50))
    # Dimensions (column names suggest millimetres)
    length_mm = Column(Numeric(10, 2))
    width_mm = Column(Numeric(10, 2))
    height_mm = Column(Numeric(10, 2))
    # Classification confidence
    classification_confidence = Column(Numeric(3, 2))
    # Timestamps
    created_at = Column(DateTime, default=datetime.utcnow)
    updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow)
    # Relationships
    material = relationship("Material")
    file = relationship("File")
class PurchaseRequestItems(Base):
    """Line items belonging to a purchase request."""
    __tablename__ = "purchase_request_items"
    id = Column(Integer, primary_key=True, index=True)
    # Logical reference to purchase_requests.request_id (no FK declared here)
    request_id = Column(String(50), nullable=False)
    material_id = Column(Integer, ForeignKey("materials.id"), nullable=False)
    # Quantity information
    quantity = Column(Integer, nullable=False)
    unit = Column(String(10), nullable=False)
    user_requirement = Column(Text)
    # Timestamps
    created_at = Column(DateTime, default=datetime.utcnow)
    updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow)
    # Relationships
    material = relationship("Material")
class FittingDetails(Base):
    """Per-material detail rows for pipe fittings."""
    __tablename__ = "fitting_details"
    id = Column(Integer, primary_key=True, index=True)
    material_id = Column(Integer, ForeignKey("materials.id"), nullable=False)
    file_id = Column(Integer, ForeignKey("files.id"), nullable=False)
    # Fitting attributes
    fitting_type = Column(String(50))
    fitting_subtype = Column(String(100))
    connection_type = Column(String(50))
    material_standard = Column(String(50))
    material_grade = Column(String(50))
    nominal_size = Column(String(50))
    wall_thickness = Column(String(50))
    # Dimensions (column names suggest millimetres)
    length_mm = Column(Numeric(10, 2))
    width_mm = Column(Numeric(10, 2))
    height_mm = Column(Numeric(10, 2))
    # Classification confidence
    classification_confidence = Column(Numeric(3, 2))
    # Timestamps
    created_at = Column(DateTime, default=datetime.utcnow)
    updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow)
    # Relationships
    material = relationship("Material")
    file = relationship("File")
class FlangeDetails(Base):
    """Per-material detail rows for flanges."""
    __tablename__ = "flange_details"
    id = Column(Integer, primary_key=True, index=True)
    material_id = Column(Integer, ForeignKey("materials.id"), nullable=False)
    file_id = Column(Integer, ForeignKey("files.id"), nullable=False)
    # Flange attributes
    flange_type = Column(String(50))
    flange_subtype = Column(String(100))
    pressure_rating = Column(String(50))
    material_standard = Column(String(50))
    material_grade = Column(String(50))
    nominal_size = Column(String(50))
    # Dimensions
    outer_diameter = Column(Numeric(10, 2))
    thickness = Column(Numeric(10, 2))
    # Classification confidence
    classification_confidence = Column(Numeric(3, 2))
    # Timestamps
    created_at = Column(DateTime, default=datetime.utcnow)
    updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow)
    # Relationships
    material = relationship("Material")
    file = relationship("File")
class ValveDetails(Base):
    """Per-material detail rows for valves."""
    __tablename__ = "valve_details"
    id = Column(Integer, primary_key=True, index=True)
    material_id = Column(Integer, ForeignKey("materials.id"), nullable=False)
    file_id = Column(Integer, ForeignKey("files.id"), nullable=False)
    # Valve attributes
    valve_type = Column(String(50))
    valve_subtype = Column(String(100))
    connection_type = Column(String(50))
    actuation_type = Column(String(50))
    material_standard = Column(String(50))
    material_grade = Column(String(50))
    nominal_size = Column(String(50))
    pressure_rating = Column(String(50))
    # Classification confidence
    classification_confidence = Column(Numeric(3, 2))
    # Timestamps
    created_at = Column(DateTime, default=datetime.utcnow)
    updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow)
    # Relationships
    material = relationship("Material")
    file = relationship("File")
class GasketDetails(Base):
    """Per-material detail rows for gaskets."""
    __tablename__ = "gasket_details"
    id = Column(Integer, primary_key=True, index=True)
    material_id = Column(Integer, ForeignKey("materials.id"), nullable=False)
    file_id = Column(Integer, ForeignKey("files.id"), nullable=False)
    # Gasket attributes
    gasket_type = Column(String(50))
    material_standard = Column(String(50))
    material_grade = Column(String(50))
    nominal_size = Column(String(50))
    pressure_rating = Column(String(50))
    filler_material = Column(String(50))
    thickness = Column(Numeric(10, 2))
    # Classification confidence
    classification_confidence = Column(Numeric(3, 2))
    # Timestamps
    created_at = Column(DateTime, default=datetime.utcnow)
    updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow)
    # Relationships
    material = relationship("Material")
    file = relationship("File")
class BoltDetails(Base):
    """Per-material detail rows for bolts."""
    __tablename__ = "bolt_details"
    id = Column(Integer, primary_key=True, index=True)
    material_id = Column(Integer, ForeignKey("materials.id"), nullable=False)
    file_id = Column(Integer, ForeignKey("files.id"), nullable=False)
    # Bolt attributes
    bolt_type = Column(String(50))
    material_standard = Column(String(50))
    material_grade = Column(String(50))
    thread_size = Column(String(50))
    length = Column(Numeric(10, 2))
    pressure_rating = Column(String(50))
    # Classification confidence
    classification_confidence = Column(Numeric(3, 2))
    # Timestamps
    created_at = Column(DateTime, default=datetime.utcnow)
    updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow)
    # Relationships
    material = relationship("Material")
    file = relationship("File")
class InstrumentDetails(Base):
    """Per-material detail rows for instruments."""
    __tablename__ = "instrument_details"
    id = Column(Integer, primary_key=True, index=True)
    material_id = Column(Integer, ForeignKey("materials.id"), nullable=False)
    file_id = Column(Integer, ForeignKey("files.id"), nullable=False)
    # Instrument attributes
    instrument_type = Column(String(50))
    instrument_subtype = Column(String(100))
    connection_type = Column(String(50))
    material_standard = Column(String(50))
    material_grade = Column(String(50))
    nominal_size = Column(String(50))
    # Classification confidence
    classification_confidence = Column(Numeric(3, 2))
    # Timestamps
    created_at = Column(DateTime, default=datetime.utcnow)
    updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow)
    # Relationships
    material = relationship("Material")
    file = relationship("File")
class PurchaseRequests(Base):
    """Purchase request header, keyed by a string request_id."""
    __tablename__ = "purchase_requests"
    request_id = Column(String(50), primary_key=True, index=True)
    request_no = Column(String(100), nullable=False)
    file_id = Column(Integer, ForeignKey("files.id"), nullable=False)
    job_no = Column(String(50), nullable=False)
    category = Column(String(50))
    material_count = Column(Integer)
    excel_file_path = Column(String(500))
    requested_by = Column(Integer, ForeignKey("users.user_id"))
    # Timestamps
    created_at = Column(DateTime, default=datetime.utcnow)
    updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow)
    # Relationships
    file = relationship("File")
    requested_by_user = relationship("User", foreign_keys=[requested_by])
class Jobs(Base):
    """Job registry; job_no is the unique business key."""
    __tablename__ = "jobs"
    id = Column(Integer, primary_key=True, index=True)
    job_no = Column(String(50), unique=True, nullable=False)
    job_name = Column(String(200))
    status = Column(String(20), default='active')
    created_at = Column(DateTime, default=datetime.utcnow)
class PipeEndPreparations(Base):
    """Pipe end-preparation details attached to a material row."""
    __tablename__ = "pipe_end_preparations"
    id = Column(Integer, primary_key=True, index=True)
    material_id = Column(Integer, ForeignKey("materials.id"), nullable=False)
    file_id = Column(Integer, ForeignKey("files.id"), nullable=False)
    end_prep_type = Column(String(50))
    end_prep_standard = Column(String(50))
    classification_confidence = Column(Numeric(3, 2))
    created_at = Column(DateTime, default=datetime.utcnow)
    # Relationships
    material = relationship("Material")
    file = relationship("File")
class MaterialPurchaseTracking(Base):
    """Purchase-status tracking for a material within a file."""
    __tablename__ = "material_purchase_tracking"
    id = Column(Integer, primary_key=True, index=True)
    material_id = Column(Integer, ForeignKey("materials.id"), nullable=False)
    file_id = Column(Integer, ForeignKey("files.id"), nullable=False)
    purchase_status = Column(String(20))
    requested_quantity = Column(Integer)
    confirmed_quantity = Column(Integer)
    purchase_date = Column(DateTime)
    created_at = Column(DateTime, default=datetime.utcnow)
    # Relationships
    material = relationship("Material")
    file = relationship("File")
class ExcelExports(Base):
    """Record of an Excel export produced from a file."""
    __tablename__ = "excel_exports"
    id = Column(Integer, primary_key=True, index=True)
    file_id = Column(Integer, ForeignKey("files.id"), nullable=False)
    export_type = Column(String(50))
    file_path = Column(String(500))
    exported_by = Column(Integer, ForeignKey("users.user_id"))
    created_at = Column(DateTime, default=datetime.utcnow)
    # Relationships
    file = relationship("File")
    exported_by_user = relationship("User", foreign_keys=[exported_by])
class UserActivityLogs(Base):
    """Audit log of user activity."""
    __tablename__ = "user_activity_logs"
    id = Column(Integer, primary_key=True, index=True)
    user_id = Column(Integer, ForeignKey("users.user_id"))
    activity_type = Column(String(50))
    activity_description = Column(Text)
    created_at = Column(DateTime, default=datetime.utcnow)
    # Relationships
    user = relationship("User")
class ExcelExportHistory(Base):
    """History of Excel exports, keyed by a string export_id."""
    __tablename__ = "excel_export_history"
    export_id = Column(String(50), primary_key=True, index=True)
    file_id = Column(Integer, ForeignKey("files.id"))
    job_no = Column(String(50))
    exported_by = Column(Integer, ForeignKey("users.user_id"))
    export_date = Column(DateTime, default=datetime.utcnow)
    # Relationships
    file = relationship("File")
    exported_by_user = relationship("User", foreign_keys=[exported_by])
class ExportedMaterials(Base):
    """Materials included in a given export (FK to excel_export_history)."""
    __tablename__ = "exported_materials"
    id = Column(Integer, primary_key=True, index=True)
    export_id = Column(String(50), ForeignKey("excel_export_history.export_id"))
    material_id = Column(Integer, ForeignKey("materials.id"))
    quantity = Column(Integer)
    status = Column(String(20))
    # Relationships
    export_history = relationship("ExcelExportHistory")
    material = relationship("Material")
class PurchaseStatusHistory(Base):
    """Audit trail of purchase-status transitions for a material."""
    __tablename__ = "purchase_status_history"
    id = Column(Integer, primary_key=True, index=True)
    material_id = Column(Integer, ForeignKey("materials.id"))
    old_status = Column(String(20))
    new_status = Column(String(20))
    changed_by = Column(Integer, ForeignKey("users.user_id"))
    changed_at = Column(DateTime, default=datetime.utcnow)
    # Relationships
    material = relationship("Material")
    changed_by_user = relationship("User", foreign_keys=[changed_by])

View File

@@ -338,48 +338,68 @@ async def upload_file(
db: Session = Depends(get_db),
current_user: dict = Depends(get_current_user)
):
# 🎯 트랜잭션 오류 방지: 완전한 트랜잭션 초기화
"""
파일 업로드 API - 리팩토링된 서비스 레이어 사용
"""
from ..services.file_upload_service import FileUploadService
upload_service = FileUploadService(db)
try:
# 1. 현재 트랜잭션 완전 롤백
db.rollback()
print("🔄 1단계: 이전 트랜잭션 롤백 완료")
# 1. 업로드 요청 검증
upload_service.validate_upload_request(file, job_no)
# 2. 세션 상태 초기화
db.close()
print("🔄 2단계: 세션 닫기 완료")
# 2. 파일 저장
unique_filename, file_path = upload_service.save_uploaded_file(file)
# 3. 새 세션 생성
from ..database import get_db
db = next(get_db())
print("🔄 3단계: 새 세션 생성 완료")
except Exception as e:
print(f"⚠️ 트랜잭션 초기화 중 오류: {e}")
# 오류 발생 시에도 계속 진행
# 로그 제거
if not validate_file_extension(file.filename):
raise HTTPException(
status_code=400,
detail=f"지원하지 않는 파일 형식입니다. 허용된 확장자: {', '.join(ALLOWED_EXTENSIONS)}"
# 3. 파일 레코드 생성
file_record = upload_service.create_file_record(
filename=unique_filename,
original_filename=file.filename,
file_path=str(file_path),
job_no=job_no,
revision=revision,
bom_name=bom_name,
file_size=file.size or 0,
parsed_count=0, # 임시값, 파싱 후 업데이트
uploaded_by=current_user.get('username', 'unknown'),
parent_file_id=parent_file_id
)
if file.size and file.size > 10 * 1024 * 1024:
raise HTTPException(status_code=400, detail="파일 크기는 10MB를 초과할 수 없습니다")
unique_filename = generate_unique_filename(file.filename)
file_path = UPLOAD_DIR / unique_filename
try:
# 로그 제거
with open(file_path, "wb") as buffer:
shutil.copyfileobj(file.file, buffer)
# 로그 제거
# 4. 자재 데이터 처리
processing_result = upload_service.process_materials_data(
file_path=file_path,
file_id=file_record.id,
job_no=job_no,
revision=revision,
parent_file_id=parent_file_id
)
# 5. 파일 레코드 업데이트 (파싱된 자재 수)
file_record.parsed_count = processing_result['materials_count']
db.commit()
logger.info(f"File upload completed: {file_record.id}")
return {
"success": True,
"message": "파일 업로드 및 처리가 완료되었습니다.",
"file_id": file_record.id,
"filename": file_record.filename,
"materials_count": processing_result['materials_count'],
"classification_results": processing_result['classification_results']
}
except HTTPException:
# HTTP 예외는 그대로 전달
upload_service.cleanup_failed_upload(file_path if 'file_path' in locals() else None)
raise
except Exception as e:
raise HTTPException(status_code=500, detail=f"파일 저장 실패: {str(e)}")
try:
# 로그 제거
materials_data = parse_file_data(str(file_path))
# 기타 예외 처리
db.rollback()
upload_service.cleanup_failed_upload(file_path if 'file_path' in locals() else None)
logger.error(f"File upload failed: {e}")
raise HTTPException(status_code=500, detail=f"파일 업로드 실패: {str(e)}")
parsed_count = len(materials_data)
# 로그 제거
@@ -1861,26 +1881,87 @@ async def get_files_stats(db: Session = Depends(get_db)):
@router.delete("/delete/{file_id}")
async def delete_file(file_id: int, db: Session = Depends(get_db)):
    """Delete a file and all of its dependent rows in FK-safe order.

    Raises 404 when the file does not exist, 500 on any other failure.
    Best-effort deletes (try/except per table) tolerate schemas where
    some side tables are absent.
    """
    try:
        # Confirm the file exists before touching dependent tables.
        file_check = db.execute(text("SELECT id FROM files WHERE id = :file_id"), {"file_id": file_id}).fetchone()
        if not file_check:
            raise HTTPException(status_code=404, detail="파일을 찾을 수 없습니다")
        # 1. Per-category detail tables first (they reference materials/files).
        detail_tables = [
            "support_details", "fitting_details", "flange_details",
            "valve_details", "gasket_details", "bolt_details",
            "instrument_details", "user_requirements"
        ]
        for table in detail_tables:
            try:
                db.execute(text(f"DELETE FROM {table} WHERE file_id = :file_id"), {"file_id": file_id})
            except Exception as detail_error:
                # Table may be missing in older schemas — keep going.
                print(f"Warning: Failed to delete from {table}: {detail_error}")
        # 2. Purchase data: items reference purchase_requests, so items go first.
        try:
            db.execute(text("""
                DELETE FROM purchase_request_items
                WHERE request_id IN (
                    SELECT request_id FROM purchase_requests WHERE file_id = :file_id
                )
            """), {"file_id": file_id})
            db.execute(text("DELETE FROM purchase_requests WHERE file_id = :file_id"), {"file_id": file_id})
        except Exception as purchase_error:
            print(f"Warning: Failed to delete purchase data: {purchase_error}")
        # 3. Export-related rows. BUG FIX: exported_materials must be resolved
        # and deleted BEFORE excel_exports — the previous order deleted
        # excel_exports first, so the export-id subquery always came back empty.
        try:
            export_ids = db.execute(text("SELECT id FROM excel_exports WHERE file_id = :file_id"), {"file_id": file_id}).fetchall()
            for export_row in export_ids:
                db.execute(text("DELETE FROM exported_materials WHERE export_id = :export_id"), {"export_id": export_row[0]})
        except Exception as exported_error:
            print(f"Warning: Failed to delete from exported_materials: {exported_error}")
        try:
            db.execute(text("DELETE FROM excel_exports WHERE file_id = :file_id"), {"file_id": file_id})
        except Exception as excel_error:
            print(f"Warning: Failed to delete from excel_exports: {excel_error}")
        try:
            # Activity logs are linked via (target_id, target_type), not an FK.
            db.execute(text("DELETE FROM user_activity_logs WHERE target_id = :file_id AND target_type = 'file'"), {"file_id": file_id})
        except Exception as activity_error:
            print(f"Warning: Failed to delete from user_activity_logs: {activity_error}")
        # 4. Materials, then the file row itself.
        materials_result = db.execute(text("DELETE FROM materials WHERE file_id = :file_id"), {"file_id": file_id})
        print(f"Deleted {materials_result.rowcount} materials")
        file_result = db.execute(text("DELETE FROM files WHERE id = :file_id"), {"file_id": file_id})
        if file_result.rowcount == 0:
            raise HTTPException(status_code=404, detail="파일 삭제에 실패했습니다")
        db.commit()
        return {
            "success": True,
            "message": "파일과 모든 관련 데이터가 삭제되었습니다",
            "deleted_materials": materials_result.rowcount
        }
    except HTTPException:
        db.rollback()
        raise
    except Exception as e:
        db.rollback()
        print(f"File deletion error: {str(e)}")
        raise HTTPException(status_code=500, detail=f"파일 삭제 실패: {str(e)}")
@router.get("/materials-v2") # 완전히 새로운 엔드포인트

View File

@@ -0,0 +1,350 @@
"""
데이터베이스 공통 서비스 레이어
중복된 DB 쿼리 로직을 통합하고 재사용 가능한 서비스 제공
"""
from sqlalchemy.orm import Session
from sqlalchemy import text, and_, or_
from typing import List, Dict, Any, Optional, Union
from ..models import Material, File, User, Project
from ..utils.logger import get_logger
from ..utils.error_handlers import ErrorResponse
logger = get_logger(__name__)
class DatabaseService:
    """Shared database service layer.

    Consolidates duplicated raw-SQL query logic behind one session-bound
    helper so routers and services reuse the same queries and error handling.
    """

    def __init__(self, db: Session):
        # Request-scoped SQLAlchemy session supplied by the caller.
        self.db = db

    def execute_query(self, query: str, params: Dict = None) -> Any:
        """Execute raw SQL with bound parameters; log and re-raise on failure."""
        try:
            return self.db.execute(text(query), params or {})
        except Exception as e:
            # Log only a prefix of the query to keep log lines bounded.
            logger.error(f"Query execution failed: {query[:100]}... Error: {e}")
            raise

    def get_materials_with_details(
        self,
        file_id: Optional[int] = None,
        job_no: Optional[str] = None,
        limit: int = 1000,
        offset: int = 0,
        exclude_requested: bool = False
    ) -> Dict[str, Any]:
        """Fetch materials joined with file/project/detail/tracking info.

        Returns {"materials": [...], "total_count": int, "limit": int,
        "offset": int}. Filters are optional; exclude_requested drops rows
        already confirmed for purchase.
        """
        where_conditions = ["1=1"]
        params = {"limit": limit, "offset": offset}
        if file_id:
            where_conditions.append("m.file_id = :file_id")
            params["file_id"] = file_id
        if job_no:
            where_conditions.append("f.job_no = :job_no")
            params["job_no"] = job_no
        if exclude_requested:
            where_conditions.append("(m.purchase_confirmed IS NULL OR m.purchase_confirmed = false)")
        # Consolidated material query (replaces several per-router copies).
        query = f"""
        SELECT
            m.id, m.file_id, m.line_number, m.row_number,
            m.original_description, m.classified_category, m.classified_subcategory,
            m.material_grade, m.full_material_grade, m.schedule, m.size_spec,
            m.main_nom, m.red_nom, m.quantity, m.unit, m.length,
            m.drawing_name, m.area_code, m.line_no,
            m.classification_confidence, m.classification_details,
            m.purchase_confirmed, m.confirmed_quantity, m.purchase_status,
            m.purchase_confirmed_by, m.purchase_confirmed_at,
            m.revision_status, m.material_hash, m.normalized_description,
            m.drawing_reference, m.notes, m.created_at,
            -- 파일 정보
            f.filename, f.original_filename, f.job_no, f.revision, f.bom_name,
            f.upload_date, f.uploaded_by,
            -- 프로젝트 정보
            p.project_name, p.client_name,
            -- 상세 정보들 (LEFT JOIN)
            pd.material_standard as pipe_material_standard,
            pd.manufacturing_method, pd.end_preparation, pd.wall_thickness,
            fd.fitting_type, fd.fitting_subtype, fd.connection_type as fitting_connection_type,
            fd.main_size as fitting_main_size, fd.reduced_size as fitting_reduced_size,
            fld.flange_type, fld.flange_subtype, fld.pressure_rating as flange_pressure_rating,
            fld.face_type, fld.connection_method,
            vd.valve_type, vd.valve_subtype, vd.actuation_type,
            vd.pressure_rating as valve_pressure_rating, vd.temperature_rating,
            bd.bolt_type, bd.bolt_subtype, bd.thread_type, bd.head_type,
            bd.material_standard as bolt_material_standard,
            bd.pressure_rating as bolt_pressure_rating,
            gd.gasket_type, gd.gasket_subtype, gd.material_type as gasket_material_type,
            gd.filler_material, gd.pressure_rating as gasket_pressure_rating,
            gd.size_inches as gasket_size_inches, gd.thickness as gasket_thickness,
            gd.temperature_range as gasket_temperature_range, gd.fire_safe,
            -- 구매 추적 정보
            mpt.confirmed_quantity as tracking_confirmed_quantity,
            mpt.purchase_status as tracking_purchase_status,
            mpt.confirmed_by as tracking_confirmed_by,
            mpt.confirmed_at as tracking_confirmed_at,
            -- 최종 분류 (구매 추적 정보 우선)
            CASE
                WHEN mpt.id IS NOT NULL THEN
                    CASE
                        WHEN mpt.description LIKE '%PIPE%' OR mpt.description LIKE '%파이프%' THEN 'PIPE'
                        WHEN mpt.description LIKE '%FITTING%' OR mpt.description LIKE '%피팅%' THEN 'FITTING'
                        WHEN mpt.description LIKE '%VALVE%' OR mpt.description LIKE '%밸브%' THEN 'VALVE'
                        WHEN mpt.description LIKE '%FLANGE%' OR mpt.description LIKE '%플랜지%' THEN 'FLANGE'
                        WHEN mpt.description LIKE '%BOLT%' OR mpt.description LIKE '%볼트%' THEN 'BOLT'
                        WHEN mpt.description LIKE '%GASKET%' OR mpt.description LIKE '%가스켓%' THEN 'GASKET'
                        WHEN mpt.description LIKE '%INSTRUMENT%' OR mpt.description LIKE '%계기%' THEN 'INSTRUMENT'
                        ELSE m.classified_category
                    END
                ELSE m.classified_category
            END as final_classified_category,
            -- 검증 상태
            CASE WHEN mpt.id IS NOT NULL THEN true ELSE m.is_verified END as final_is_verified,
            CASE WHEN mpt.id IS NOT NULL THEN 'purchase_calculation' ELSE m.verified_by END as final_verified_by
        FROM materials m
        LEFT JOIN files f ON m.file_id = f.id
        LEFT JOIN projects p ON f.project_id = p.id
        LEFT JOIN pipe_details pd ON m.id = pd.material_id
        LEFT JOIN fitting_details fd ON m.id = fd.material_id
        LEFT JOIN flange_details fld ON m.id = fld.material_id
        LEFT JOIN valve_details vd ON m.id = vd.material_id
        LEFT JOIN bolt_details bd ON m.id = bd.material_id
        LEFT JOIN gasket_details gd ON m.id = gd.material_id
        LEFT JOIN material_purchase_tracking mpt ON (
            m.material_hash = mpt.material_hash
            AND f.job_no = mpt.job_no
            AND f.revision = mpt.revision
        )
        WHERE {' AND '.join(where_conditions)}
        ORDER BY m.line_number ASC, m.id ASC
        LIMIT :limit OFFSET :offset
        """
        try:
            result = self.execute_query(query, params)
            materials = [dict(row._mapping) for row in result.fetchall()]
            # Total row count for pagination.
            # BUG FIX: the previous version used where_conditions[:-1]
            # (intending to drop LIMIT/OFFSET, which are bind params, not
            # WHERE clauses) and thereby dropped the last real filter — and
            # produced invalid SQL ("WHERE ") when no filters were given.
            count_query = f"""
            SELECT COUNT(*) as total
            FROM materials m
            LEFT JOIN files f ON m.file_id = f.id
            WHERE {' AND '.join(where_conditions)}
            """
            count_result = self.execute_query(
                count_query,
                {k: v for k, v in params.items() if k not in ['limit', 'offset']}
            )
            total_count = count_result.scalar()
            return {
                "materials": materials,
                "total_count": total_count,
                "limit": limit,
                "offset": offset
            }
        except Exception as e:
            logger.error(f"Failed to get materials with details: {e}")
            raise

    def get_purchase_request_materials(
        self,
        job_no: str,
        category: Optional[str] = None,
        limit: int = 1000
    ) -> List[Dict]:
        """Fetch purchase-confirmed materials for a job, optionally by category."""
        where_conditions = ["f.job_no = :job_no", "m.purchase_confirmed = true"]
        params = {"job_no": job_no, "limit": limit}
        if category and category != 'ALL':
            where_conditions.append("m.classified_category = :category")
            params["category"] = category
        query = f"""
        SELECT
            m.id, m.original_description, m.classified_category,
            m.material_grade, m.schedule, m.size_spec, m.main_nom, m.red_nom,
            CAST(m.quantity AS INTEGER) as requested_quantity,
            CAST(m.confirmed_quantity AS INTEGER) as original_quantity,
            m.unit, m.drawing_name, m.line_no,
            f.job_no, f.revision, f.bom_name
        FROM materials m
        JOIN files f ON m.file_id = f.id
        WHERE {' AND '.join(where_conditions)}
        ORDER BY m.classified_category, m.original_description
        LIMIT :limit
        """
        try:
            result = self.execute_query(query, params)
            return [dict(row._mapping) for row in result.fetchall()]
        except Exception as e:
            logger.error(f"Failed to get purchase request materials: {e}")
            raise

    def safe_delete_related_data(self, file_id: int) -> Dict[str, Any]:
        """Best-effort delete of all rows related to a file.

        Executes deletions in FK-safe order; each table's outcome is
        recorded independently so a missing table does not abort the rest.
        Note: does NOT commit — the caller owns the transaction.
        """
        deletion_results = {}
        # Deletion order chosen to respect foreign-key constraints.
        deletion_queries = [
            ("support_details", "DELETE FROM support_details WHERE file_id = :file_id"),
            ("fitting_details", "DELETE FROM fitting_details WHERE material_id IN (SELECT id FROM materials WHERE file_id = :file_id)"),
            ("flange_details", "DELETE FROM flange_details WHERE material_id IN (SELECT id FROM materials WHERE file_id = :file_id)"),
            ("valve_details", "DELETE FROM valve_details WHERE material_id IN (SELECT id FROM materials WHERE file_id = :file_id)"),
            ("gasket_details", "DELETE FROM gasket_details WHERE material_id IN (SELECT id FROM materials WHERE file_id = :file_id)"),
            ("bolt_details", "DELETE FROM bolt_details WHERE material_id IN (SELECT id FROM materials WHERE file_id = :file_id)"),
            ("instrument_details", "DELETE FROM instrument_details WHERE material_id IN (SELECT id FROM materials WHERE file_id = :file_id)"),
            ("user_requirements", "DELETE FROM user_requirements WHERE file_id = :file_id"),
            ("purchase_request_items", "DELETE FROM purchase_request_items WHERE material_id IN (SELECT id FROM materials WHERE file_id = :file_id)"),
            ("purchase_requests", "DELETE FROM purchase_requests WHERE file_id = :file_id"),
            ("user_activity_logs", "DELETE FROM user_activity_logs WHERE target_id = :file_id AND target_type = 'file'"),
            ("exported_materials", """
                DELETE FROM exported_materials
                WHERE export_id IN (SELECT id FROM excel_exports WHERE file_id = :file_id)
            """),
            ("excel_exports", "DELETE FROM excel_exports WHERE file_id = :file_id"),
            ("materials", "DELETE FROM materials WHERE file_id = :file_id"),
        ]
        for table_name, query in deletion_queries:
            try:
                result = self.execute_query(query, {"file_id": file_id})
                deleted_count = result.rowcount
                deletion_results[table_name] = {"success": True, "deleted_count": deleted_count}
                logger.info(f"Deleted {deleted_count} records from {table_name}")
            except Exception as e:
                # Tolerate missing tables / partial schemas; record the error.
                deletion_results[table_name] = {"success": False, "error": str(e)}
                logger.warning(f"Failed to delete from {table_name}: {e}")
        return deletion_results

    def bulk_insert_materials(self, materials_data: List[Dict], file_id: int) -> int:
        """Bulk-insert material rows for a file; returns the inserted count.

        Commits on success and rolls back on failure (owns its transaction).
        """
        if not materials_data:
            return 0
        try:
            insert_query = """
            INSERT INTO materials (
                file_id, line_number, row_number, original_description,
                classified_category, classified_subcategory, material_grade,
                full_material_grade, schedule, size_spec, main_nom, red_nom,
                quantity, unit, length, drawing_name, area_code, line_no,
                classification_confidence, classification_details,
                revision_status, material_hash, normalized_description,
                created_at
            ) VALUES (
                :file_id, :line_number, :row_number, :original_description,
                :classified_category, :classified_subcategory, :material_grade,
                :full_material_grade, :schedule, :size_spec, :main_nom, :red_nom,
                :quantity, :unit, :length, :drawing_name, :area_code, :line_no,
                :classification_confidence, :classification_details,
                :revision_status, :material_hash, :normalized_description,
                CURRENT_TIMESTAMP
            )
            """
            # Normalize incoming dicts to the full parameter set with defaults.
            insert_data = []
            for material in materials_data:
                insert_data.append({
                    "file_id": file_id,
                    "line_number": material.get("line_number"),
                    "row_number": material.get("row_number"),
                    "original_description": material.get("original_description", ""),
                    "classified_category": material.get("classified_category"),
                    "classified_subcategory": material.get("classified_subcategory"),
                    "material_grade": material.get("material_grade"),
                    "full_material_grade": material.get("full_material_grade"),
                    "schedule": material.get("schedule"),
                    "size_spec": material.get("size_spec"),
                    "main_nom": material.get("main_nom"),
                    "red_nom": material.get("red_nom"),
                    "quantity": material.get("quantity", 0),
                    "unit": material.get("unit", "EA"),
                    "length": material.get("length"),
                    "drawing_name": material.get("drawing_name"),
                    "area_code": material.get("area_code"),
                    "line_no": material.get("line_no"),
                    "classification_confidence": material.get("classification_confidence"),
                    "classification_details": material.get("classification_details"),
                    "revision_status": material.get("revision_status", "new"),
                    "material_hash": material.get("material_hash"),
                    "normalized_description": material.get("normalized_description"),
                })
            # executemany-style bulk insert.
            self.db.execute(text(insert_query), insert_data)
            self.db.commit()
            logger.info(f"Successfully inserted {len(insert_data)} materials")
            return len(insert_data)
        except Exception as e:
            self.db.rollback()
            logger.error(f"Failed to bulk insert materials: {e}")
            raise
class MaterialQueryBuilder:
    """Builder for dynamic material SELECT statements."""

    @staticmethod
    def build_materials_query(
        filters: Optional[Dict[str, Any]] = None,
        joins: Optional[List[str]] = None,
        order_by: str = "m.line_number ASC"
    ) -> str:
        """Build a materials query string.

        filters: materials-column -> value equality filters; None values are
            skipped. Values become bind params (:key), but the KEYS are
            interpolated into the SQL — callers must pass trusted,
            code-defined column names only.
        joins: extra JOIN clauses appended verbatim after the base joins.
        order_by: ORDER BY expression; falsy disables ordering.
        """
        base_query = """
        SELECT m.*, f.job_no, f.revision, f.bom_name
        FROM materials m
        LEFT JOIN files f ON m.file_id = f.id
        """
        if joins:
            for join in joins:
                base_query += f" {join}"
        # Seed with a tautology so appended conditions can always use AND.
        where_conditions = ["1=1"]
        if filters:
            for key, value in filters.items():
                if value is not None:
                    where_conditions.append(f"m.{key} = :{key}")
        # FIX: dropped the dead `if where_conditions:` guard — the list always
        # holds at least "1=1", so the WHERE clause is emitted unconditionally.
        base_query += f" WHERE {' AND '.join(where_conditions)}"
        if order_by:
            base_query += f" ORDER BY {order_by}"
        return base_query

View File

@@ -0,0 +1,607 @@
"""
파일 업로드 서비스
파일 업로드 관련 로직을 통합하고 트랜잭션 관리 개선
"""
import os
import shutil
from pathlib import Path
from typing import Dict, List, Any, Optional, Tuple
from fastapi import UploadFile, HTTPException
from sqlalchemy.orm import Session
from sqlalchemy import text
from ..models import File, Material
from ..utils.logger import get_logger
from ..utils.file_processor import parse_file_data
from ..utils.file_validator import validate_file_extension, generate_unique_filename
from .database_service import DatabaseService
from .material_classification_service import MaterialClassificationService
logger = get_logger(__name__)
# Directory (relative to the working directory) where uploads are stored.
UPLOAD_DIR = Path("uploads")
# Extensions listed in the validation error message.
# NOTE(review): actual extension checking is delegated to
# validate_file_extension — confirm the two lists agree.
ALLOWED_EXTENSIONS = {'.xls', '.xlsx', '.csv'}
MAX_FILE_SIZE = 10 * 1024 * 1024  # 10MB upload size cap
class FileUploadService:
    """Service that orchestrates BOM file uploads end to end.

    Responsibilities: request validation, saving the uploaded file to disk,
    creating the ``File`` DB record, parsing/classifying material rows,
    bulk-inserting them, and persisting per-category detail rows.
    Raw SQL goes through DatabaseService; the caller owns the transaction
    boundary (this service flushes but does not commit the File record).
    """
    def __init__(self, db: Session):
        # NOTE(review): MaterialClassificationService is imported at module
        # top but also re-defined at the bottom of this module; the local
        # definition shadows the import — confirm which one is intended.
        self.db = db
        self.db_service = DatabaseService(db)
        self.classification_service = MaterialClassificationService()
    def validate_upload_request(self, file: UploadFile, job_no: str) -> None:
        """Validate file extension, size (<= MAX_FILE_SIZE) and job number.

        Raises:
            HTTPException: 400 on any validation failure.
        """
        if not validate_file_extension(file.filename):
            raise HTTPException(
                status_code=400,
                detail=f"지원하지 않는 파일 형식입니다. 허용된 확장자: {', '.join(ALLOWED_EXTENSIONS)}"
            )
        # file.size may be None depending on the client, in which case the
        # size check is silently skipped.
        if file.size and file.size > MAX_FILE_SIZE:
            raise HTTPException(
                status_code=400,
                detail="파일 크기는 10MB를 초과할 수 없습니다"
            )
        if not job_no or len(job_no.strip()) == 0:
            raise HTTPException(
                status_code=400,
                detail="작업 번호는 필수입니다"
            )
    def save_uploaded_file(self, file: UploadFile) -> Tuple[str, Path]:
        """Persist the upload under UPLOAD_DIR with a unique filename.

        Returns:
            Tuple of (unique filename, full path to the stored file).

        Raises:
            HTTPException: 500 if the file cannot be written.
        """
        try:
            # Generate a collision-free name so repeated uploads never clash.
            unique_filename = generate_unique_filename(file.filename)
            file_path = UPLOAD_DIR / unique_filename
            # Create the upload directory on first use.
            UPLOAD_DIR.mkdir(exist_ok=True)
            # Stream the upload to disk.
            with open(file_path, "wb") as buffer:
                shutil.copyfileobj(file.file, buffer)
            logger.info(f"File saved: {file_path}")
            return unique_filename, file_path
        except Exception as e:
            logger.error(f"Failed to save file: {e}")
            raise HTTPException(status_code=500, detail=f"파일 저장 실패: {str(e)}")
    def create_file_record(
        self,
        filename: str,
        original_filename: str,
        file_path: str,
        job_no: str,
        revision: str,
        bom_name: Optional[str],
        file_size: int,
        parsed_count: int,
        uploaded_by: str,
        parent_file_id: Optional[int] = None
    ) -> File:
        """Insert a ``File`` row and flush so its autogenerated ID is usable.

        The session is flushed but NOT committed; the caller owns the
        transaction boundary.

        Raises:
            HTTPException: 500 if the insert fails.
        """
        # NOTE(review): parent_file_id is accepted but never used here —
        # confirm whether it should be persisted on the record.
        try:
            # Derive the BOM name from the original filename when absent.
            if not bom_name:
                bom_name = original_filename.rsplit('.', 1)[0]
            # Human-readable summary shown in file listings.
            description = f"BOM 파일 - {parsed_count}개 자재"
            file_record = File(
                filename=filename,
                original_filename=original_filename,
                file_path=file_path,
                job_no=job_no,
                revision=revision,
                bom_name=bom_name,
                description=description,
                file_size=file_size,
                parsed_count=parsed_count,
                is_active=True,
                uploaded_by=uploaded_by
            )
            self.db.add(file_record)
            self.db.flush()  # flush so the autogenerated ID is populated
            logger.info(f"File record created: ID={file_record.id}, Job={job_no}")
            return file_record
        except Exception as e:
            logger.error(f"Failed to create file record: {e}")
            raise HTTPException(status_code=500, detail=f"파일 레코드 생성 실패: {str(e)}")
    def process_materials_data(
        self,
        file_path: Path,
        file_id: int,
        job_no: str,
        revision: str,
        parent_file_id: Optional[int] = None
    ) -> Dict[str, Any]:
        """Parse, classify and persist the material rows of an uploaded file.

        Args:
            file_path: path of the stored upload to parse.
            file_id: ID of the owning ``File`` record.
            job_no: job number. NOTE(review): currently unused in the body.
            revision: revision label. NOTE(review): currently unused in the body.
            parent_file_id: previous revision's file ID; when given, each
                material is diffed against it to derive a revision status.

        Returns:
            Dict with ``materials_count``, ``classification_results`` and a
            ``processed_materials`` preview (first 10 rows only).

        Raises:
            HTTPException: 400 when no rows are parsed, 500 on other errors.
        """
        try:
            # Parse material rows out of the Excel/CSV file.
            logger.info(f"Parsing file: {file_path}")
            materials_data = parse_file_data(str(file_path))
            if not materials_data:
                raise HTTPException(status_code=400, detail="파일에서 자재 데이터를 찾을 수 없습니다")
            # Classify each row and accumulate per-category statistics.
            processed_materials = []
            classification_results = {
                "total_materials": len(materials_data),
                "classified_count": 0,
                "categories": {}
            }
            for idx, material_data in enumerate(materials_data):
                try:
                    # Classify the material (category, confidence, details).
                    classified_material = self.classification_service.classify_material(
                        material_data,
                        line_number=idx + 1,
                        row_number=material_data.get('row_number', idx + 1)
                    )
                    # Determine the revision status.
                    if parent_file_id:
                        # Revision upload: diff this row against the parent file.
                        classified_material['revision_status'] = self._analyze_revision_status(
                            classified_material, parent_file_id
                        )
                    else:
                        classified_material['revision_status'] = 'new'
                    processed_materials.append(classified_material)
                    # Update classification statistics.
                    category = classified_material.get('classified_category', 'UNKNOWN')
                    classification_results['categories'][category] = classification_results['categories'].get(category, 0) + 1
                    if category != 'UNKNOWN':
                        classification_results['classified_count'] += 1
                except Exception as e:
                    logger.warning(f"Failed to classify material at line {idx + 1}: {e}")
                    # On classification failure keep the row with defaults so
                    # the upload as a whole still succeeds.
                    material_data.update({
                        'classified_category': 'UNKNOWN',
                        'classification_confidence': 0.0,
                        'revision_status': 'new'
                    })
                    processed_materials.append(material_data)
            # Bulk-insert the material rows.
            inserted_count = self.db_service.bulk_insert_materials(processed_materials, file_id)
            # Persist per-category detail rows (best effort, non-fatal).
            self._save_material_details(processed_materials, file_id)
            logger.info(f"Processed {inserted_count} materials for file {file_id}")
            return {
                "materials_count": inserted_count,
                "classification_results": classification_results,
                "processed_materials": processed_materials[:10]  # preview: first 10 rows only
            }
        # NOTE(review): this handler also catches the 400 HTTPException
        # raised above and re-wraps it as a 500 — confirm if intended.
        except Exception as e:
            logger.error(f"Failed to process materials data: {e}")
            raise HTTPException(status_code=500, detail=f"자재 데이터 처리 실패: {str(e)}")
    def _analyze_revision_status(self, material: Dict, parent_file_id: int) -> str:
        """Compare a material against the parent revision's matching row.

        Returns:
            'new' when no matching parent row exists (or on lookup failure),
            'changed' when quantity/grade/size differ, 'inventory' otherwise.
        """
        try:
            # Look up the same material in the parent file by drawing name
            # and original description.
            parent_material_query = """
                SELECT * FROM materials
                WHERE file_id = :parent_file_id
                AND drawing_name = :drawing_name
                AND original_description = :description
                LIMIT 1
            """
            result = self.db_service.execute_query(
                parent_material_query,
                {
                    "parent_file_id": parent_file_id,
                    "drawing_name": material.get('drawing_name'),
                    "description": material.get('original_description')
                }
            )
            parent_material = result.fetchone()
            if not parent_material:
                return 'new'  # material not present in the parent revision
            # A change in quantity, grade or size marks the row as changed.
            if (
                float(material.get('quantity', 0)) != float(parent_material.quantity or 0) or
                material.get('material_grade') != parent_material.material_grade or
                material.get('size_spec') != parent_material.size_spec
            ):
                return 'changed'  # material changed since the parent revision
            return 'inventory'  # unchanged existing material
        except Exception as e:
            # Best effort: treat as new rather than failing the upload.
            logger.warning(f"Failed to analyze revision status: {e}")
            return 'new'
    def _save_material_details(self, materials: List[Dict], file_id: int) -> None:
        """Write per-category detail rows for the just-inserted materials.

        Failures here are logged but never abort the upload.
        """
        try:
            # Map row_number -> DB id for the rows just bulk-inserted.
            material_ids_query = """
                SELECT id, row_number FROM materials
                WHERE file_id = :file_id
                ORDER BY row_number
            """
            result = self.db_service.execute_query(material_ids_query, {"file_id": file_id})
            material_id_map = {row.row_number: row.id for row in result.fetchall()}
            # Dispatch each material to its category-specific detail writer.
            for material in materials:
                material_id = material_id_map.get(material.get('row_number'))
                if not material_id:
                    continue
                category = material.get('classified_category')
                if category == 'PIPE':
                    self._save_pipe_details(material, material_id, file_id)
                elif category == 'FITTING':
                    self._save_fitting_details(material, material_id, file_id)
                elif category == 'FLANGE':
                    self._save_flange_details(material, material_id, file_id)
                elif category == 'VALVE':
                    self._save_valve_details(material, material_id, file_id)
                elif category == 'BOLT':
                    self._save_bolt_details(material, material_id, file_id)
                elif category == 'GASKET':
                    self._save_gasket_details(material, material_id, file_id)
                elif category == 'SUPPORT':
                    self._save_support_details(material, material_id, file_id)
                elif category == 'INSTRUMENT':
                    self._save_instrument_details(material, material_id, file_id)
            logger.info(f"Saved material details for {len(materials)} materials")
        except Exception as e:
            logger.error(f"Failed to save material details: {e}")
            # Detail-save failures must not abort the whole upload.
    def _save_pipe_details(self, material: Dict, material_id: int, file_id: int) -> None:
        """Insert a pipe_details row for the given material (best effort)."""
        details = material.get('classification_details', {})
        insert_query = """
            INSERT INTO pipe_details (
                material_id, file_id, material_standard, material_grade, material_type,
                manufacturing_method, end_preparation, schedule, wall_thickness,
                nominal_size, length_mm, material_confidence, manufacturing_confidence,
                end_prep_confidence, schedule_confidence
            ) VALUES (
                :material_id, :file_id, :material_standard, :material_grade, :material_type,
                :manufacturing_method, :end_preparation, :schedule, :wall_thickness,
                :nominal_size, :length_mm, :material_confidence, :manufacturing_confidence,
                :end_prep_confidence, :schedule_confidence
            )
        """
        try:
            self.db_service.execute_query(insert_query, {
                "material_id": material_id,
                "file_id": file_id,
                "material_standard": details.get('material_standard'),
                "material_grade": material.get('material_grade'),
                "material_type": details.get('material_type'),
                "manufacturing_method": details.get('manufacturing_method'),
                "end_preparation": details.get('end_preparation'),
                "schedule": material.get('schedule'),
                "wall_thickness": details.get('wall_thickness'),
                "nominal_size": material.get('main_nom'),
                "length_mm": material.get('length'),
                "material_confidence": details.get('material_confidence', 0.0),
                "manufacturing_confidence": details.get('manufacturing_confidence', 0.0),
                "end_prep_confidence": details.get('end_prep_confidence', 0.0),
                "schedule_confidence": details.get('schedule_confidence', 0.0)
            })
        except Exception as e:
            logger.warning(f"Failed to save pipe details for material {material_id}: {e}")
    def _save_fitting_details(self, material: Dict, material_id: int, file_id: int) -> None:
        """Insert a fitting_details row for the given material (best effort)."""
        details = material.get('classification_details', {})
        insert_query = """
            INSERT INTO fitting_details (
                material_id, file_id, fitting_type, fitting_subtype, connection_type,
                main_size, reduced_size, length_mm, material_standard, material_grade,
                pressure_rating, temperature_rating, classification_confidence
            ) VALUES (
                :material_id, :file_id, :fitting_type, :fitting_subtype, :connection_type,
                :main_size, :reduced_size, :length_mm, :material_standard, :material_grade,
                :pressure_rating, :temperature_rating, :classification_confidence
            )
        """
        try:
            self.db_service.execute_query(insert_query, {
                "material_id": material_id,
                "file_id": file_id,
                "fitting_type": details.get('fitting_type', 'UNKNOWN'),
                "fitting_subtype": details.get('fitting_subtype'),
                "connection_type": details.get('connection_type'),
                "main_size": material.get('main_nom'),
                "reduced_size": material.get('red_nom'),
                "length_mm": material.get('length'),
                "material_standard": details.get('material_standard'),
                "material_grade": material.get('material_grade'),
                "pressure_rating": details.get('pressure_rating'),
                "temperature_rating": details.get('temperature_rating'),
                "classification_confidence": material.get('classification_confidence', 0.0)
            })
        except Exception as e:
            logger.warning(f"Failed to save fitting details for material {material_id}: {e}")
    def _save_flange_details(self, material: Dict, material_id: int, file_id: int) -> None:
        """Insert a flange_details row for the given material (best effort)."""
        details = material.get('classification_details', {})
        insert_query = """
            INSERT INTO flange_details (
                material_id, file_id, flange_type, flange_subtype, pressure_rating,
                face_type, connection_method, nominal_size, material_standard,
                material_grade, classification_confidence
            ) VALUES (
                :material_id, :file_id, :flange_type, :flange_subtype, :pressure_rating,
                :face_type, :connection_method, :nominal_size, :material_standard,
                :material_grade, :classification_confidence
            )
        """
        try:
            self.db_service.execute_query(insert_query, {
                "material_id": material_id,
                "file_id": file_id,
                "flange_type": details.get('flange_type', 'UNKNOWN'),
                "flange_subtype": details.get('flange_subtype'),
                "pressure_rating": details.get('pressure_rating'),
                "face_type": details.get('face_type'),
                "connection_method": details.get('connection_method'),
                "nominal_size": material.get('main_nom'),
                "material_standard": details.get('material_standard'),
                "material_grade": material.get('material_grade'),
                "classification_confidence": material.get('classification_confidence', 0.0)
            })
        except Exception as e:
            logger.warning(f"Failed to save flange details for material {material_id}: {e}")
    def _save_valve_details(self, material: Dict, material_id: int, file_id: int) -> None:
        """Insert a valve_details row for the given material (best effort)."""
        details = material.get('classification_details', {})
        insert_query = """
            INSERT INTO valve_details (
                material_id, file_id, valve_type, valve_subtype, actuation_type,
                pressure_rating, temperature_rating, nominal_size, connection_type,
                material_standard, material_grade, classification_confidence
            ) VALUES (
                :material_id, :file_id, :valve_type, :valve_subtype, :actuation_type,
                :pressure_rating, :temperature_rating, :nominal_size, :connection_type,
                :material_standard, :material_grade, :classification_confidence
            )
        """
        try:
            self.db_service.execute_query(insert_query, {
                "material_id": material_id,
                "file_id": file_id,
                "valve_type": details.get('valve_type', 'UNKNOWN'),
                "valve_subtype": details.get('valve_subtype'),
                "actuation_type": details.get('actuation_type'),
                "pressure_rating": details.get('pressure_rating'),
                "temperature_rating": details.get('temperature_rating'),
                "nominal_size": material.get('main_nom'),
                "connection_type": details.get('connection_type'),
                "material_standard": details.get('material_standard'),
                "material_grade": material.get('material_grade'),
                "classification_confidence": material.get('classification_confidence', 0.0)
            })
        except Exception as e:
            logger.warning(f"Failed to save valve details for material {material_id}: {e}")
    def _save_bolt_details(self, material: Dict, material_id: int, file_id: int) -> None:
        """Insert a bolt_details row for the given material (best effort)."""
        details = material.get('classification_details', {})
        insert_query = """
            INSERT INTO bolt_details (
                material_id, file_id, bolt_type, bolt_subtype, thread_type,
                head_type, material_standard, material_grade, pressure_rating,
                length_mm, diameter_mm, classification_confidence
            ) VALUES (
                :material_id, :file_id, :bolt_type, :bolt_subtype, :thread_type,
                :head_type, :material_standard, :material_grade, :pressure_rating,
                :length_mm, :diameter_mm, :classification_confidence
            )
        """
        try:
            self.db_service.execute_query(insert_query, {
                "material_id": material_id,
                "file_id": file_id,
                "bolt_type": details.get('bolt_type', 'UNKNOWN'),
                "bolt_subtype": details.get('bolt_subtype'),
                "thread_type": details.get('thread_type'),
                "head_type": details.get('head_type'),
                "material_standard": details.get('material_standard'),
                "material_grade": material.get('material_grade'),
                "pressure_rating": details.get('pressure_rating'),
                "length_mm": material.get('length'),
                "diameter_mm": details.get('diameter_mm'),
                "classification_confidence": material.get('classification_confidence', 0.0)
            })
        except Exception as e:
            logger.warning(f"Failed to save bolt details for material {material_id}: {e}")
    def _save_gasket_details(self, material: Dict, material_id: int, file_id: int) -> None:
        """Insert a gasket_details row for the given material (best effort)."""
        details = material.get('classification_details', {})
        insert_query = """
            INSERT INTO gasket_details (
                material_id, file_id, gasket_type, gasket_subtype, material_type,
                filler_material, pressure_rating, size_inches, thickness,
                temperature_range, fire_safe, classification_confidence
            ) VALUES (
                :material_id, :file_id, :gasket_type, :gasket_subtype, :material_type,
                :filler_material, :pressure_rating, :size_inches, :thickness,
                :temperature_range, :fire_safe, :classification_confidence
            )
        """
        try:
            self.db_service.execute_query(insert_query, {
                "material_id": material_id,
                "file_id": file_id,
                "gasket_type": details.get('gasket_type', 'UNKNOWN'),
                "gasket_subtype": details.get('gasket_subtype'),
                "material_type": details.get('material_type'),
                "filler_material": details.get('filler_material'),
                "pressure_rating": details.get('pressure_rating'),
                "size_inches": material.get('main_nom'),
                "thickness": details.get('thickness'),
                "temperature_range": details.get('temperature_range'),
                "fire_safe": details.get('fire_safe', False),
                "classification_confidence": material.get('classification_confidence', 0.0)
            })
        except Exception as e:
            logger.warning(f"Failed to save gasket details for material {material_id}: {e}")
    def _save_support_details(self, material: Dict, material_id: int, file_id: int) -> None:
        """Insert a support_details row for the given material (best effort)."""
        details = material.get('classification_details', {})
        insert_query = """
            INSERT INTO support_details (
                material_id, file_id, support_type, support_subtype, load_rating,
                load_capacity, material_standard, material_grade, pipe_size,
                length_mm, width_mm, height_mm, classification_confidence
            ) VALUES (
                :material_id, :file_id, :support_type, :support_subtype, :load_rating,
                :load_capacity, :material_standard, :material_grade, :pipe_size,
                :length_mm, :width_mm, :height_mm, :classification_confidence
            )
        """
        try:
            self.db_service.execute_query(insert_query, {
                "material_id": material_id,
                "file_id": file_id,
                "support_type": details.get('support_type', 'UNKNOWN'),
                "support_subtype": details.get('support_subtype'),
                "load_rating": details.get('load_rating', 'UNKNOWN'),
                "load_capacity": details.get('load_capacity'),
                "material_standard": details.get('material_standard', 'UNKNOWN'),
                "material_grade": details.get('material_grade', 'UNKNOWN'),
                "pipe_size": material.get('main_nom'),
                "length_mm": material.get('length'),
                "width_mm": details.get('width_mm'),
                "height_mm": details.get('height_mm'),
                "classification_confidence": material.get('classification_confidence', 0.0)
            })
        except Exception as e:
            logger.warning(f"Failed to save support details for material {material_id}: {e}")
    def _save_instrument_details(self, material: Dict, material_id: int, file_id: int) -> None:
        """Insert an instrument_details row for the given material (best effort)."""
        details = material.get('classification_details', {})
        insert_query = """
            INSERT INTO instrument_details (
                material_id, file_id, instrument_type, instrument_subtype,
                measurement_type, connection_size, pressure_rating,
                temperature_rating, accuracy_class, material_standard,
                classification_confidence
            ) VALUES (
                :material_id, :file_id, :instrument_type, :instrument_subtype,
                :measurement_type, :connection_size, :pressure_rating,
                :temperature_rating, :accuracy_class, :material_standard,
                :classification_confidence
            )
        """
        try:
            self.db_service.execute_query(insert_query, {
                "material_id": material_id,
                "file_id": file_id,
                "instrument_type": details.get('instrument_type', 'UNKNOWN'),
                "instrument_subtype": details.get('instrument_subtype'),
                "measurement_type": details.get('measurement_type'),
                "connection_size": material.get('main_nom'),
                "pressure_rating": details.get('pressure_rating'),
                "temperature_rating": details.get('temperature_rating'),
                "accuracy_class": details.get('accuracy_class'),
                "material_standard": details.get('material_standard'),
                "classification_confidence": material.get('classification_confidence', 0.0)
            })
        except Exception as e:
            logger.warning(f"Failed to save instrument details for material {material_id}: {e}")
    def cleanup_failed_upload(self, file_path: Path) -> None:
        """Delete the stored file after a failed upload (best effort)."""
        try:
            if file_path.exists():
                file_path.unlink()
                logger.info(f"Cleaned up failed upload: {file_path}")
        except Exception as e:
            logger.warning(f"Failed to cleanup file {file_path}: {e}")
class MaterialClassificationService:
    """Material classification service (placeholder implementation).

    NOTE: this class shadows the identically-named import at the top of
    the module; real classification logic is expected to replace it.
    """
    def classify_material(self, material_data: Dict, line_number: int, row_number: int) -> Dict:
        """Annotate *material_data* in place with classification defaults.

        Line/row numbers are always overwritten; existing classification
        keys are preserved, missing ones fall back to UNKNOWN / 0.0 / {}.
        Returns the same (mutated) dict.
        """
        material_data['line_number'] = line_number
        material_data['row_number'] = row_number
        fallbacks = (
            ('classified_category', 'UNKNOWN'),
            ('classification_confidence', 0.0),
            ('classification_details', {}),
        )
        for key, fallback in fallbacks:
            material_data[key] = material_data.get(key, fallback)
        return material_data

View File

@@ -0,0 +1,138 @@
#!/usr/bin/env python3
"""
백엔드 코드 전체 분석 후 누락된 모든 컬럼과 테이블을 한 번에 수정하는 스크립트
"""
import os
import sys
import psycopg2
from psycopg2 import sql
# Make the application package importable when run inside the container.
sys.path.insert(0, '/app')
# Database connection settings, overridable via environment variables.
DB_HOST = os.getenv("DB_HOST", "postgres")
DB_PORT = os.getenv("DB_PORT", "5432")
DB_NAME = os.getenv("DB_NAME", "tk_mp_bom")
DB_USER = os.getenv("DB_USER", "tkmp_user")
DB_PASSWORD = os.getenv("DB_PASSWORD", "tkmp_password_2025")
def fix_all_missing_columns():
    """Add every column the backend code expects but the DB may lack.

    Connects with psycopg2 and issues idempotent
    ``ALTER TABLE ... ADD COLUMN IF NOT EXISTS`` statements for the tables
    known to drift from the models (materials, material_purchase_tracking,
    files, pipe_details and a few detail tables), then commits once.

    Returns:
        bool: True on success, False if the connection or any statement failed.
    """
    print("🔍 백엔드 코드 분석 결과를 바탕으로 모든 누락된 컬럼 수정 시작...")
    # Fix: initialize conn up front so the finally block does not raise
    # NameError when psycopg2.connect() itself fails.
    conn = None
    try:
        conn = psycopg2.connect(
            host=DB_HOST,
            port=DB_PORT,
            database=DB_NAME,
            user=DB_USER,
            password=DB_PASSWORD
        )
        with conn.cursor() as cursor:
            # 1. Columns missing from the materials table.
            print("📝 materials 테이블 컬럼 추가 중...")
            materials_columns = [
                "ALTER TABLE materials ADD COLUMN IF NOT EXISTS brand VARCHAR(100);",
                "ALTER TABLE materials ADD COLUMN IF NOT EXISTS user_requirement TEXT;",
                "ALTER TABLE materials ADD COLUMN IF NOT EXISTS purchase_confirmed BOOLEAN DEFAULT FALSE;",
                "ALTER TABLE materials ADD COLUMN IF NOT EXISTS confirmed_quantity NUMERIC(10,3);",
                "ALTER TABLE materials ADD COLUMN IF NOT EXISTS purchase_status VARCHAR(20);",
                "ALTER TABLE materials ADD COLUMN IF NOT EXISTS purchase_confirmed_by VARCHAR(100);",
                "ALTER TABLE materials ADD COLUMN IF NOT EXISTS purchase_confirmed_at TIMESTAMP;",
                "ALTER TABLE materials ADD COLUMN IF NOT EXISTS material_hash VARCHAR(64);",
                "ALTER TABLE materials ADD COLUMN IF NOT EXISTS normalized_description TEXT;",
                "ALTER TABLE materials ADD COLUMN IF NOT EXISTS revision_status VARCHAR(20);",
            ]
            for sql_cmd in materials_columns:
                cursor.execute(sql_cmd)
            print("✅ materials 테이블 컬럼 추가 완료")
            # 2. Columns missing from material_purchase_tracking (used by backend code).
            print("📝 material_purchase_tracking 테이블 컬럼 추가 중...")
            mpt_columns = [
                "ALTER TABLE material_purchase_tracking ADD COLUMN IF NOT EXISTS job_no VARCHAR(50);",
                "ALTER TABLE material_purchase_tracking ADD COLUMN IF NOT EXISTS material_hash VARCHAR(64);",
                "ALTER TABLE material_purchase_tracking ADD COLUMN IF NOT EXISTS revision VARCHAR(20);",
                "ALTER TABLE material_purchase_tracking ADD COLUMN IF NOT EXISTS description TEXT;",
                "ALTER TABLE material_purchase_tracking ADD COLUMN IF NOT EXISTS size_spec VARCHAR(50);",
                "ALTER TABLE material_purchase_tracking ADD COLUMN IF NOT EXISTS unit VARCHAR(10);",
                "ALTER TABLE material_purchase_tracking ADD COLUMN IF NOT EXISTS bom_quantity NUMERIC(10,3);",
                "ALTER TABLE material_purchase_tracking ADD COLUMN IF NOT EXISTS calculated_quantity NUMERIC(10,3);",
                "ALTER TABLE material_purchase_tracking ADD COLUMN IF NOT EXISTS supplier_name VARCHAR(100);",
                "ALTER TABLE material_purchase_tracking ADD COLUMN IF NOT EXISTS unit_price NUMERIC(12,2);",
                "ALTER TABLE material_purchase_tracking ADD COLUMN IF NOT EXISTS total_price NUMERIC(15,2);",
                "ALTER TABLE material_purchase_tracking ADD COLUMN IF NOT EXISTS order_date DATE;",
                "ALTER TABLE material_purchase_tracking ADD COLUMN IF NOT EXISTS delivery_date DATE;",
                "ALTER TABLE material_purchase_tracking ADD COLUMN IF NOT EXISTS confirmed_by VARCHAR(100);",
                "ALTER TABLE material_purchase_tracking ADD COLUMN IF NOT EXISTS confirmed_at TIMESTAMP;",
            ]
            for sql_cmd in mpt_columns:
                cursor.execute(sql_cmd)
            print("✅ material_purchase_tracking 테이블 컬럼 추가 완료")
            # 3. Columns missing from the files table.
            print("📝 files 테이블 컬럼 추가 중...")
            files_columns = [
                "ALTER TABLE files ADD COLUMN IF NOT EXISTS updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP;",
                "ALTER TABLE files ADD COLUMN IF NOT EXISTS project_type VARCHAR(50);",
            ]
            for sql_cmd in files_columns:
                cursor.execute(sql_cmd)
            print("✅ files 테이블 컬럼 추가 완료")
            # 4. Columns missing from pipe_details (used by backend code).
            print("📝 pipe_details 테이블 컬럼 추가 중...")
            pipe_details_columns = [
                "ALTER TABLE pipe_details ADD COLUMN IF NOT EXISTS material_id INTEGER REFERENCES materials(id);",
                "ALTER TABLE pipe_details ADD COLUMN IF NOT EXISTS outer_diameter VARCHAR(50);",
                "ALTER TABLE pipe_details ADD COLUMN IF NOT EXISTS material_spec VARCHAR(100);",
                "ALTER TABLE pipe_details ADD COLUMN IF NOT EXISTS classification_confidence NUMERIC(3,2);",
            ]
            for sql_cmd in pipe_details_columns:
                cursor.execute(sql_cmd)
            print("✅ pipe_details 테이블 컬럼 추가 완료")
            # 5. Columns missing from the remaining detail tables.
            print("📝 기타 테이블들 컬럼 추가 중...")
            # fitting_details
            cursor.execute("ALTER TABLE fitting_details ADD COLUMN IF NOT EXISTS main_size VARCHAR(50);")
            cursor.execute("ALTER TABLE fitting_details ADD COLUMN IF NOT EXISTS reduced_size VARCHAR(50);")
            cursor.execute("ALTER TABLE fitting_details ADD COLUMN IF NOT EXISTS length_mm NUMERIC(10,3);")
            # gasket_details
            cursor.execute("ALTER TABLE gasket_details ADD COLUMN IF NOT EXISTS gasket_subtype VARCHAR(50);")
            cursor.execute("ALTER TABLE gasket_details ADD COLUMN IF NOT EXISTS material_type VARCHAR(50);")
            cursor.execute("ALTER TABLE gasket_details ADD COLUMN IF NOT EXISTS size_inches VARCHAR(50);")
            cursor.execute("ALTER TABLE gasket_details ADD COLUMN IF NOT EXISTS temperature_range VARCHAR(50);")
            cursor.execute("ALTER TABLE gasket_details ADD COLUMN IF NOT EXISTS fire_safe BOOLEAN DEFAULT FALSE;")
            print("✅ 기타 테이블들 컬럼 추가 완료")
        conn.commit()
        print("✅ 모든 누락된 컬럼 추가 완료")
    except Exception as e:
        print(f"❌ 컬럼 추가 실패: {e}")
        return False
    finally:
        if conn:
            conn.close()
    return True
if __name__ == "__main__":
    # Script entry point: run the schema fix and exit non-zero on failure.
    print("🚀 백엔드 코드 분석 기반 스키마 수정 시작")
    if not fix_all_missing_columns():
        print("❌ 스키마 수정 실패")
        sys.exit(1)
    print("✅ 모든 스키마 수정 완료")

View File

@@ -0,0 +1,301 @@
#!/usr/bin/env python3
"""
TK-MP-Project 완전한 자동 DB 마이그레이션 시스템
- 모든 SQLAlchemy 모델을 기반으로 테이블 생성/업데이트
- 누락된 컬럼 자동 추가
- 인덱스 자동 생성
- 초기 데이터 삽입
- macOS Docker와 Synology Container Manager 모두 지원
"""
import os
import sys
import time
import psycopg2
from psycopg2 import OperationalError, sql
from sqlalchemy import create_engine, text, inspect
from sqlalchemy.orm import sessionmaker
from sqlalchemy.exc import OperationalError as SQLAlchemyOperationalError
from datetime import datetime
import bcrypt
# 현재 디렉토리를 Python 경로에 추가
sys.path.insert(0, '/app')
from app.database import Base, get_db
from app.auth.models import User
from app.models import * # 모든 모델을 임포트하여 Base.metadata에 등록
# Database connection settings, overridable via environment variables.
DB_HOST = os.getenv("DB_HOST", "postgres")
DB_PORT = os.getenv("DB_PORT", "5432")
DB_NAME = os.getenv("DB_NAME", "tk_mp_bom")
DB_USER = os.getenv("DB_USER", "tkmp_user")
DB_PASSWORD = os.getenv("DB_PASSWORD", "tkmp_password_2025")
DATABASE_URL = f"postgresql://{DB_USER}:{DB_PASSWORD}@{DB_HOST}:{DB_PORT}/{DB_NAME}"
# Seed credentials for the admin account (created below if missing).
ADMIN_USERNAME = os.getenv("ADMIN_USERNAME", "admin")
ADMIN_PASSWORD = os.getenv("ADMIN_PASSWORD", "admin123")
ADMIN_NAME = os.getenv("ADMIN_NAME", "시스템 관리자")
ADMIN_EMAIL = os.getenv("ADMIN_EMAIL", "admin@tkmp.com")
# Seed credentials for the internal system account (created below if missing).
SYSTEM_USERNAME = os.getenv("SYSTEM_USERNAME", "system")
SYSTEM_PASSWORD = os.getenv("SYSTEM_PASSWORD", "admin123")
SYSTEM_NAME = os.getenv("SYSTEM_NAME", "시스템 계정")
SYSTEM_EMAIL = os.getenv("SYSTEM_EMAIL", "system@tkmp.com")
def wait_for_db(max_attempts=120, delay=2):
    """Block until PostgreSQL accepts connections, or give up.

    Args:
        max_attempts: number of connection attempts before giving up.
        delay: seconds to sleep between attempts.

    Returns:
        bool: True once a connection succeeds, False after exhausting
        all attempts.
    """
    print("Waiting for database connection...")
    print(f" Host: {DB_HOST}:{DB_PORT}")
    print(f" Database: {DB_NAME}")
    print(f" User: {DB_USER}")
    for i in range(1, max_attempts + 1):
        try:
            conn = psycopg2.connect(
                host=DB_HOST,
                port=DB_PORT,
                database=DB_NAME,
                user=DB_USER,
                password=DB_PASSWORD,
                connect_timeout=5
            )
            conn.close()
            print(f"SUCCESS: Database connection established! ({i}/{max_attempts})")
            return True
        except OperationalError as e:
            # Log the first few attempts, then every 10th, to keep output short.
            if i <= 5 or i % 10 == 0 or i == max_attempts:
                print(f"Waiting for database... ({i}/{max_attempts}) - {str(e)[:100]}")
            if i == max_attempts:
                print(f"FAILED: Database connection timeout after {max_attempts * delay} seconds")
                print(f"Error: {e}")
                # Fix: return immediately instead of sleeping one more time
                # after the final failed attempt.
                return False
            time.sleep(delay)
    return False
def get_existing_columns(engine, table_name):
    """Return a name -> column-info mapping for *table_name*.

    Returns an empty dict when the table cannot be inspected
    (e.g. it does not exist yet).
    """
    inspector = inspect(engine)
    try:
        column_infos = inspector.get_columns(table_name)
    except Exception:
        return {}
    mapping = {}
    for info in column_infos:
        mapping[info['name']] = info
    return mapping
def get_model_columns(model):
    """Extract per-column metadata from a SQLAlchemy model.

    Returns:
        dict: column name -> plain dict with the column's name,
        stringified type, nullability, default and primary-key flag.
    """
    return {
        col.name: {
            'name': col.name,
            'type': str(col.type),
            'nullable': col.nullable,
            'default': col.default,
            'primary_key': col.primary_key,
        }
        for col in model.__table__.columns
    }
def add_missing_columns(engine, model):
    """Add any columns present on *model* but missing from its DB table.

    Maps the stringified SQLAlchemy column types to rough PostgreSQL
    equivalents and issues one ``ALTER TABLE ... ADD COLUMN IF NOT EXISTS``
    per missing column. Individual column failures are logged and skipped.

    Returns:
        bool: True when the table was already in sync or all alters were
        attempted; False if the connection/transaction itself failed.
    """
    table_name = model.__tablename__
    existing_columns = get_existing_columns(engine, table_name)
    model_columns = get_model_columns(model)
    missing_columns = []
    for col_name, col_info in model_columns.items():
        if col_name not in existing_columns:
            missing_columns.append((col_name, col_info))
    if not missing_columns:
        return True
    print(f"📝 테이블 '{table_name}'에 누락된 컬럼 {len(missing_columns)}개 추가 중...")
    try:
        with engine.connect() as connection:
            for col_name, col_info in missing_columns:
                # Map the (stringified) SQLAlchemy type to a Postgres type.
                col_type = col_info['type']
                if 'VARCHAR' in col_type:
                    sql_type = col_type
                elif 'INTEGER' in col_type:
                    sql_type = 'INTEGER'
                elif 'BOOLEAN' in col_type:
                    sql_type = 'BOOLEAN'
                elif 'DATETIME' in col_type:
                    sql_type = 'TIMESTAMP'
                elif 'TEXT' in col_type:
                    sql_type = 'TEXT'
                elif 'NUMERIC' in col_type:
                    sql_type = col_type
                elif 'JSON' in col_type:
                    sql_type = 'JSON'
                else:
                    sql_type = 'TEXT'  # fallback type
                # Nullability clause.
                # NOTE(review): adding a NOT NULL column without a default
                # will fail on tables that already contain rows — confirm.
                nullable = "NULL" if col_info['nullable'] else "NOT NULL"
                # Default-value clause.
                # NOTE(review): col_info['default'] is whatever
                # get_model_columns stored from Column.default (a SQLAlchemy
                # ColumnDefault wrapper, not the raw value), so the f-strings
                # below render its repr; also `col_info['type'] == 'BOOLEAN'`
                # compares against the stringified type — verify both against
                # the model defaults actually in use.
                default_clause = ""
                if col_info['default'] is not None:
                    if col_info['type'] == 'BOOLEAN':
                        default_value = 'TRUE' if str(col_info['default']).lower() in ['true', '1'] else 'FALSE'
                        default_clause = f" DEFAULT {default_value}"
                    elif 'VARCHAR' in col_info['type'] or 'TEXT' in col_info['type']:
                        default_clause = f" DEFAULT '{col_info['default']}'"
                    else:
                        default_clause = f" DEFAULT {col_info['default']}"
                alter_sql = f"ALTER TABLE {table_name} ADD COLUMN IF NOT EXISTS {col_name} {sql_type}{default_clause} {nullable};"
                try:
                    connection.execute(text(alter_sql))
                    print(f" ✅ 컬럼 '{col_name}' ({sql_type}) 추가 완료")
                except Exception as e:
                    print(f" ⚠️ 컬럼 '{col_name}' 추가 실패: {e}")
            connection.commit()
        return True
    except Exception as e:
        print(f"❌ 테이블 '{table_name}' 컬럼 추가 실패: {e}")
        return False
def create_tables_and_migrate():
    """Create all model tables, sync columns, seed users, build indexes.

    Steps: (1) ``Base.metadata.create_all`` for tables that do not exist
    yet, (2) add columns missing from existing tables for every known
    model, (3) ensure the admin and system user accounts exist,
    (4) create performance indexes (idempotent).

    Returns:
        bool: True on full success, False on any database error.
    """
    print("🔄 완전한 스키마 동기화 및 마이그레이션 시작...")
    engine = create_engine(DATABASE_URL)
    SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
    try:
        # 1. Create any tables that do not exist yet.
        print("📋 새 테이블 생성 중...")
        Base.metadata.create_all(bind=engine)
        print("✅ 새 테이블 생성 완료")
        # 2. Add columns missing from existing tables.
        print("🔧 기존 테이블 컬럼 동기화 중...")
        # Every model whose columns should be synchronized.
        # NOTE(review): these names come from the wildcard import of
        # app.models — confirm they all exist there.
        models_to_check = [
            User, Project, File, Material, MaterialStandard,
            MaterialCategory, MaterialSpecification, MaterialGrade,
            MaterialPattern, SpecialMaterial, SpecialMaterialGrade,
            SpecialMaterialPattern, PipeDetail, RequirementType,
            UserRequirement, TubingCategory, TubingSpecification,
            TubingManufacturer, TubingProduct, MaterialTubingMapping,
            SupportDetails, PurchaseRequestItems, FittingDetails,
            FlangeDetails, ValveDetails, GasketDetails, BoltDetails,
            InstrumentDetails, PurchaseRequests, Jobs, PipeEndPreparations,
            MaterialPurchaseTracking, ExcelExports, UserActivityLogs,
            ExcelExportHistory, ExportedMaterials, PurchaseStatusHistory
        ]
        for model in models_to_check:
            if hasattr(model, '__tablename__'):
                add_missing_columns(engine, model)
        print("✅ 모든 테이블 컬럼 동기화 완료")
        # 3. Seed the initial user accounts.
        with SessionLocal() as db:
            # Create the admin account if it does not exist.
            admin_user = db.query(User).filter(User.username == ADMIN_USERNAME).first()
            if not admin_user:
                hashed_password = bcrypt.hashpw(ADMIN_PASSWORD.encode('utf-8'), bcrypt.gensalt()).decode('utf-8')
                admin_user = User(
                    username=ADMIN_USERNAME,
                    password=hashed_password,
                    name=ADMIN_NAME,
                    email=ADMIN_EMAIL,
                    role='admin',
                    access_level='admin',
                    department='IT',
                    position='시스템 관리자',
                    status='active'
                )
                db.add(admin_user)
                print(f" 관리자 계정 '{ADMIN_USERNAME}' 생성 완료")
            else:
                print(f"☑️ 관리자 계정 '{ADMIN_USERNAME}' 이미 존재")
            # Create the system account if it does not exist.
            system_user = db.query(User).filter(User.username == SYSTEM_USERNAME).first()
            if not system_user:
                hashed_password = bcrypt.hashpw(SYSTEM_PASSWORD.encode('utf-8'), bcrypt.gensalt()).decode('utf-8')
                system_user = User(
                    username=SYSTEM_USERNAME,
                    password=hashed_password,
                    name=SYSTEM_NAME,
                    email=SYSTEM_EMAIL,
                    role='system',
                    access_level='system',
                    department='IT',
                    position='시스템 계정',
                    status='active'
                )
                db.add(system_user)
                print(f" 시스템 계정 '{SYSTEM_USERNAME}' 생성 완료")
            else:
                print(f"☑️ 시스템 계정 '{SYSTEM_USERNAME}' 이미 존재")
            db.commit()
            print("✅ 초기 사용자 계정 확인 및 생성 완료")
        # 4. Create performance indexes (idempotent; failures are non-fatal).
        print("🚀 성능 인덱스 생성 중...")
        with engine.connect() as connection:
            indexes = [
                "CREATE INDEX IF NOT EXISTS idx_materials_main_nom ON materials(main_nom);",
                "CREATE INDEX IF NOT EXISTS idx_materials_red_nom ON materials(red_nom);",
                "CREATE INDEX IF NOT EXISTS idx_materials_full_material_grade ON materials(full_material_grade);",
                "CREATE INDEX IF NOT EXISTS idx_materials_revision_status ON materials(revision_status);",
                "CREATE INDEX IF NOT EXISTS idx_materials_file_id ON materials(file_id);",
                "CREATE INDEX IF NOT EXISTS idx_materials_drawing_name ON materials(drawing_name);",
                "CREATE INDEX IF NOT EXISTS idx_materials_classified_category ON materials(classified_category);",
                "CREATE INDEX IF NOT EXISTS idx_files_job_no ON files(job_no);",
                "CREATE INDEX IF NOT EXISTS idx_files_uploaded_by ON files(uploaded_by);",
                "CREATE INDEX IF NOT EXISTS idx_users_username ON users(username);",
                "CREATE INDEX IF NOT EXISTS idx_users_status ON users(status);",
            ]
            for index_sql in indexes:
                try:
                    connection.execute(text(index_sql))
                except Exception as e:
                    print(f" ⚠️ 인덱스 생성 실패: {e}")
            connection.commit()
        print("✅ 성능 인덱스 생성 완료")
        return True
    except SQLAlchemyOperationalError as e:
        print(f"❌ 데이터베이스 작업 실패: {e}")
        return False
    except Exception as e:
        print(f"❌ 예상치 못한 오류 발생: {e}")
        return False
if __name__ == "__main__":
    # Script entry point: wait for the DB, then run the full migration.
    print("🚀 TK-MP-Project 완전한 자동 DB 마이그레이션 시작")
    print(f"⏰ 시작 시간: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}")
    uname = os.uname()  # hoisted: call once instead of twice
    print(f"🖥️ 환경: {uname.sysname} {uname.machine}")
    print("🔧 DB 설정 확인:")
    print(f" - DB_HOST: {DB_HOST}")
    print(f" - DB_PORT: {DB_PORT}")
    print(f" - DB_NAME: {DB_NAME}")
    print(f" - DB_USER: {DB_USER}")
    if not wait_for_db():
        print("❌ DB 마이그레이션 실패. 서버 시작을 중단합니다.")
        # Fix: use sys.exit() — the builtin exit() is injected by the `site`
        # module and is not guaranteed to exist (e.g. under `python -S`).
        sys.exit(1)
    # A partial migration failure is tolerated: the server still starts.
    if not create_tables_and_migrate():
        print("⚠️ DB 마이그레이션에서 일부 오류가 발생했지만 서버를 시작합니다.")
        print(" (기존 스키마가 있거나 부분적으로 성공했을 수 있습니다)")
    else:
        print("✅ 완전한 DB 마이그레이션 성공")
    print(f"⏰ 완료 시간: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}")

View File

@@ -0,0 +1,142 @@
#!/usr/bin/env python3
"""
누락된 테이블 수동 생성 스크립트
배포 시 자동으로 실행되도록 설계
"""
import os
import sys
import psycopg2
from psycopg2 import sql
# 현재 디렉토리를 Python 경로에 추가
sys.path.insert(0, '/app')
# 환경 변수 로드
DB_HOST = os.getenv("DB_HOST", "postgres")
DB_PORT = os.getenv("DB_PORT", "5432")
DB_NAME = os.getenv("DB_NAME", "tk_mp_bom")
DB_USER = os.getenv("DB_USER", "tkmp_user")
DB_PASSWORD = os.getenv("DB_PASSWORD", "tkmp_password_2025")
def create_missing_tables():
    """누락된 테이블/컬럼을 직접 생성 (멱등 실행 가능).

    SQLAlchemy 마이그레이션에서 빠질 수 있는 purchase_requests 등의 테이블과
    materials 테이블의 누락 컬럼을 IF NOT EXISTS 방식으로 추가한다.

    Returns:
        bool: 연결과 필수 생성 작업이 성공하면 True, 실패 시 False.
    """
    print("🔧 누락된 테이블 생성 시작...")
    conn = None  # 연결 실패 시 finally의 close 가드에서 NameError가 나지 않도록 선초기화
    try:
        conn = psycopg2.connect(
            host=DB_HOST,
            port=DB_PORT,
            database=DB_NAME,
            user=DB_USER,
            password=DB_PASSWORD
        )
        # PostgreSQL에서는 트랜잭션 내 문장 하나가 실패하면 이후 모든 문장이
        # "current transaction is aborted" 오류로 연쇄 실패한다.
        # "무시하고 계속" 의미를 살리기 위해 문장 단위 autocommit으로 실행한다.
        conn.autocommit = True
        with conn.cursor() as cursor:
            # purchase_requests 테이블 생성
            cursor.execute("""
                CREATE TABLE IF NOT EXISTS purchase_requests (
                    request_id VARCHAR(50) PRIMARY KEY,
                    request_no VARCHAR(100) NOT NULL,
                    file_id INTEGER REFERENCES files(id),
                    job_no VARCHAR(50) NOT NULL,
                    category VARCHAR(50),
                    material_count INTEGER,
                    excel_file_path VARCHAR(500),
                    requested_by INTEGER REFERENCES users(user_id),
                    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
                );
            """)
            print("✅ purchase_requests 테이블 생성 완료")
            # materials 테이블에 누락된 컬럼들 추가 (IF NOT EXISTS로 멱등)
            missing_columns = [
                "ALTER TABLE materials ADD COLUMN IF NOT EXISTS brand VARCHAR(100);",
                "ALTER TABLE materials ADD COLUMN IF NOT EXISTS user_requirement TEXT;",
                "ALTER TABLE materials ADD COLUMN IF NOT EXISTS purchase_confirmed BOOLEAN DEFAULT FALSE;",
                "ALTER TABLE materials ADD COLUMN IF NOT EXISTS confirmed_quantity NUMERIC(10,3);",
                "ALTER TABLE materials ADD COLUMN IF NOT EXISTS purchase_status VARCHAR(20);",
                "ALTER TABLE materials ADD COLUMN IF NOT EXISTS purchase_confirmed_by VARCHAR(100);",
                "ALTER TABLE materials ADD COLUMN IF NOT EXISTS purchase_confirmed_at TIMESTAMP;",
                "ALTER TABLE materials ADD COLUMN IF NOT EXISTS material_hash VARCHAR(64);",
                "ALTER TABLE materials ADD COLUMN IF NOT EXISTS normalized_description TEXT;",
                "ALTER TABLE materials ADD COLUMN IF NOT EXISTS revision_status VARCHAR(20);",
            ]
            for column_sql in missing_columns:
                cursor.execute(column_sql)
            print("✅ materials 테이블 누락된 컬럼들 추가 완료")
            # 기타 누락될 수 있는 테이블들
            missing_tables = [
                """
                CREATE TABLE IF NOT EXISTS excel_exports (
                    id SERIAL PRIMARY KEY,
                    file_id INTEGER REFERENCES files(id),
                    export_type VARCHAR(50),
                    file_path VARCHAR(500),
                    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
                );
                """,
                """
                CREATE TABLE IF NOT EXISTS user_activity_logs (
                    id SERIAL PRIMARY KEY,
                    user_id INTEGER REFERENCES users(user_id),
                    activity_type VARCHAR(50),
                    description TEXT,
                    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
                );
                """,
                """
                CREATE TABLE IF NOT EXISTS excel_export_history (
                    id SERIAL PRIMARY KEY,
                    request_id VARCHAR(50) REFERENCES purchase_requests(request_id),
                    export_path VARCHAR(500),
                    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
                );
                """,
                """
                CREATE TABLE IF NOT EXISTS exported_materials (
                    id SERIAL PRIMARY KEY,
                    export_id INTEGER REFERENCES excel_exports(id),
                    material_id INTEGER REFERENCES materials(id),
                    quantity NUMERIC(10, 3),
                    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
                );
                """,
                """
                CREATE TABLE IF NOT EXISTS purchase_status_history (
                    id SERIAL PRIMARY KEY,
                    material_id INTEGER REFERENCES materials(id),
                    old_status VARCHAR(50),
                    new_status VARCHAR(50),
                    changed_by INTEGER REFERENCES users(user_id),
                    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
                );
                """
            ]
            for table_sql in missing_tables:
                try:
                    cursor.execute(table_sql)
                    # DDL 문자열에서 테이블명만 추출해 로그에 표시
                    table_name = table_sql.split("CREATE TABLE IF NOT EXISTS ")[1].split(" (")[0]
                    print(f"✅ {table_name} 테이블 생성 완료")
                except Exception as e:
                    print(f"⚠️ 테이블 생성 중 오류 (무시하고 계속): {e}")
        print("✅ 모든 누락된 테이블 생성 완료")
    except Exception as e:
        print(f"❌ 테이블 생성 실패: {e}")
        return False
    finally:
        if conn is not None:
            conn.close()
    return True
# 직접 실행 시에만 테이블 생성을 수행하고, 실패하면 비정상 종료 코드를 반환한다.
if __name__ == "__main__":
    print("🚀 누락된 테이블 생성 스크립트 시작")
    if not create_missing_tables():
        print("❌ 누락된 테이블 생성 실패")
        sys.exit(1)
    print("✅ 누락된 테이블 생성 완료")

View File

@@ -0,0 +1,113 @@
#!/usr/bin/env python3
"""
TK-MP-Project 자동 DB 마이그레이션 스크립트
배포 시 백엔드 시작 전에 자동으로 실행되어 DB 스키마를 동기화
"""
import sys
import os
import time
from datetime import datetime
# 백엔드 모듈 경로 추가
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
def wait_for_db():
    """데이터베이스 연결 대기.

    최대 30회, 2초 간격(약 60초)으로 연결을 재시도한다.

    Returns:
        bool: 연결 성공 시 True, 재시도 소진 시 False.
    """
    max_retries = 30
    retry_count = 0
    while retry_count < max_retries:
        try:
            from app.database import engine
            from sqlalchemy import text

            # 간단한 연결 테스트.
            # SQLAlchemy 2.0부터 문자열 SQL의 직접 실행이 제거되었으므로
            # 반드시 text()로 감싸서 실행해야 한다.
            with engine.connect() as conn:
                conn.execute(text("SELECT 1"))
            print("✅ 데이터베이스 연결 성공")
            return True
        except Exception:
            retry_count += 1
            print(f"⏳ 데이터베이스 연결 대기 중... ({retry_count}/{max_retries})")
            time.sleep(2)
    print("❌ 데이터베이스 연결 실패")
    return False
def sync_database_schema():
    """데이터베이스 스키마 동기화 (SQLAlchemy create_all 기반).

    Returns:
        bool: 동기화 성공 시 True, 임포트/실행 실패 시 False.
    """
    try:
        from app.models import Base
        from app.database import engine

        print("🔄 데이터베이스 스키마 동기화 중...")
        # 모델에 정의된 모든 테이블을 생성 (이미 있으면 건너뜀)
        Base.metadata.create_all(bind=engine)
    except Exception as exc:
        print(f"❌ 스키마 동기화 실패: {str(exc)}")
        return False
    print("✅ 데이터베이스 스키마 동기화 완료")
    return True
def ensure_required_data():
    """필수 데이터 확인 및 생성.

    관리자(admin) 계정이 없으면 기본 계정을 생성한다.

    Returns:
        bool: 확인/생성 성공 시 True, 실패 시 False.
    """
    db = None  # 예외 경로에서도 finally에서 안전하게 닫기 위한 선초기화
    try:
        from app.database import get_db
        from app.auth.models import User

        print("🔄 필수 데이터 확인 중...")
        db = next(get_db())
        # 관리자 계정 확인
        admin_user = db.query(User).filter(User.username == 'admin').first()
        if not admin_user:
            print("📝 기본 관리자 계정 생성 중...")
            admin_user = User(
                username='admin',
                password='$2b$12$ld4LDOW5mxkiRQEkXfMUIep/aIzFleQZ4yoL10ZQkUxGqnkYuhNMW',  # admin123
                name='시스템 관리자',
                email='admin@tkmp.com',
                role='admin',
                access_level='admin',
                department='IT',
                position='시스템 관리자',
                status='active'
            )
            db.add(admin_user)
            db.commit()
            print("✅ 기본 관리자 계정 생성 완료")
        print("✅ 필수 데이터 확인 완료")
        return True
    except Exception as e:
        print(f"❌ 필수 데이터 확인 실패: {str(e)}")
        return False
    finally:
        # 기존 구현은 성공 경로에서만 close를 호출해 예외 시 세션이 누수되었다.
        if db is not None:
            db.close()
def main():
    """메인 실행 함수: 연결 대기 → 스키마 동기화 → 필수 데이터 확인 순으로 수행."""
    print("🚀 TK-MP-Project 자동 DB 마이그레이션 시작")
    print(f"⏰ 시작 시간: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}")
    # 각 단계는 실패 시 즉시 비정상 종료한다.
    for step in (wait_for_db, sync_database_schema, ensure_required_data):
        if not step():
            sys.exit(1)
    print("🎉 자동 DB 마이그레이션 완료!")
    print(f"⏰ 완료 시간: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}")
# 모듈 임포트가 아닌 직접 실행 시에만 마이그레이션을 수행한다.
if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,144 @@
#!/usr/bin/env python3
"""
TK-MP-Project 완전한 DB 스키마 생성 스크립트
백엔드 SQLAlchemy 모델을 기반으로 PostgreSQL 스키마를 자동 생성
"""
import sys
import os
from datetime import datetime
# 백엔드 모듈 경로 추가
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
from sqlalchemy import create_engine, MetaData
from sqlalchemy.schema import CreateTable, CreateIndex
from app.models import Base
from app.auth.models import User, LoginLog, UserSession, Permission, RolePermission
def generate_schema_sql():
    """SQLAlchemy 모델을 기반으로 완전한 PostgreSQL 스키마 SQL 문자열을 생성.

    메모리 SQLite 엔진으로 메타데이터를 확정한 뒤, PostgreSQL 방언으로
    CREATE TABLE / CREATE INDEX 문과 필수 기본 데이터 INSERT 문을 만들어
    하나의 SQL 스크립트 문자열로 반환한다.

    Returns:
        str: 실행 가능한 PostgreSQL 스키마 스크립트 전체.
    """
    # 메모리 내 SQLite 엔진 생성 (스키마 생성용)
    engine = create_engine('sqlite:///:memory:', echo=False)
    # 모든 테이블 생성
    Base.metadata.create_all(engine)
    # PostgreSQL 방언으로 변환
    from sqlalchemy.dialects import postgresql
    schema_lines = []
    schema_lines.append("-- ================================")
    schema_lines.append("-- TK-MP-Project 완전한 데이터베이스 스키마")
    schema_lines.append(f"-- 자동 생성일: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}")
    schema_lines.append("-- SQLAlchemy 모델 기반 자동 생성")
    schema_lines.append("-- ================================")
    schema_lines.append("")
    # 테이블 생성 SQL 생성
    for table in Base.metadata.sorted_tables:
        create_table_sql = str(CreateTable(table).compile(dialect=postgresql.dialect()))
        # SQLite -> PostgreSQL 타입 보정
        # (기존의 'BOOLEAN'->'BOOLEAN', 'TEXT'->'TEXT' 치환은 무의미한
        #  동일값 치환이라 제거했다. 실제 변환이 필요한 항목만 수행.)
        create_table_sql = create_table_sql.replace('DATETIME', 'TIMESTAMP')
        create_table_sql = create_table_sql.replace('JSON', 'JSONB')
        schema_lines.append(f"-- {table.name} 테이블")
        # NOTE(review): 앞의 13글자("CREATE TABLE ")를 잘라내는 방식은 컴파일
        # 결과가 정확히 해당 접두사로 시작한다고 가정한다. 결과에 선행 개행이
        # 포함되면 한 글자 어긋날 수 있으니 생성된 SQL 파일을 확인할 것.
        schema_lines.append(f"CREATE TABLE IF NOT EXISTS {create_table_sql[13:]};")  # "CREATE TABLE " 제거
        schema_lines.append("")
    # 인덱스 생성
    schema_lines.append("-- ================================")
    schema_lines.append("-- 인덱스 생성")
    schema_lines.append("-- ================================")
    schema_lines.append("")
    for table in Base.metadata.sorted_tables:
        for index in table.indexes:
            create_index_sql = str(CreateIndex(index).compile(dialect=postgresql.dialect()))
            schema_lines.append(f"CREATE INDEX IF NOT EXISTS {create_index_sql[13:]};")  # "CREATE INDEX " 제거
    # 필수 데이터 삽입
    schema_lines.append("")
    schema_lines.append("-- ================================")
    schema_lines.append("-- 필수 기본 데이터 삽입")
    schema_lines.append("-- ================================")
    schema_lines.append("")
    # 기본 관리자 계정
    schema_lines.append("-- 기본 관리자 계정 (비밀번호: admin123)")
    schema_lines.append("INSERT INTO users (username, password, name, email, role, access_level, department, position, status) VALUES")
    schema_lines.append("('admin', '$2b$12$ld4LDOW5mxkiRQEkXfMUIep/aIzFleQZ4yoL10ZQkUxGqnkYuhNMW', '시스템 관리자', 'admin@tkmp.com', 'admin', 'admin', 'IT', '시스템 관리자', 'active'),")
    schema_lines.append("('system', '$2b$12$ld4LDOW5mxkiRQEkXfMUIep/aIzFleQZ4yoL10ZQkUxGqnkYuhNMW', '시스템 계정', 'system@tkmp.com', 'system', 'system', 'IT', '시스템 계정', 'active')")
    schema_lines.append("ON CONFLICT (username) DO NOTHING;")
    schema_lines.append("")
    # 기본 권한 데이터 (고정 문자열이므로 f-string 조립이 안전하다)
    schema_lines.append("-- 기본 권한 데이터")
    permissions = [
        ('bom.view', 'BOM 조회 권한', 'bom'),
        ('bom.create', 'BOM 생성 권한', 'bom'),
        ('bom.edit', 'BOM 수정 권한', 'bom'),
        ('bom.delete', 'BOM 삭제 권한', 'bom'),
        ('project.view', '프로젝트 조회 권한', 'project'),
        ('project.create', '프로젝트 생성 권한', 'project'),
        ('project.edit', '프로젝트 수정 권한', 'project'),
        ('file.upload', '파일 업로드 권한', 'file'),
        ('file.download', '파일 다운로드 권한', 'file'),
        ('user.view', '사용자 조회 권한', 'user'),
        ('user.create', '사용자 생성 권한', 'user'),
        ('system.admin', '시스템 관리 권한', 'system')
    ]
    schema_lines.append("INSERT INTO permissions (permission_name, description, module) VALUES")
    for i, (name, desc, module) in enumerate(permissions):
        comma = "," if i < len(permissions) - 1 else ""
        schema_lines.append(f"('{name}', '{desc}', '{module}'){comma}")
    schema_lines.append("ON CONFLICT (permission_name) DO NOTHING;")
    schema_lines.append("")
    # 완료 메시지
    schema_lines.append("-- ================================")
    schema_lines.append("-- 스키마 생성 완료")
    schema_lines.append("-- ================================")
    schema_lines.append("")
    schema_lines.append("DO $$")
    schema_lines.append("BEGIN")
    schema_lines.append("    RAISE NOTICE '✅ TK-MP-Project 완전한 데이터베이스 스키마가 성공적으로 생성되었습니다!';")
    schema_lines.append("    RAISE NOTICE '👤 기본 계정: admin/admin123, system/admin123';")
    schema_lines.append("    RAISE NOTICE '🔐 권한 시스템: 모듈별 세분화된 권한 적용';")
    schema_lines.append("    RAISE NOTICE '📊 자동 생성: SQLAlchemy 모델 기반 완전 동기화';")
    schema_lines.append("END $$;")
    return '\n'.join(schema_lines)
def main():
    """스키마 SQL을 생성해 database/init 폴더에 기록하는 진입점."""
    try:
        print("🔄 SQLAlchemy 모델 분석 중...")
        schema_sql = generate_schema_sql()
        # 스키마 파일 저장 경로: <repo>/database/init/00_auto_generated_schema.sql
        script_dir = os.path.dirname(__file__)
        output_file = os.path.join(script_dir, '..', '..', 'database', 'init', '00_auto_generated_schema.sql')
        os.makedirs(os.path.dirname(output_file), exist_ok=True)
        with open(output_file, 'w', encoding='utf-8') as out:
            out.write(schema_sql)
        print(f"✅ 완전한 DB 스키마가 생성되었습니다: {output_file}")
        print("📋 포함된 내용:")
        for summary in (
            " - 모든 SQLAlchemy 모델 기반 테이블",
            " - 필수 인덱스",
            " - 기본 관리자 계정",
            " - 기본 권한 데이터",
        ):
            print(summary)
        print("🚀 배포 시 이 파일이 자동으로 실행됩니다.")
    except Exception as e:
        print(f"❌ 스키마 생성 실패: {str(e)}")
        sys.exit(1)
# 직접 실행 시에만 스키마 파일 생성을 수행한다.
if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,195 @@
"""
데이터베이스 성능 최적화 스크립트
인덱스 생성, 쿼리 최적화, 통계 업데이트
"""
import os
import psycopg2
from psycopg2 import sql
from ..utils.logger import get_logger
logger = get_logger(__name__)
# 환경 변수 로드
DB_HOST = os.getenv("DB_HOST", "postgres")
DB_PORT = os.getenv("DB_PORT", "5432")
DB_NAME = os.getenv("DB_NAME", "tk_mp_bom")
DB_USER = os.getenv("DB_USER", "tkmp_user")
DB_PASSWORD = os.getenv("DB_PASSWORD", "tkmp_password_2025")
def optimize_database():
    """데이터베이스 성능 최적화 실행.

    핵심 인덱스 생성, 테이블 통계(ANALYZE) 갱신, VACUUM ANALYZE를 수행한다.

    Returns:
        bool: 전체 과정이 정상 종료되면 True, 연결/치명적 오류 시 False.
    """
    conn = None  # 연결 실패 시 finally의 close 가드용 선초기화
    try:
        conn = psycopg2.connect(
            host=DB_HOST,
            port=DB_PORT,
            database=DB_NAME,
            user=DB_USER,
            password=DB_PASSWORD
        )
        # CREATE INDEX CONCURRENTLY와 VACUUM은 트랜잭션 블록 안에서 실행할 수
        # 없다. psycopg2는 기본적으로 트랜잭션을 열기 때문에 autocommit 모드로
        # 전환해야 아래 문장들이 실제로 성공한다. (또한 문장 하나가 실패해도
        # 이후 문장이 aborted-transaction 오류로 연쇄 실패하지 않는다.)
        conn.autocommit = True
        with conn.cursor() as cursor:
            # 1. 핵심 인덱스 생성
            print("🔧 핵심 인덱스 생성 중...")
            indexes = [
                # materials 테이블 인덱스
                "CREATE INDEX CONCURRENTLY IF NOT EXISTS idx_materials_file_id ON materials(file_id);",
                "CREATE INDEX CONCURRENTLY IF NOT EXISTS idx_materials_category ON materials(classified_category);",
                "CREATE INDEX CONCURRENTLY IF NOT EXISTS idx_materials_drawing_name ON materials(drawing_name);",
                "CREATE INDEX CONCURRENTLY IF NOT EXISTS idx_materials_line_no ON materials(line_no);",
                "CREATE INDEX CONCURRENTLY IF NOT EXISTS idx_materials_revision_status ON materials(revision_status);",
                "CREATE INDEX CONCURRENTLY IF NOT EXISTS idx_materials_purchase_confirmed ON materials(purchase_confirmed);",
                "CREATE INDEX CONCURRENTLY IF NOT EXISTS idx_materials_material_hash ON materials(material_hash);",
                "CREATE INDEX CONCURRENTLY IF NOT EXISTS idx_materials_main_nom ON materials(main_nom);",
                "CREATE INDEX CONCURRENTLY IF NOT EXISTS idx_materials_material_grade ON materials(material_grade);",
                # files 테이블 인덱스
                "CREATE INDEX CONCURRENTLY IF NOT EXISTS idx_files_job_no ON files(job_no);",
                "CREATE INDEX CONCURRENTLY IF NOT EXISTS idx_files_revision ON files(revision);",
                "CREATE INDEX CONCURRENTLY IF NOT EXISTS idx_files_uploaded_by ON files(uploaded_by);",
                "CREATE INDEX CONCURRENTLY IF NOT EXISTS idx_files_upload_date ON files(upload_date);",
                "CREATE INDEX CONCURRENTLY IF NOT EXISTS idx_files_is_active ON files(is_active);",
                # 복합 인덱스 (자주 함께 사용되는 컬럼들)
                "CREATE INDEX CONCURRENTLY IF NOT EXISTS idx_materials_file_category ON materials(file_id, classified_category);",
                "CREATE INDEX CONCURRENTLY IF NOT EXISTS idx_materials_drawing_line ON materials(drawing_name, line_no);",
                "CREATE INDEX CONCURRENTLY IF NOT EXISTS idx_files_job_revision ON files(job_no, revision);",
                # material_purchase_tracking 인덱스
                "CREATE INDEX CONCURRENTLY IF NOT EXISTS idx_mpt_material_hash ON material_purchase_tracking(material_hash);",
                "CREATE INDEX CONCURRENTLY IF NOT EXISTS idx_mpt_job_revision ON material_purchase_tracking(job_no, revision);",
                "CREATE INDEX CONCURRENTLY IF NOT EXISTS idx_mpt_purchase_status ON material_purchase_tracking(purchase_status);",
                # 상세 테이블들 인덱스
                "CREATE INDEX CONCURRENTLY IF NOT EXISTS idx_pipe_details_material_id ON pipe_details(material_id);",
                "CREATE INDEX CONCURRENTLY IF NOT EXISTS idx_fitting_details_material_id ON fitting_details(material_id);",
                "CREATE INDEX CONCURRENTLY IF NOT EXISTS idx_flange_details_material_id ON flange_details(material_id);",
                "CREATE INDEX CONCURRENTLY IF NOT EXISTS idx_valve_details_material_id ON valve_details(material_id);",
                "CREATE INDEX CONCURRENTLY IF NOT EXISTS idx_bolt_details_material_id ON bolt_details(material_id);",
                "CREATE INDEX CONCURRENTLY IF NOT EXISTS idx_gasket_details_material_id ON gasket_details(material_id);",
                # 사용자 관련 인덱스
                "CREATE INDEX CONCURRENTLY IF NOT EXISTS idx_users_username ON users(username);",
                "CREATE INDEX CONCURRENTLY IF NOT EXISTS idx_users_status ON users(status);",
                "CREATE INDEX CONCURRENTLY IF NOT EXISTS idx_users_role ON users(role);",
            ]
            for index_sql in indexes:
                try:
                    cursor.execute(index_sql)
                    print(f"✅ 인덱스 생성: {index_sql.split('idx_')[1].split(' ')[0] if 'idx_' in index_sql else 'unknown'}")
                except Exception as e:
                    print(f"⚠️ 인덱스 생성 실패: {e}")
            # 2. 통계 업데이트
            print("📊 테이블 통계 업데이트 중...")
            tables_to_analyze = [
                'materials', 'files', 'projects', 'users',
                'material_purchase_tracking', 'pipe_details',
                'fitting_details', 'flange_details', 'valve_details',
                'bolt_details', 'gasket_details'
            ]
            for table in tables_to_analyze:
                try:
                    # 테이블명은 위의 고정 목록에서만 오므로 f-string 조립이 안전하다.
                    cursor.execute(f"ANALYZE {table};")
                    print(f"✅ 통계 업데이트: {table}")
                except Exception as e:
                    print(f"⚠️ 통계 업데이트 실패 ({table}): {e}")
            # 3. VACUUM 실행 (autocommit 모드이므로 그대로 실행 가능)
            print("🧹 데이터베이스 정리 중...")
            try:
                cursor.execute("VACUUM ANALYZE;")
                print("✅ VACUUM ANALYZE 완료")
            except Exception as e:
                print(f"⚠️ VACUUM 실패: {e}")
        print("✅ 데이터베이스 최적화 완료")
    except Exception as e:
        print(f"❌ 데이터베이스 최적화 실패: {e}")
        return False
    finally:
        if conn is not None:
            conn.close()
    return True
def get_database_stats():
    """데이터베이스 통계 조회.

    테이블별 레코드 수와 상위 10개 인덱스 사용률을 표준 출력으로 보여준다.
    반환값은 없다 (출력 전용).
    """
    conn = None  # 연결 실패 시 finally의 close 가드용 선초기화
    try:
        conn = psycopg2.connect(
            host=DB_HOST,
            port=DB_PORT,
            database=DB_NAME,
            user=DB_USER,
            password=DB_PASSWORD
        )
        # 없는 테이블 조회 하나가 실패해도 이후 조회가 aborted-transaction
        # 오류로 연쇄 실패하지 않도록 autocommit으로 실행한다.
        conn.autocommit = True
        with conn.cursor() as cursor:
            # 테이블별 레코드 수 (테이블명은 고정 목록이므로 f-string 안전)
            print("📊 테이블별 레코드 수:")
            tables = ['materials', 'files', 'projects', 'users', 'material_purchase_tracking']
            for table in tables:
                try:
                    cursor.execute(f"SELECT COUNT(*) FROM {table};")
                    count = cursor.fetchone()[0]
                    print(f" {table}: {count:,}")
                except Exception as e:
                    print(f" {table}: 조회 실패 ({e})")
            # 인덱스 사용률 확인 (읽기 횟수 상위 10개)
            print("\n🔍 인덱스 사용률:")
            cursor.execute("""
                SELECT
                    schemaname,
                    tablename,
                    indexname,
                    idx_tup_read,
                    idx_tup_fetch
                FROM pg_stat_user_indexes
                WHERE idx_tup_read > 0
                ORDER BY idx_tup_read DESC
                LIMIT 10;
            """)
            for row in cursor.fetchall():
                print(f" {row[1]}.{row[2]}: {row[3]:,} reads, {row[4]:,} fetches")
    except Exception as e:
        print(f"❌ 통계 조회 실패: {e}")
    finally:
        if conn is not None:
            conn.close()
# 직접 실행 시: 최적화 전 통계 → 최적화 → 최적화 후 통계 순으로 수행한다.
if __name__ == "__main__":
    print("🚀 데이터베이스 성능 최적화 시작")
    # 현재 통계 확인
    get_database_stats()
    # 최적화 실행
    if not optimize_database():
        print("❌ 최적화 실패")
    else:
        print("✅ 최적화 완료")
        # 최적화 후 통계 확인
        print("\n📊 최적화 후 통계:")
        get_database_stats()

View File

@@ -0,0 +1,126 @@
#!/usr/bin/env python3
"""
TK-MP-Project 간단하고 안정적인 DB 마이그레이션 스크립트
macOS Docker와 Synology Container Manager 모두 지원
"""
import os
import sys
import time
import psycopg2
from datetime import datetime
def get_db_config():
    """환경변수에서 psycopg2 접속 설정 딕셔너리를 구성해 반환."""
    env = os.environ
    return {
        'host': env.get('DB_HOST', 'postgres'),
        'port': int(env.get('DB_PORT', 5432)),
        'database': env.get('DB_NAME', 'tk_mp_bom'),
        'user': env.get('DB_USER', 'tkmp_user'),
        'password': env.get('DB_PASSWORD', 'tkmp_password'),
    }
def wait_for_database(max_retries=60, retry_interval=2):
    """데이터베이스 연결 대기 (더 긴 대기시간과 짧은 간격).

    Args:
        max_retries: 최대 재시도 횟수.
        retry_interval: 재시도 간 대기 시간(초).

    Returns:
        bool: 연결 성공 시 True, 재시도 소진 시 False.
    """
    db_config = get_db_config()
    attempt = 0
    while attempt < max_retries:
        attempt += 1
        try:
            # 연결만 확인하고 즉시 닫는다.
            psycopg2.connect(**db_config).close()
            print(f"✅ 데이터베이스 연결 성공 (시도 {attempt}/{max_retries})")
            return True
        except Exception as e:
            print(f"⏳ DB 연결 대기 중... ({attempt}/{max_retries}) - {str(e)[:50]}")
            time.sleep(retry_interval)
    print("❌ 데이터베이스 연결 실패")
    return False
def execute_sql(sql_commands):
    """SQL 명령어 목록을 하나의 트랜잭션으로 실행.

    Args:
        sql_commands: 실행할 SQL 문자열 목록. 공백뿐인 항목은 건너뛴다.

    Returns:
        bool: 전체 커밋 성공 시 True, 실패 시 롤백 후 False.
    """
    db_config = get_db_config()
    conn = None  # 연결 실패 시 finally의 close 가드용 선초기화
    try:
        conn = psycopg2.connect(**db_config)
        with conn.cursor() as cursor:
            for sql in sql_commands:
                if sql.strip():
                    cursor.execute(sql)
        conn.commit()
        return True
    except Exception as e:
        print(f"❌ SQL 실행 실패: {str(e)}")
        # 기존 구현은 실패 시 연결을 닫지 않아 커넥션이 누수되었다.
        if conn is not None:
            conn.rollback()
        return False
    finally:
        if conn is not None:
            conn.close()
def get_migration_sql():
    """필요한 마이그레이션 SQL 문장 목록을 실행 순서대로 반환."""
    # materials 테이블에 없는 컬럼들을 멱등하게 추가
    materials_columns = [
        "ALTER TABLE materials ADD COLUMN IF NOT EXISTS row_number INTEGER;",
        "ALTER TABLE materials ADD COLUMN IF NOT EXISTS main_nom VARCHAR(50);",
        "ALTER TABLE materials ADD COLUMN IF NOT EXISTS red_nom VARCHAR(50);",
        "ALTER TABLE materials ADD COLUMN IF NOT EXISTS full_material_grade TEXT;",
        "ALTER TABLE materials ADD COLUMN IF NOT EXISTS length NUMERIC(10,3);",
        "ALTER TABLE materials ADD COLUMN IF NOT EXISTS purchase_confirmed BOOLEAN DEFAULT FALSE;",
        "ALTER TABLE materials ADD COLUMN IF NOT EXISTS confirmed_quantity NUMERIC(10,3);",
        "ALTER TABLE materials ADD COLUMN IF NOT EXISTS purchase_status VARCHAR(20);",
        "ALTER TABLE materials ADD COLUMN IF NOT EXISTS purchase_confirmed_by VARCHAR(100);",
        "ALTER TABLE materials ADD COLUMN IF NOT EXISTS purchase_confirmed_at TIMESTAMP;",
        "ALTER TABLE materials ADD COLUMN IF NOT EXISTS revision_status VARCHAR(20);",
        "ALTER TABLE materials ADD COLUMN IF NOT EXISTS material_hash VARCHAR(64);",
        "ALTER TABLE materials ADD COLUMN IF NOT EXISTS normalized_description TEXT;",
    ]
    # users 테이블 status 컬럼 확인 및 기존 is_active 값 이관
    users_status_fixes = [
        "ALTER TABLE users ADD COLUMN IF NOT EXISTS status VARCHAR(20) DEFAULT 'active';",
        "UPDATE users SET status = 'active' WHERE status IS NULL AND is_active = TRUE;",
        "UPDATE users SET status = 'inactive' WHERE status IS NULL AND is_active = FALSE;",
    ]
    # 기본 관리자 계정 확인 및 생성 (있으면 비밀번호/상태 갱신)
    admin_seed = """
    INSERT INTO users (username, password, name, email, role, access_level, department, position, status)
    VALUES ('admin', '$2b$12$ld4LDOW5mxkiRQEkXfMUIep/aIzFleQZ4yoL10ZQkUxGqnkYuhNMW', '시스템 관리자', 'admin@tkmp.com', 'admin', 'admin', 'IT', '시스템 관리자', 'active')
    ON CONFLICT (username) DO UPDATE SET
        password = EXCLUDED.password,
        status = 'active';
    """
    # 인덱스 생성
    index_statements = [
        "CREATE INDEX IF NOT EXISTS idx_materials_main_nom ON materials(main_nom);",
        "CREATE INDEX IF NOT EXISTS idx_materials_red_nom ON materials(red_nom);",
        "CREATE INDEX IF NOT EXISTS idx_materials_revision_status ON materials(revision_status);",
        "CREATE INDEX IF NOT EXISTS idx_materials_purchase_status ON materials(purchase_status);",
        "CREATE INDEX IF NOT EXISTS idx_users_status ON users(status);",
    ]
    return materials_columns + users_status_fixes + [admin_seed] + index_statements
def main():
    """메인 실행 함수: 연결 대기 → 마이그레이션 실행 순으로 수행."""
    print("🚀 TK-MP-Project 간단 DB 마이그레이션 시작")
    print(f"⏰ 시작 시간: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}")
    # 1. 데이터베이스 연결 대기
    print("🔄 데이터베이스 연결 확인 중...")
    if not wait_for_database():
        print("❌ 데이터베이스 연결 실패. 마이그레이션을 중단합니다.")
        sys.exit(1)
    # 2. 마이그레이션 실행
    print("🔄 데이터베이스 마이그레이션 실행 중...")
    if not execute_sql(get_migration_sql()):
        print("❌ 데이터베이스 마이그레이션 실패")
        sys.exit(1)
    print("✅ 데이터베이스 마이그레이션 완료")
    print("🎉 간단 DB 마이그레이션 완료!")
    print(f"⏰ 완료 시간: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}")
    print("👤 기본 계정: admin/admin123")
# 직접 실행 시에만 마이그레이션을 수행한다.
if __name__ == "__main__":
    main()

54
backend/start.sh Normal file
View File

@@ -0,0 +1,54 @@
#!/bin/bash
# TK-MP-Project Backend Start Script
# Complete automatic DB migration then start server

echo "Starting TK-MP-Project Backend..."
echo "Time: $(date '+%Y-%m-%d %H:%M:%S')"
echo "Environment: $(uname -s) $(uname -m)"

# Check environment variables
echo "DB Configuration:"
echo " - DB_HOST: ${DB_HOST:-postgres}"
echo " - DB_PORT: ${DB_PORT:-5432}"
echo " - DB_NAME: ${DB_NAME:-tk_mp_bom}"
echo " - DB_USER: ${DB_USER:-tkmp_user}"

# 1. Run complete DB migration; on failure, fall back to the missing-table fixer
echo "Running complete DB migration..."
if python scripts/complete_migrate.py; then
    echo "SUCCESS: Complete DB migration finished"
else
    echo "WARNING: DB migration had some errors. Trying to fix missing tables..."
    if python scripts/fix_missing_tables.py; then
        echo "SUCCESS: Missing tables fixed"
    else
        echo "WARNING: Some tables may still be missing but starting server anyway"
    fi
fi

# Additional safety check for critical tables
echo "Verifying critical tables..."
python scripts/fix_missing_tables.py

# Complete schema analysis and fix
echo "Running complete schema analysis and fix..."
python scripts/analyze_and_fix_schema.py
echo "Complete schema analysis completed"

# Database performance optimization (run once after migration)
echo "Running database performance optimization..."
python scripts/optimize_database.py
echo "Database optimization completed"

# 2. Start FastAPI server (exec replaces this shell so signals reach uvicorn)
echo "Starting FastAPI server..."
echo " - Port: 8000"
echo " - Host: 0.0.0.0"
echo " - Environment: ${ENVIRONMENT:-development}"
exec uvicorn app.main:app --host 0.0.0.0 --port 8000 --log-level info