Files
tk-factory-services/tkeg/api/app/routers/files.py
Hyungi Ahn f09c86ee01 fix(security): CRITICAL 보안 이슈 13건 일괄 수정
- SEC-42: JWT algorithm HS256 명시 (sign 5곳, verify 3곳)
- SEC-44: MariaDB/PhpMyAdmin 포트 127.0.0.1 바인딩
- SEC-29: escHtml = escapeHtml alias 추가 (XSS 방지)
- SEC-39: Python Dockerfile 4개 non-root user + chown
- SEC-43: deploy-remote.sh 삭제 (평문 비밀번호 포함)
- SEC-11,12: SQL SET ? → 명시적 컬럼 whitelist + IN절 parameterized
- QA-34: vacation approveRequest/cancelRequest 트랜잭션 래핑
- SEC-32,34: material_comparison.py 5개 엔드포인트 인증 + confirmed_by
- SEC-33: files.py 17개 미인증 엔드포인트 인증 추가
- SEC-37: chatbot 프롬프트 인젝션 방어 (sanitize + XML 구분자)
- SEC-38: fastapi-bridge 프록시 JWT 검증 + 캐시 키 user_id 포함
- SEC-58/QA-98: monthly-comparison API_BASE_URL 수정 + 401 처리
- SEC-61: monthlyComparisonModel SELECT FOR UPDATE 추가
- SEC-63: proxyInputController 에러 메시지 노출 제거
- QA-103: pageAccessRoutes error→message 통일
- SEC-62: tbm-create onclick 인젝션 → data-attribute event delegation
- QA-99: tbm-mobile/create 캐시 버스팅 갱신
- QA-100,101: ESC 키 리스너 cleanup 추가

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-01 10:48:58 +09:00

3087 lines
134 KiB
Python
Raw Blame History

This file contains ambiguous Unicode characters
This file contains Unicode characters that might be confused with other characters. If you think that this is intentional, you can safely ignore this warning. Use the Escape button to reveal them.
from fastapi import APIRouter, Depends, HTTPException, UploadFile, File, Form, Request, Query, Body
from sqlalchemy.orm import Session
from sqlalchemy import text
from typing import List, Optional, Dict
from pydantic import BaseModel
import os
import shutil
from datetime import datetime
import uuid
import pandas as pd
import re
from pathlib import Path
import json
from ..database import get_db
from ..auth.middleware import get_current_user
from ..services.activity_logger import ActivityLogger, log_activity_from_request
from ..utils.logger import get_logger
from app.services.excel_parser import BOMParser
from app.services.material_service import MaterialService # [신규] 자재 서비스 임포트
# Logger setup (shared helper from app.utils.logger).
logger = get_logger(__name__)
# [REMOVED] Direct imports of classify_material_integrated etc. were removed (now used inside the service layer).
from app.services.revision_comparator import get_revision_comparison
# Router for all /files endpoints; mounted by the application elsewhere.
router = APIRouter()
class ExcelSaveRequest(BaseModel):
    """Request payload for saving edited Excel material rows back to the server."""

    file_id: int  # id of the uploaded BOM file the materials belong to
    category: str  # material category (e.g. PIPE, FITTING, FLANGE)
    materials: List[Dict]  # edited material rows as plain dicts
    filename: str  # display filename associated with the save
    user_id: Optional[int] = None  # requesting user's id, if known
def extract_enhanced_material_grade(description: str, original_grade: str, category: str) -> str:
    """Extract an enhanced material grade string from a raw description.

    Args:
        description: raw material description text
        original_grade: previously stored grade, used as a fallback
        category: material category (PIPE, FITTING, FLANGE, ...)

    Returns:
        The normalized grade (e.g. 'A106 GR.B'), or the original grade
        (or '-') when no pattern applies.
    """
    if not description:
        return original_grade or '-'

    desc_upper = description.upper()

    # Ordered pattern tables per category; first match wins.
    patterns_by_category = {
        'PIPE': [
            (r'A312\s*(TP\d+[A-Z]*)', lambda m: f'A312 {m.group(1)}'),
            (r'A106\s*(GR\.?\s*[A-Z]+)', lambda m: f'A106 {m.group(1)}'),  # keep full 'A106 GR.B' / 'A106 GR B'
            (r'A106\s*([A-Z]+)', lambda m: f'A106 GR.{m.group(1)}'),       # 'A106 B' -> 'A106 GR.B'
            (r'A333\s*(GR\.?\s*[A-Z0-9]+)', lambda m: f'A333 {m.group(1)}'),  # keep full 'A333 GR.6'
            (r'A333\s*([A-Z0-9]+)', lambda m: f'A333 GR.{m.group(1)}'),    # 'A333 6' -> 'A333 GR.6'
            (r'A53\s*(GR\.?\s*[A-Z]+)', lambda m: f'A53 {m.group(1)}'),    # keep full 'A53 GR.B'
            (r'A53\s*([A-Z]+)', lambda m: f'A53 GR.{m.group(1)}'),         # 'A53 B' -> 'A53 GR.B'
            (r'A335\s*(P\d+[A-Z]*)', lambda m: f'A335 {m.group(1)}'),
            (r'STPG\s*(\d+)', lambda m: f'STPG {m.group(1)}'),
            (r'STS\s*(\d+[A-Z]*)', lambda m: f'STS {m.group(1)}'),
        ],
        'FITTING': [
            (r'A403\s*(WP\d+[A-Z]*)', lambda m: f'A403 {m.group(1)}'),
            (r'A234\s*(WP[A-Z]+)', lambda m: f'A234 {m.group(1)}'),
            (r'A420\s*(WPL\d+)', lambda m: f'A420 {m.group(1)}'),
            (r'A105\s*(GR\.?\s*[N])', lambda m: f'A105 {m.group(1)}'),     # keep full 'A105 GR.N'
            (r'A105\s*([N])', lambda m: f'A105 GR.{m.group(1)}'),          # 'A105 N' -> 'A105 GR.N'
            (r'A105(?!\s*[A-Z])', lambda m: f'A105'),                      # bare 'A105'
            (r'A106\s*(GR\.?\s*[A-Z]+)', lambda m: f'A106 {m.group(1)}'),  # keep full 'A106 GR.B'
            (r'A106\s*([A-Z]+)', lambda m: f'A106 GR.{m.group(1)}'),       # 'A106 B' -> 'A106 GR.B'
            (r'A182\s*(F\d+[A-Z]*)', lambda m: f'A182 {m.group(1)}'),
            (r'A350\s*(LF\d+)', lambda m: f'A350 {m.group(1)}'),
        ],
        'FLANGE': [
            (r'A182\s*(F\d+[A-Z]*)', lambda m: f'A182 {m.group(1)}'),
            (r'A105\s*(GR\.?\s*[N])', lambda m: f'A105 {m.group(1)}'),     # keep full 'A105 GR.N'
            (r'A105\s*([N])', lambda m: f'A105 GR.{m.group(1)}'),          # 'A105 N' -> 'A105 GR.N'
            (r'A105(?!\s*[A-Z])', lambda m: f'A105'),                      # bare 'A105'
            (r'A350\s*(LF\d+)', lambda m: f'A350 {m.group(1)}'),
            (r'A694\s*(F\d+)', lambda m: f'A694 {m.group(1)}'),
        ],
    }

    for pattern, formatter in patterns_by_category.get(category, []):
        hit = re.search(pattern, desc_upper)
        if hit:
            return formatter(hit)

    # No pattern matched: fall back to the stored value.
    return original_grade or '-'
def extract_enhanced_flange_type(description: str, original_type: str) -> str:
    """Append the pipe-side facing info to a flange type abbreviation.

    Args:
        description: raw material description text
        original_type: stored flange type (e.g. 'WELD_NECK')

    Returns:
        Abbreviated type plus facing suffix when both are known
        (e.g. 'WN RF', 'SO FF'); otherwise the abbreviation, the original
        type, or '-'.
    """
    if not description:
        return original_type or '-'

    desc_upper = description.upper()

    # Canonical abbreviations for the stored flange types.
    type_abbreviations = {
        'WELD_NECK': 'WN',
        'SLIP_ON': 'SO',
        'BLIND': 'BL',
        'SOCKET_WELD': 'SW',
        'LAP_JOINT': 'LJ',
        'THREADED': 'TH',
        'ORIFICE': 'ORIFICE',
    }
    display_type = type_abbreviations.get(original_type, original_type) if original_type else '-'

    # Ordered facing markers: (short token, long form, suffix) — first hit wins.
    face_markers = (
        (' RF', 'RAISED FACE', ' RF'),
        (' FF', 'FLAT FACE', ' FF'),
        (' RTJ', 'RING TYPE JOINT', ' RTJ'),
        (' MSF', 'MALE AND FEMALE', ' MSF'),
        (' T&G', 'TONGUE AND GROOVE', ' T&G'),
    )
    pipe_end_type = ''
    for token, long_form, suffix in face_markers:
        if token in desc_upper or long_form in desc_upper:
            pipe_end_type = suffix
            break

    # Combine only when both parts are meaningful.
    if pipe_end_type and display_type != '-':
        return display_type + pipe_end_type
    return display_type
# Directory where uploaded BOM files are stored; created at import time (side effect).
UPLOAD_DIR = Path("uploads")
UPLOAD_DIR.mkdir(exist_ok=True)
# Extensions accepted for BOM uploads.
ALLOWED_EXTENSIONS = {".xlsx", ".xls", ".csv"}
# API info was moved to the /info endpoint.
@router.get("/test")
async def test_endpoint(current_user: dict = Depends(get_current_user)):
    """Authenticated health-check for the files API."""
    payload = {"status": "파일 API가 정상 작동합니다!"}
    return payload
@router.post("/add-missing-columns")
async def add_missing_columns(db: Session = Depends(get_db), current_user: dict = Depends(get_current_user)):
    """Apply idempotent DDL to add columns missing from older schemas.

    Returns a success/failure dict rather than raising, so repeated calls
    are safe from the admin UI.
    """
    ddl_statements = (
        "ALTER TABLE files ADD COLUMN IF NOT EXISTS parsed_count INTEGER DEFAULT 0",
        "ALTER TABLE materials ADD COLUMN IF NOT EXISTS row_number INTEGER",
    )
    try:
        for statement in ddl_statements:
            db.execute(text(statement))
        db.commit()
        return {
            "success": True,
            "message": "누락된 컬럼들이 추가되었습니다",
            "added_columns": ["files.parsed_count", "materials.row_number"]
        }
    except Exception as e:
        db.rollback()
        return {"success": False, "error": f"컬럼 추가 실패: {str(e)}"}
# [REMOVED] 구형 파싱 함수들 (validate_file_extension, generate_unique_filename, parse_dataframe, parse_file_data)
# 이 기능들은 이제 app/services/excel_parser.py의 BOMParser 클래스로 이관되었습니다.
@router.post("/upload")
async def upload_file(
    request: Request,
    file: UploadFile = File(...),
    job_no: str = Form(...),
    revision: str = Form("Rev.0"),  # default is Rev.0 (a brand-new BOM)
    parent_file_id: Optional[int] = Form(None),  # parent file id when uploading a revision
    bom_name: Optional[str] = Form(None),  # BOM name (user supplied)
    db: Session = Depends(get_db),
    current_user: dict = Depends(get_current_user)
):
    """Upload a BOM file, parse it, and persist its materials.

    For revision uploads (``parent_file_id`` given) the revision number is
    auto-incremented, materials accumulated across prior revisions are
    collected for comparison, purchase-request info is inherited, and
    drawings missing from the new revision have their material status
    updated.

    Returns a summary dict. Raises HTTPException(400) for invalid
    extension/size, 404 when the parent file is missing, and 500 on
    processing failure (the stored file is removed on failure).
    """
    # Transaction-error guard: fully reset the injected session before work.
    # Debug: log the upload parameters.
    logger.info(f"[UPLOAD] job_no: {job_no}, revision: {revision}, parent_file_id: {parent_file_id}")
    logger.info(f"[UPLOAD] bom_name: {bom_name}, filename: {file.filename}")
    logger.info(f"[UPLOAD] revision != 'Rev.0': {revision != 'Rev.0'}")
    try:
        # 1. Roll back any transaction the session may be carrying.
        db.rollback()
        logger.info("1단계: 이전 트랜잭션 롤백 완료")
        # 2. Close the session to clear its state.
        db.close()
        logger.info("2단계: 세션 닫기 완료")
        # 3. Open a fresh session for the rest of the request.
        from ..database import get_db
        db = next(get_db())
        logger.info("3단계: 새 세션 생성 완료")
    except Exception as e:
        logger.warning(f"트랜잭션 초기화 중 오류: {e}")
        # Best effort: continue even if the reset failed.
    # [Changed] validate the file extension through BOMParser.
    if not BOMParser.validate_extension(file.filename):
        raise HTTPException(
            status_code=400,
            detail=f"지원하지 않는 파일 형식입니다. 허용된 확장자: xlsx, xls, csv"
        )
    if file.size and file.size > 10 * 1024 * 1024:
        raise HTTPException(status_code=400, detail="파일 크기는 10MB를 초과할 수 없습니다")
    # [Changed] generate a unique stored filename through BOMParser.
    unique_filename = BOMParser.generate_unique_filename(file.filename)
    file_path = UPLOAD_DIR / unique_filename
    try:
        with open(file_path, "wb") as buffer:
            shutil.copyfileobj(file.file, buffer)
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"파일 저장 실패: {str(e)}")
    try:
        # [Changed] parse through BOMParser (includes automatic template detection).
        logger.info(f"파일 파싱 시작: {file_path}")
        materials_data = BOMParser.parse_file(str(file_path))
        parsed_count = len(materials_data)
        logger.info(f"파싱 완료: {parsed_count}개 자재 추출됨")
        # Counters for newly added materials.
        new_materials_count = 0
        existing_materials_descriptions = set()
        # Revision upload only: auto-create the next revision and collect existing materials.
        if parent_file_id is not None:
            logger.info(f"리비전 업로드 감지: parent_file_id={parent_file_id}")
            # Look up the parent file's metadata.
            parent_query = text("""
                SELECT original_filename, revision, bom_name FROM files
                WHERE id = :parent_file_id AND job_no = :job_no
            """)
            parent_result = db.execute(parent_query, {
                "parent_file_id": parent_file_id,
                "job_no": job_no
            })
            parent_file = parent_result.fetchone()
            if not parent_file:
                raise HTTPException(status_code=404, detail="부모 파일을 찾을 수 없습니다.")
            # Determine the latest revision for this BOM (keyed by bom_name).
            bom_name_to_use = parent_file[2] or parent_file[0]  # prefer bom_name, fall back to original_filename
            latest_revision_query = text("""
                SELECT revision FROM files
                WHERE job_no = :job_no
                AND (bom_name = :bom_name OR (bom_name IS NULL AND original_filename = :bom_name))
                ORDER BY revision DESC
                LIMIT 1
            """)
            latest_result = db.execute(latest_revision_query, {
                "job_no": job_no,
                "bom_name": bom_name_to_use
            })
            latest_revision = latest_result.fetchone()
            if latest_revision:
                latest_rev = latest_revision[0]
                if latest_rev.startswith("Rev."):
                    try:
                        rev_num = int(latest_rev.replace("Rev.", ""))
                        revision = f"Rev.{rev_num + 1}"
                    except ValueError:
                        revision = "Rev.1"
                else:
                    revision = "Rev.1"
                logger.info(f"리비전 업로드: {latest_rev} -> {revision}")
            else:
                revision = "Rev.1"
                logger.info(f"첫 번째 리비전: {revision}")
            # Accumulated material list across all prior revisions (Rev.0 .. current).
            existing_materials_query = text("""
                SELECT m.original_description, m.size_spec, SUM(m.quantity) as total_quantity
                FROM materials m
                JOIN files f ON m.file_id = f.id
                WHERE f.job_no = :job_no
                AND f.id <= :parent_file_id
                AND f.is_active = TRUE
                GROUP BY m.original_description, m.size_spec
            """)
            existing_result = db.execute(existing_materials_query, {
                "job_no": job_no,
                "parent_file_id": parent_file_id
            })
            existing_materials_with_quantity = {}
            for row in existing_result:
                # Unique key combines the description and the size spec.
                key = f"{row.original_description}|{row.size_spec or ''}"
                existing_materials_descriptions.add(key)
                existing_materials_with_quantity[key] = float(row.total_quantity or 0)
            logger.info(f"누적 자재 수 (Rev.0~현재): {len(existing_materials_descriptions)}")
            logger.info(f"누적 자재 총 수량: {sum(existing_materials_with_quantity.values())}")
            if len(existing_materials_descriptions) > 0:
                logger.info(f"기존 자재 샘플 (처음 3개): {list(existing_materials_descriptions)[:3]}")
                # Spot-check materials that carry a quantity.
                quantity_samples = [(k, v) for k, v in list(existing_materials_with_quantity.items())[:3]]
                logger.info(f"기존 자재 수량 샘플: {quantity_samples}")
            else:
                logger.warning(f"기존 자재가 없습니다! parent_file_id={parent_file_id}의 materials 테이블을 확인하세요.")
            # Keep the filename identical to the parent's.
            file.filename = parent_file[0]
        else:
            # Plain upload (brand-new BOM).
            logger.info(f"일반 업로드 모드: 새 BOM 파일 (Rev.0)")
        # Persist the file record (including uploader info).
        logger.info("DB 저장 시작")
        username = current_user.get('username', 'unknown')
        user_id = current_user.get('user_id')
        file_insert_query = text("""
            INSERT INTO files (filename, original_filename, file_path, job_no, revision, bom_name, description, file_size, parsed_count, is_active, uploaded_by)
            VALUES (:filename, :original_filename, :file_path, :job_no, :revision, :bom_name, :description, :file_size, :parsed_count, :is_active, :uploaded_by)
            RETURNING id
        """)
        file_result = db.execute(file_insert_query, {
            "filename": unique_filename,
            "original_filename": file.filename,
            "file_path": str(file_path),
            "job_no": job_no,
            "revision": revision,
            "bom_name": bom_name or file.filename,  # prefer bom_name, fall back to filename
            "description": f"BOM 파일 - {parsed_count}개 자재",
            "file_size": file.size,
            "parsed_count": parsed_count,
            "is_active": True,
            "uploaded_by": username
        })
        file_id = file_result.fetchone()[0]
        db.commit()  # commit the file record immediately
        logger.info(f"파일 저장 완료: file_id = {file_id}, uploaded_by = {username}")
        # Revision comparison (per RULES.md coding conventions).
        revision_comparison = None
        materials_to_classify = materials_data
        purchased_materials_map = {}  # purchase-confirmed material mapping (key -> confirmation info)
        if revision != "Rev.0":  # compare only for revision uploads
            logger.info(f"[DEBUG] 리비전 비교 시작 - revision: {revision}, parent_file_id: {parent_file_id}")
            try:
                # Lightweight comparison based on purchase_confirmed data.
                logger.info(f"[DEBUG] perform_simple_revision_comparison 호출 중...")
                revision_comparison = perform_simple_revision_comparison(db, job_no, parent_file_id, materials_data)
                logger.info(f"[DEBUG] 리비전 비교 완료: {revision_comparison.keys() if revision_comparison else 'None'}")
                if revision_comparison.get("has_purchased_materials", False):
                    logger.info(f"간단한 리비전 비교 결과:")
                    logger.info(f" - 구매확정된 자재: {revision_comparison.get('purchased_count', 0)}")
                    logger.info(f" - 미구매 자재: {revision_comparison.get('unpurchased_count', 0)}")
                    logger.info(f" - 신규 자재: {revision_comparison.get('new_count', 0)}")
                    logger.info(f" - 제외된 구매확정 자재: {revision_comparison.get('excluded_purchased_count', 0)}")
                    # Classify only new and changed materials.
                    materials_to_classify = revision_comparison.get("materials_to_classify", [])
                    logger.info(f" - 분류 필요: {len(materials_to_classify)}")
                    # Keep the purchase-confirmed material mapping for later use.
                    purchased_materials_map = revision_comparison.get("purchased_materials_map", {})
                    logger.info(f" - 구매확정 자재 매핑: {len(purchased_materials_map)}")
                else:
                    logger.info("이전 구매확정 자료 없음 - 전체 자재 분류")
            except Exception as e:
                logger.warning(f"리비전 비교 실패, 전체 자재 분류로 진행: {str(e)}")
                import traceback
                traceback.print_exc()
        logger.info(f"자재 분류 및 저장 시작: {len(materials_to_classify)}개 자재")
        # [Changed] classify and save the materials through MaterialService.
        materials_inserted = MaterialService.process_and_save_materials(
            db, file_id, materials_data,
            revision_comparison=revision_comparison,
            parent_file_id=parent_file_id,
            purchased_materials_map=purchased_materials_map
        )
        db.commit()
        logger.info(f"자재 저장 완료: {materials_inserted}")
        # [Changed] inherit purchase-request info through MaterialService.
        if parent_file_id is not None:
            MaterialService.inherit_purchase_requests(db, file_id, parent_file_id)
            db.commit()
        # Record the activity log entry.
        try:
            activity_logger = ActivityLogger(db)
            activity_logger.log_file_upload(
                username=username,
                file_id=file_id,
                filename=file.filename,
                file_size=file.size or 0,
                job_no=job_no,
                revision=revision,
                user_id=user_id,
                ip_address=request.client.host if request.client else None,
                user_agent=request.headers.get('user-agent')
            )
            logger.info(f"활동 로그 기록 완료: {username} - 파일 업로드")
        except Exception as e:
            logger.error(f"활동 로그 기록 실패: {str(e)}")
            # A logging failure must not fail the upload.
        # Revision uploads: detect drawings missing from the new revision and
        # whether the previous revision had purchase requests.
        missing_drawings_info = None
        has_previous_purchase = False
        if parent_file_id is not None:
            try:
                # Did the previous revision have any purchase requests?
                purchase_check_query = text("""
                    SELECT COUNT(*) as purchase_count
                    FROM purchase_request_items pri
                    JOIN materials m ON pri.material_id = m.id
                    WHERE m.file_id = :parent_file_id
                """)
                purchase_result = db.execute(purchase_check_query, {"parent_file_id": parent_file_id}).fetchone()
                has_previous_purchase = purchase_result.purchase_count > 0
                logger.info(f"이전 리비전 구매신청 여부: {has_previous_purchase} ({purchase_result.purchase_count}개 자재)")
                logger.info(f"parent_file_id: {parent_file_id}, new file_id: {file_id}")
                # Drawing list of the previous revision.
                prev_drawings_query = text("""
                    SELECT DISTINCT drawing_name, line_no, COUNT(*) as material_count
                    FROM materials
                    WHERE file_id = :parent_file_id
                    AND (drawing_name IS NOT NULL OR line_no IS NOT NULL)
                    GROUP BY drawing_name, line_no
                """)
                prev_drawings_result = db.execute(prev_drawings_query, {"parent_file_id": parent_file_id}).fetchall()
                logger.info(f"이전 리비전 도면 수: {len(prev_drawings_result)}")
                # Drawing list of the new revision.
                new_drawings_query = text("""
                    SELECT DISTINCT drawing_name, line_no
                    FROM materials
                    WHERE file_id = :file_id
                    AND (drawing_name IS NOT NULL OR line_no IS NOT NULL)
                """)
                new_drawings_result = db.execute(new_drawings_query, {"file_id": file_id}).fetchall()
                logger.info(f"새 리비전 도면 수: {len(new_drawings_result)}")
                prev_drawings = set()
                for row in prev_drawings_result:
                    if row.drawing_name:
                        prev_drawings.add(row.drawing_name)
                    elif row.line_no:
                        prev_drawings.add(row.line_no)
                new_drawings = set()
                for row in new_drawings_result:
                    if row.drawing_name:
                        new_drawings.add(row.drawing_name)
                    elif row.line_no:
                        new_drawings.add(row.line_no)
                missing_drawings = prev_drawings - new_drawings
                logger.info(f"이전 도면: {list(prev_drawings)[:5]}")
                logger.info(f"새 도면: {list(new_drawings)[:5]}")
                # NOTE(review): logged at error level although this is not an error condition.
                logger.error(f"누락 도면: {len(missing_drawings)}")
                if missing_drawings:
                    # Detail the materials on the missing drawings.
                    missing_materials = []
                    for row in prev_drawings_result:
                        drawing = row.drawing_name or row.line_no
                        if drawing in missing_drawings:
                            missing_materials.append({
                                "drawing_name": drawing,
                                "material_count": row.material_count
                            })
                    missing_drawings_info = {
                        "drawings": list(missing_drawings),
                        "materials": missing_materials,
                        "count": len(missing_drawings),
                        "requires_confirmation": True,
                        "has_previous_purchase": has_previous_purchase,
                        "action": "mark_as_inventory" if has_previous_purchase else "hide_materials"
                    }
                # Update the status of materials on the missing drawings.
                if missing_drawings:
                    for drawing in missing_drawings:
                        status_to_set = 'inventory' if has_previous_purchase else 'deleted_not_purchased'
                        update_status_query = text("""
                            UPDATE materials
                            SET revision_status = :status
                            WHERE file_id = :parent_file_id
                            AND (drawing_name = :drawing OR line_no = :drawing)
                        """)
                        db.execute(update_status_query, {
                            "status": status_to_set,
                            "parent_file_id": parent_file_id,
                            "drawing": drawing
                        })
                    db.commit()
                    logger.info(f"누락 도면 자재 상태 업데이트: {len(missing_drawings)}개 도면 -> {status_to_set}")
            except Exception as e:
                logger.error(f"누락 도면 감지 실패: {str(e)}")
                db.rollback()
                # Detection failure must not fail the upload.
        # Use a different message for revision uploads.
        if parent_file_id is not None:
            message = f"리비전 업로드 성공! {new_materials_count}개의 신규 자재가 추가되었습니다."
        else:
            message = f"업로드 성공! {materials_inserted}개 자재가 분류되었습니다."
        response_data = {
            "success": True,
            "message": message,
            "original_filename": file.filename,
            "file_id": file_id,
            "materials_count": materials_inserted,
            "saved_materials_count": materials_inserted,
            "new_materials_count": new_materials_count if parent_file_id is not None else None,
            "revision": revision,
            "uploaded_by": username,
            "parsed_count": parsed_count
        }
        # Attach missing-drawing info when present.
        if missing_drawings_info:
            response_data["missing_drawings"] = missing_drawings_info
        # Attach the revision comparison result when present.
        if revision_comparison:
            response_data["revision_comparison"] = {
                "has_purchased_materials": revision_comparison.get("has_purchased_materials", False),
                "purchased_count": revision_comparison.get("purchased_count", 0),
                "unpurchased_count": revision_comparison.get("unpurchased_count", 0),
                "new_count": revision_comparison.get("new_count", 0),
                "removed_count": revision_comparison.get("removed_count", 0),
                "excluded_purchased_count": revision_comparison.get("excluded_purchased_count", 0),
                "removed_materials": revision_comparison.get("removed_materials", []),
                "total_previous": revision_comparison.get("total_previous", 0),
                "total_new": revision_comparison.get("total_new", 0)
            }
        return response_data
    except Exception as e:
        import traceback
        error_details = traceback.format_exc()
        logger.error(f"파일 업로드 실패 - 상세 에러:")
        logger.error(error_details)
        db.rollback()
        # Clean up the stored file on failure.
        if os.path.exists(file_path):
            os.remove(file_path)
        raise HTTPException(status_code=500, detail=f"파일 처리 실패: {str(e)}")
@router.get("/")
async def get_files(
    job_no: Optional[str] = None,
    db: Session = Depends(get_db),
    current_user: dict = Depends(get_current_user)
):
    """Return active files, newest upload first, optionally filtered by job number."""
    try:
        sql = """
            SELECT id, filename, original_filename, bom_name, job_no, revision,
                   description, file_size, parsed_count, upload_date, is_active
            FROM files
            WHERE is_active = TRUE
        """
        bind_params = {}
        if job_no:
            sql += " AND job_no = :job_no"
            bind_params["job_no"] = job_no
        sql += " ORDER BY upload_date DESC"
        rows = db.execute(text(sql), bind_params).fetchall()
        file_dicts = []
        for row in rows:
            file_dicts.append({
                "id": row.id,
                "filename": row.filename,
                "original_filename": row.original_filename,
                "bom_name": row.bom_name,
                "job_no": row.job_no,
                "revision": row.revision,
                "description": row.description,
                "file_size": row.file_size,
                "parsed_count": row.parsed_count,
                "created_at": row.upload_date,
                "is_active": row.is_active
            })
        return file_dicts
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"파일 목록 조회 실패: {str(e)}")
@router.get("/list")
async def get_files_list(
    job_no: Optional[str] = None,
    db: Session = Depends(get_db),
    current_user: dict = Depends(get_current_user)
):
    """Return active files in upload order with a count (used by the revision-mode check)."""
    try:
        sql = """
            SELECT id, filename, original_filename, bom_name, job_no, revision,
                   description, file_size, parsed_count, upload_date, is_active
            FROM files
            WHERE is_active = TRUE
        """
        bind_params = {}
        if job_no:
            sql += " AND job_no = :job_no"
            bind_params["job_no"] = job_no
        # Oldest first: callers rely on upload order.
        sql += " ORDER BY upload_date ASC"
        rows = db.execute(text(sql), bind_params).fetchall()
        files_list = []
        for row in rows:
            files_list.append({
                "id": row.id,
                "filename": row.filename,
                "original_filename": row.original_filename,
                "bom_name": row.bom_name,
                "job_no": row.job_no,
                "revision": row.revision,
                "description": row.description,
                "file_size": row.file_size,
                "parsed_count": row.parsed_count,
                "upload_date": row.upload_date,
                "is_active": row.is_active
            })
        return {
            "success": True,
            "files": files_list,
            "total_count": len(files_list)
        }
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"파일 목록 조회 실패: {str(e)}")
@router.get("/project/{project_code}")
async def get_files_by_project(
    project_code: str,
    db: Session = Depends(get_db),
    current_user: dict = Depends(get_current_user)
):
    """Return active files for one project (job number), newest upload first."""
    try:
        sql = """
            SELECT id, filename, original_filename, bom_name, job_no, revision,
                   description, file_size, parsed_count, upload_date, is_active
            FROM files
            WHERE is_active = TRUE AND job_no = :job_no
            ORDER BY upload_date DESC
        """
        rows = db.execute(text(sql), {"job_no": project_code}).fetchall()
        file_dicts = []
        for row in rows:
            file_dicts.append({
                "id": row.id,
                "filename": row.filename,
                "original_filename": row.original_filename,
                "bom_name": row.bom_name,
                "job_no": row.job_no,
                "revision": row.revision,
                "description": row.description,
                "file_size": row.file_size,
                "parsed_count": row.parsed_count,
                "upload_date": row.upload_date,
                # Duplicate key kept for older clients expecting created_at.
                "created_at": row.upload_date,
                "is_active": row.is_active
            })
        return file_dicts
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"프로젝트 파일 조회 실패: {str(e)}")
@router.get("/stats")
async def get_files_stats(db: Session = Depends(get_db), current_user: dict = Depends(get_current_user)):
    """Aggregate counts of active files and all materials, plus the five most recent uploads."""
    try:
        # Counts: active files and every material row.
        total_files = db.execute(text("SELECT COUNT(*) FROM files WHERE is_active = true")).fetchone()[0]
        total_materials = db.execute(text("SELECT COUNT(*) FROM materials")).fetchone()[0]
        # Five most recent uploads with their project names.
        recent_rows = db.execute(text("""
            SELECT f.original_filename, f.upload_date, f.parsed_count, j.job_name
            FROM files f
            LEFT JOIN jobs j ON f.job_no = j.job_no
            WHERE f.is_active = true
            ORDER BY f.upload_date DESC
            LIMIT 5
        """)).fetchall()
        recent_payload = [
            {
                "filename": row.original_filename,
                "created_at": row.upload_date,
                "parsed_count": row.parsed_count or 0,
                "project_name": row.job_name or "Unknown"
            }
            for row in recent_rows
        ]
        return {
            "success": True,
            "totalFiles": total_files,
            "totalMaterials": total_materials,
            "recentUploads": recent_payload
        }
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"통계 조회 실패: {str(e)}")
@router.delete("/delete/{file_id}")
async def delete_file(file_id: int, db: Session = Depends(get_db), current_user: dict = Depends(get_current_user)):
    """Delete a file record and its dependent materials.

    Raises HTTPException(404) when the file does not exist; any other
    failure rolls the transaction back and surfaces as HTTPException(500).
    """
    try:
        # Delete dependent materials first (FK ordering).
        db.execute(text("DELETE FROM materials WHERE file_id = :file_id"), {"file_id": file_id})
        # Delete the file record itself.
        result = db.execute(text("DELETE FROM files WHERE id = :file_id"), {"file_id": file_id})
        if result.rowcount == 0:
            raise HTTPException(status_code=404, detail="파일을 찾을 수 없습니다")
        db.commit()
        return {
            "success": True,
            "message": "파일이 삭제되었습니다"
        }
    except HTTPException:
        # Bug fix: the generic handler below used to swallow the 404 above
        # and re-raise it as a 500; re-raise HTTP errors untouched.
        db.rollback()
        raise
    except Exception as e:
        db.rollback()
        raise HTTPException(status_code=500, detail=f"파일 삭제 실패: {str(e)}")
@router.get("/materials-v2") # 완전히 새로운 엔드포인트
async def get_materials(
project_id: Optional[int] = None,
file_id: Optional[int] = None,
job_no: Optional[str] = None,
filename: Optional[str] = None,
revision: Optional[str] = None,
skip: int = 0,
limit: int = 100,
search: Optional[str] = None,
item_type: Optional[str] = None,
material_grade: Optional[str] = None,
size_spec: Optional[str] = None,
file_filter: Optional[str] = None,
sort_by: Optional[str] = None,
exclude_requested: bool = True, # 구매신청된 자재 제외 여부
group_by_spec: bool = False, # 같은 사양끼리 그룹화
db: Session = Depends(get_db),
current_user: dict = Depends(get_current_user)
):
"""
저장된 자재 목록 조회 (job_no, filename, revision 3가지로 필터링 가능) - 신버전
"""
try:
# 로그 제거 - 과도한 출력 방지
query = """
SELECT m.id, m.file_id, m.original_description, m.quantity, m.unit,
m.size_spec, m.main_nom, m.red_nom, m.material_grade, m.full_material_grade, m.line_number, m.row_number,
m.drawing_name, m.line_no, m.revision_status,
m.created_at, m.classified_category, m.classification_confidence,
m.classification_details,
m.is_verified, m.verified_by, m.verified_at,
m.brand, m.user_requirement,
f.original_filename, f.project_id, f.job_no, f.revision,
p.official_project_code, p.project_name,
pd.outer_diameter, pd.schedule, pd.material_spec, pd.manufacturing_method,
pd.end_preparation, pd.length_mm,
pep.end_preparation_type, pep.end_preparation_code, pep.machining_required,
pep.cutting_note, pep.clean_description as pipe_clean_description,
fd.fitting_type, fd.fitting_subtype, fd.connection_method, fd.pressure_rating,
fd.material_standard, fd.material_grade as fitting_material_grade, fd.main_size,
fd.reduced_size, fd.length_mm as fitting_length_mm, fd.schedule as fitting_schedule,
gd.gasket_type, gd.gasket_subtype, gd.material_type as gasket_material_type,
gd.filler_material, gd.pressure_rating as gasket_pressure_rating, gd.size_inches as gasket_size_inches,
gd.thickness as gasket_thickness, gd.temperature_range as gasket_temperature_range, gd.fire_safe,
mpt.confirmed_quantity, mpt.purchase_status, mpt.confirmed_by, mpt.confirmed_at,
-- 구매수량 계산에서 분류된 정보를 우선 사용
CASE
WHEN mpt.id IS NOT NULL THEN
CASE
WHEN mpt.description LIKE '%PIPE%' OR mpt.description LIKE '%파이프%' THEN 'PIPE'
WHEN mpt.description LIKE '%FITTING%' OR mpt.description LIKE '%피팅%' OR mpt.description LIKE '%NIPPLE%' OR mpt.description LIKE '%ELBOW%' OR mpt.description LIKE '%TEE%' OR mpt.description LIKE '%REDUCER%' THEN 'FITTING'
WHEN mpt.description LIKE '%VALVE%' OR mpt.description LIKE '%밸브%' THEN 'VALVE'
WHEN mpt.description LIKE '%FLANGE%' OR mpt.description LIKE '%플랜지%' THEN 'FLANGE'
WHEN mpt.description LIKE '%BOLT%' OR mpt.description LIKE '%볼트%' OR mpt.description LIKE '%STUD%' THEN 'BOLT'
WHEN mpt.description LIKE '%GASKET%' OR mpt.description LIKE '%가스켓%' THEN 'GASKET'
WHEN mpt.description LIKE '%INSTRUMENT%' OR mpt.description LIKE '%계기%' THEN 'INSTRUMENT'
ELSE m.classified_category
END
ELSE m.classified_category
END as final_classified_category,
-- 구매수량 계산 완료 여부
CASE WHEN mpt.id IS NOT NULL THEN true ELSE m.is_verified END as final_is_verified,
CASE WHEN mpt.id IS NOT NULL THEN 'purchase_calculation' ELSE m.verified_by END as final_verified_by
FROM materials m
LEFT JOIN files f ON m.file_id = f.id
LEFT JOIN projects p ON f.project_id = p.id
-- 구매신청된 자재 제외
LEFT JOIN purchase_request_items pri ON m.id = pri.material_id
LEFT JOIN pipe_details pd ON m.id = pd.material_id
LEFT JOIN pipe_end_preparations pep ON m.id = pep.material_id
LEFT JOIN fitting_details fd ON m.id = fd.material_id
LEFT JOIN valve_details vd ON m.id = vd.material_id
LEFT JOIN gasket_details gd ON m.id = gd.material_id
LEFT JOIN material_purchase_tracking mpt ON (
m.material_hash = mpt.material_hash
AND f.job_no = mpt.job_no
AND f.revision = mpt.revision
)
WHERE 1=1
"""
params = {}
# 구매신청된 자재 제외
if exclude_requested:
query += " AND pri.material_id IS NULL"
if project_id:
query += " AND f.project_id = :project_id"
params["project_id"] = project_id
if file_id:
query += " AND m.file_id = :file_id"
params["file_id"] = file_id
if job_no:
query += " AND f.job_no = :job_no"
params["job_no"] = job_no
if filename:
query += " AND f.original_filename = :filename"
params["filename"] = filename
if revision:
query += " AND f.revision = :revision"
params["revision"] = revision
if search:
query += " AND (m.original_description ILIKE :search OR m.material_grade ILIKE :search)"
params["search"] = f"%{search}%"
if item_type:
query += " AND m.classified_category = :item_type"
params["item_type"] = item_type
if material_grade:
query += " AND m.material_grade ILIKE :material_grade"
params["material_grade"] = f"%{material_grade}%"
if size_spec:
query += " AND m.size_spec ILIKE :size_spec"
params["size_spec"] = f"%{size_spec}%"
if file_filter:
query += " AND f.original_filename ILIKE :file_filter"
params["file_filter"] = f"%{file_filter}%"
# 정렬 처리
if sort_by:
if sort_by == "quantity_desc":
query += " ORDER BY m.quantity DESC"
elif sort_by == "quantity_asc":
query += " ORDER BY m.quantity ASC"
elif sort_by == "name_asc":
query += " ORDER BY m.original_description ASC"
elif sort_by == "name_desc":
query += " ORDER BY m.original_description DESC"
elif sort_by == "created_desc":
query += " ORDER BY m.created_at DESC"
elif sort_by == "created_asc":
query += " ORDER BY m.created_at ASC"
else:
query += " ORDER BY m.line_number ASC"
else:
query += " ORDER BY m.line_number ASC"
query += " LIMIT :limit OFFSET :skip"
params["limit"] = limit
params["skip"] = skip
result = db.execute(text(query), params)
materials = result.fetchall()
# 전체 개수 조회
count_query = """
SELECT COUNT(*) as total
FROM materials m
LEFT JOIN files f ON m.file_id = f.id
WHERE 1=1
"""
count_params = {}
if project_id:
count_query += " AND f.project_id = :project_id"
count_params["project_id"] = project_id
if file_id:
count_query += " AND m.file_id = :file_id"
count_params["file_id"] = file_id
if search:
count_query += " AND (m.original_description ILIKE :search OR m.material_grade ILIKE :search)"
count_params["search"] = f"%{search}%"
if item_type:
count_query += " AND m.classified_category = :item_type"
count_params["item_type"] = item_type
if material_grade:
count_query += " AND m.material_grade ILIKE :material_grade"
count_params["material_grade"] = f"%{material_grade}%"
if size_spec:
count_query += " AND m.size_spec ILIKE :size_spec"
count_params["size_spec"] = f"%{size_spec}%"
if file_filter:
count_query += " AND f.original_filename ILIKE :file_filter"
count_params["file_filter"] = f"%{file_filter}%"
count_result = db.execute(text(count_query), count_params)
total_count = count_result.fetchone()[0]
# 파이프 그룹핑을 위한 딕셔너리
pipe_groups = {}
# 니플 그룹핑을 위한 딕셔너리 (길이 기반)
nipple_groups = {}
# 일반 피팅 그룹핑을 위한 딕셔너리 (수량 기반)
fitting_groups = {}
# 플랜지 그룹핑을 위한 딕셔너리
flange_groups = {}
# 밸브 그룹핑을 위한 딕셔너리
valve_groups = {}
# 볼트 그룹핑을 위한 딕셔너리
bolt_groups = {}
# 가스켓 그룹핑을 위한 딕셔너리
gasket_groups = {}
# UNKNOWN 그룹핑을 위한 딕셔너리
unknown_groups = {}
# 각 자재의 상세 정보도 가져오기
material_list = []
valve_count = 0
for m in materials:
if m.classified_category == 'VALVE':
valve_count += 1
# 디버깅: 첫 번째 자재의 모든 속성 출력
if len(material_list) == 0:
# 로그 제거
pass
# 개선된 재질 정보 추출
final_category = m.final_classified_category or m.classified_category
enhanced_material_grade = extract_enhanced_material_grade(
m.original_description,
m.material_grade,
final_category
)
material_dict = {
"id": m.id,
"file_id": m.file_id,
"filename": m.original_filename,
"project_id": m.project_id,
"project_code": m.official_project_code,
"project_name": m.project_name,
"original_description": m.original_description,
"quantity": float(m.quantity) if m.quantity else 0,
"unit": m.unit,
"size_spec": m.size_spec,
"main_nom": m.main_nom,
"red_nom": m.red_nom,
"material_grade": m.full_material_grade or enhanced_material_grade,
"original_material_grade": m.material_grade,
"full_material_grade": m.full_material_grade,
"drawing_name": m.drawing_name,
"line_no": m.line_no,
"revision_status": m.revision_status or 'active',
"line_number": m.line_number,
"row_number": m.row_number,
# 구매수량 계산에서 분류된 정보를 우선 사용
"classified_category": final_category,
"classification_confidence": float(m.classification_confidence) if m.classification_confidence else 0.0,
"classification_details": m.classification_details,
"is_verified": m.final_is_verified if m.final_is_verified is not None else m.is_verified,
"verified_by": m.final_verified_by or m.verified_by,
"verified_at": m.verified_at,
"purchase_confirmed": bool(m.confirmed_quantity),
"confirmed_quantity": float(m.confirmed_quantity) if m.confirmed_quantity else None,
"purchase_status": m.purchase_status,
"purchase_confirmed_by": m.confirmed_by,
"purchase_confirmed_at": m.confirmed_at,
"created_at": m.created_at,
# 브랜드와 사용자 요구사항 필드 추가
"brand": m.brand,
"user_requirement": m.user_requirement
}
# 카테고리별 상세 정보 추가 (JOIN 결과 사용)
if m.classified_category == 'PIPE':
# JOIN된 결과에서 pipe_details 정보 가져오기
if hasattr(m, 'outer_diameter') and m.outer_diameter is not None:
pipe_details = {
"outer_diameter": m.outer_diameter,
"schedule": m.schedule,
"material_spec": m.material_spec,
"manufacturing_method": m.manufacturing_method,
"end_preparation": m.end_preparation,
"length_mm": float(m.length_mm) if m.length_mm else None
}
# 파이프 그룹핑 키 생성 (끝단 가공 정보 제외하고 그룹핑)
# pep 테이블에서 clean_description을 가져오거나, 없으면 직접 계산
if hasattr(m, 'pipe_clean_description') and m.pipe_clean_description:
clean_description = m.pipe_clean_description
else:
from ..services.pipe_classifier import get_purchase_pipe_description
clean_description = get_purchase_pipe_description(m.original_description)
pipe_key = f"{clean_description}|{m.size_spec}|{m.material_grade}"
# 로그 제거 - 과도한 출력 방지
if pipe_key not in pipe_groups:
pipe_groups[pipe_key] = {
"total_length_mm": 0,
"total_quantity": 0,
"materials": []
}
# 개별 파이프 길이 합산 (DB에 저장된 실제 길이 사용)
if pipe_details["length_mm"]:
# ✅ DB에서 가져온 length_mm는 이미 개별 파이프의 실제 길이이므로 수량을 곱하지 않음
individual_length = float(pipe_details["length_mm"])
pipe_groups[pipe_key]["total_length_mm"] += individual_length
pipe_groups[pipe_key]["total_quantity"] += 1 # 파이프 개수는 1개씩 증가
pipe_groups[pipe_key]["materials"].append(material_dict)
# 개별 길이 정보를 pipe_details에 추가
pipe_details["individual_total_length"] = individual_length
# 구매용 깨끗한 설명도 추가
material_dict['clean_description'] = clean_description
material_dict['pipe_details'] = pipe_details
elif m.classified_category == 'FITTING':
# CAP과 PLUG 먼저 처리 (fitting_type이 없을 수 있음)
if 'CAP' in m.original_description.upper() or 'PLUG' in m.original_description.upper():
# CAP과 PLUG 그룹핑
from ..services.pipe_classifier import get_purchase_pipe_description
clean_description = get_purchase_pipe_description(m.original_description)
fitting_key = f"{clean_description}|{m.size_spec}|{m.material_grade}"
if fitting_key not in fitting_groups:
fitting_groups[fitting_key] = {
"total_quantity": 0,
"materials": []
}
fitting_groups[fitting_key]["total_quantity"] += material_dict["quantity"]
fitting_groups[fitting_key]["materials"].append(material_dict)
material_dict['clean_description'] = clean_description
# JOIN된 fitting_details 데이터 직접 사용
elif hasattr(m, 'fitting_type') and m.fitting_type is not None:
# 로그 제거 - 과도한 출력 방지
fitting_details = {
"fitting_type": m.fitting_type,
"fitting_subtype": m.fitting_subtype,
"connection_method": m.connection_method,
"pressure_rating": m.pressure_rating,
"material_standard": m.material_standard,
"material_grade": m.fitting_material_grade,
"main_size": m.main_size,
"reduced_size": m.reduced_size,
"length_mm": float(m.fitting_length_mm) if m.fitting_length_mm else None,
"schedule": m.fitting_schedule
}
material_dict['fitting_details'] = fitting_details
# 니플인 경우 길이 기반 그룹핑
if 'NIPPLE' in m.original_description.upper() and m.fitting_length_mm:
# 끝단 가공 정보 제거
from ..services.pipe_classifier import get_purchase_pipe_description
clean_description = get_purchase_pipe_description(m.original_description)
nipple_key = f"{clean_description}|{m.size_spec}|{m.material_grade}|{m.fitting_length_mm}mm"
# 로그 제거 - 과도한 출력 방지
if nipple_key not in nipple_groups:
nipple_groups[nipple_key] = {
"total_length_mm": 0,
"total_quantity": 0,
"materials": []
}
# 개별 니플 길이 합산 (수량 × 단위길이) - 타입 변환
individual_total_length = float(material_dict["quantity"]) * float(m.fitting_length_mm)
nipple_groups[nipple_key]["total_length_mm"] += individual_total_length
nipple_groups[nipple_key]["total_quantity"] += material_dict["quantity"]
nipple_groups[nipple_key]["materials"].append(material_dict)
# 총길이 정보를 fitting_details에 추가
fitting_details["individual_total_length"] = individual_total_length
fitting_details["is_nipple"] = True
# 구매용 깨끗한 설명도 추가
material_dict['clean_description'] = clean_description
else:
# 일반 피팅 (니플이 아닌 경우) - 수량 기반 그룹핑
from ..services.pipe_classifier import get_purchase_pipe_description
clean_description = get_purchase_pipe_description(m.original_description)
fitting_key = f"{clean_description}|{m.size_spec}|{m.material_grade}"
if fitting_key not in fitting_groups:
fitting_groups[fitting_key] = {
"total_quantity": 0,
"materials": []
}
fitting_groups[fitting_key]["total_quantity"] += material_dict["quantity"]
fitting_groups[fitting_key]["materials"].append(material_dict)
# 구매용 깨끗한 설명도 추가
material_dict['clean_description'] = clean_description
elif m.classified_category == 'FLANGE':
flange_query = text("SELECT * FROM flange_details WHERE material_id = :material_id")
flange_result = db.execute(flange_query, {"material_id": m.id})
flange_detail = flange_result.fetchone()
if flange_detail:
# 개선된 플랜지 타입 (PIPE측 연결면 포함)
enhanced_flange_type = extract_enhanced_flange_type(
m.original_description,
flange_detail.flange_type
)
material_dict['flange_details'] = {
"flange_type": enhanced_flange_type, # 개선된 타입 사용
"original_flange_type": flange_detail.flange_type, # 원본 타입 보존
"facing_type": flange_detail.facing_type,
"pressure_rating": flange_detail.pressure_rating,
"material_standard": flange_detail.material_standard,
"material_grade": flange_detail.material_grade,
"size_inches": flange_detail.size_inches
}
# 플랜지 그룹핑 추가
from ..services.pipe_classifier import get_purchase_pipe_description
clean_description = get_purchase_pipe_description(m.original_description)
flange_key = f"{m.size_spec}|{m.material_grade}|{flange_detail.pressure_rating if flange_detail else ''}|{flange_detail.flange_type if flange_detail else ''}"
if flange_key not in flange_groups:
flange_groups[flange_key] = {
"total_quantity": 0,
"materials": []
}
flange_groups[flange_key]["total_quantity"] += material_dict["quantity"]
flange_groups[flange_key]["materials"].append(material_dict)
material_dict['clean_description'] = clean_description
elif m.classified_category == 'GASKET':
# 이미 JOIN된 gasket_details 데이터 사용
if m.gasket_type: # gasket_details가 있는 경우
material_dict['gasket_details'] = {
"gasket_type": m.gasket_type,
"gasket_subtype": m.gasket_subtype,
"material_type": m.gasket_material_type,
"filler_material": m.filler_material,
"pressure_rating": m.gasket_pressure_rating,
"size_inches": m.gasket_size_inches,
"thickness": m.gasket_thickness,
"temperature_range": m.gasket_temperature_range,
"fire_safe": m.fire_safe
}
# 가스켓 그룹핑 - 크기, 압력, 재질로 그룹핑
# original_description에서 주요 정보 추출
description = m.original_description or ''
gasket_key = f"{m.size_spec}|{description}"
if gasket_key not in gasket_groups:
gasket_groups[gasket_key] = {
"total_quantity": 0,
"materials": []
}
gasket_groups[gasket_key]["total_quantity"] += material_dict["quantity"]
gasket_groups[gasket_key]["materials"].append(material_dict)
elif m.classified_category == 'VALVE':
valve_query = text("SELECT * FROM valve_details WHERE material_id = :material_id")
valve_result = db.execute(valve_query, {"material_id": m.id})
valve_detail = valve_result.fetchone()
if valve_detail:
material_dict['valve_details'] = {
"valve_type": valve_detail.valve_type,
"valve_subtype": valve_detail.valve_subtype,
"actuator_type": valve_detail.actuator_type,
"connection_method": valve_detail.connection_method,
"pressure_rating": valve_detail.pressure_rating,
"body_material": valve_detail.body_material,
"size_inches": valve_detail.size_inches
}
# 밸브 그룹핑 추가
from ..services.pipe_classifier import get_purchase_pipe_description
clean_description = get_purchase_pipe_description(m.original_description)
valve_key = f"{clean_description}|{m.size_spec}|{m.material_grade}"
if valve_key not in valve_groups:
valve_groups[valve_key] = {
"total_quantity": 0,
"materials": []
}
valve_groups[valve_key]["total_quantity"] += material_dict["quantity"]
valve_groups[valve_key]["materials"].append(material_dict)
material_dict['clean_description'] = clean_description
elif m.classified_category == 'BOLT':
bolt_query = text("SELECT * FROM bolt_details WHERE material_id = :material_id")
bolt_result = db.execute(bolt_query, {"material_id": m.id})
bolt_detail = bolt_result.fetchone()
if bolt_detail:
material_dict['bolt_details'] = {
"bolt_type": bolt_detail.bolt_type,
"thread_type": bolt_detail.thread_type,
"diameter": bolt_detail.diameter,
"length": bolt_detail.length,
"material_standard": bolt_detail.material_standard,
"material_grade": bolt_detail.material_grade,
"coating_type": bolt_detail.coating_type,
"pressure_rating": bolt_detail.pressure_rating
}
# 볼트 그룹핑 추가 - 크기, 재질, 길이로 그룹핑
# 원본 설명에서 길이 추출
import re
length_match = re.search(r'(\d+(?:\.\d+)?)\s*(?:LG|MM)', m.original_description.upper())
bolt_length = length_match.group(1) if length_match else 'UNKNOWN'
bolt_key = f"{m.size_spec}|{m.material_grade}|{bolt_length}"
if bolt_key not in bolt_groups:
bolt_groups[bolt_key] = {
"total_quantity": 0,
"materials": []
}
bolt_groups[bolt_key]["total_quantity"] += material_dict["quantity"]
bolt_groups[bolt_key]["materials"].append(material_dict)
# 파이프, 니플, 일반 피팅, 플랜지가 아닌 경우만 바로 추가 (이들은 그룹핑 후 추가)
is_nipple = (m.classified_category == 'FITTING' and
('NIPPLE' in m.original_description.upper() or
(hasattr(m, 'fitting_type') and m.fitting_type == 'NIPPLE')))
# CAP과 PLUG도 일반 피팅으로 처리
is_cap_or_plug = (m.classified_category == 'FITTING' and
('CAP' in m.original_description.upper() or 'PLUG' in m.original_description.upper()))
is_general_fitting = (m.classified_category == 'FITTING' and not is_nipple and
((hasattr(m, 'fitting_type') and m.fitting_type is not None) or is_cap_or_plug))
is_flange = (m.classified_category == 'FLANGE')
is_valve = (m.classified_category == 'VALVE')
is_bolt = (m.classified_category == 'BOLT')
is_gasket = (m.classified_category == 'GASKET')
# UNKNOWN 카테고리 그룹핑 처리
if m.classified_category == 'UNKNOWN':
unknown_key = m.original_description or 'UNKNOWN'
if unknown_key not in unknown_groups:
unknown_groups[unknown_key] = {
"total_quantity": 0,
"materials": []
}
unknown_groups[unknown_key]["total_quantity"] += material_dict["quantity"]
unknown_groups[unknown_key]["materials"].append(material_dict)
elif m.classified_category != 'PIPE' and not is_nipple and not is_general_fitting and not is_flange and not is_valve and not is_bolt and not is_gasket:
material_list.append(material_dict)
# 파이프 그룹별로 대표 파이프 하나만 추가 (그룹핑된 정보로)
for pipe_key, group_info in pipe_groups.items():
if group_info["materials"]:
# 그룹의 첫 번째 파이프를 대표로 사용
representative_pipe = group_info["materials"][0].copy()
# 그룹핑된 정보로 업데이트
representative_pipe['quantity'] = group_info["total_quantity"]
representative_pipe['original_description'] = representative_pipe['clean_description'] # 깨끗한 설명 사용
if 'pipe_details' in representative_pipe:
representative_pipe['pipe_details']['total_length_mm'] = group_info["total_length_mm"]
representative_pipe['pipe_details']['pipe_count'] = group_info["total_quantity"] # ✅ pipe_count 추가
representative_pipe['pipe_details']['group_total_quantity'] = group_info["total_quantity"]
# 평균 단위 길이 계산
if group_info["total_quantity"] > 0:
representative_pipe['pipe_details']['avg_length_mm'] = group_info["total_length_mm"] / group_info["total_quantity"]
# 개별 파이프 길이 정보 수집
individual_pipes = []
for mat in group_info["materials"]:
if 'pipe_details' in mat and mat['pipe_details'].get('length_mm'):
individual_pipes.append({
'length': mat['pipe_details']['length_mm'],
'quantity': 1,
'id': mat['id']
})
representative_pipe['pipe_details']['individual_pipes'] = individual_pipes
# 그룹화된 모든 자재 ID 저장
representative_pipe['grouped_ids'] = [mat['id'] for mat in group_info["materials"]]
material_list.append(representative_pipe)
# 니플 그룹별로 대표 니플 하나만 추가 (그룹핑된 정보로)
try:
for nipple_key, group_info in nipple_groups.items():
if group_info["materials"]:
# 그룹의 첫 번째 니플을 대표로 사용
representative_nipple = group_info["materials"][0].copy()
# 그룹핑된 정보로 업데이트
representative_nipple['quantity'] = group_info["total_quantity"]
representative_nipple['original_description'] = representative_nipple.get('clean_description', representative_nipple['original_description']) # 깨끗한 설명 사용
if 'fitting_details' in representative_nipple:
representative_nipple['fitting_details']['total_length_mm'] = group_info["total_length_mm"]
representative_nipple['fitting_details']['group_total_quantity'] = group_info["total_quantity"]
# 평균 단위 길이 계산
if group_info["total_quantity"] > 0:
representative_nipple['fitting_details']['avg_length_mm'] = group_info["total_length_mm"] / group_info["total_quantity"]
material_list.append(representative_nipple)
except Exception as nipple_error:
# 로그 제거
# 니플 그룹핑 실패시에도 계속 진행
pass
# 일반 피팅 그룹별로 대표 피팅 하나만 추가 (그룹핑된 정보로)
try:
for fitting_key, group_info in fitting_groups.items():
if group_info["materials"]:
representative_fitting = group_info["materials"][0].copy()
representative_fitting['quantity'] = group_info["total_quantity"]
representative_fitting['original_description'] = representative_fitting.get('clean_description', representative_fitting['original_description'])
if 'fitting_details' in representative_fitting:
representative_fitting['fitting_details']['group_total_quantity'] = group_info["total_quantity"]
material_list.append(representative_fitting)
except Exception as fitting_error:
# 로그 제거
# 피팅 그룹핑 실패시에도 계속 진행
pass
# 플랜지 그룹별로 대표 플랜지 하나만 추가 (그룹핑된 정보로)
try:
for flange_key, group_info in flange_groups.items():
if group_info["materials"]:
representative_flange = group_info["materials"][0].copy()
representative_flange['quantity'] = group_info["total_quantity"]
# original_description은 그대로 유지 (SCH 정보 보존)
# representative_flange['original_description'] = representative_flange.get('clean_description', representative_flange['original_description'])
if 'flange_details' in representative_flange:
representative_flange['flange_details']['group_total_quantity'] = group_info["total_quantity"]
material_list.append(representative_flange)
except Exception as flange_error:
# 플랜지 그룹핑 실패시에도 계속 진행
pass
# 밸브 그룹별로 대표 밸브 하나만 추가 (그룹핑된 정보로)
logger.info(f"전체 밸브 수: {valve_count}, valve_groups 수: {len(valve_groups)}")
try:
for valve_key, group_info in valve_groups.items():
if group_info["materials"]:
representative_valve = group_info["materials"][0].copy()
representative_valve['quantity'] = group_info["total_quantity"]
if 'valve_details' in representative_valve:
representative_valve['valve_details']['group_total_quantity'] = group_info["total_quantity"]
material_list.append(representative_valve)
logger.info(f"밸브 추가됨 - {valve_key}, 수량: {group_info['total_quantity']}")
except Exception as valve_error:
logger.error(f"밸브 그룹핑 실패 - {valve_error}")
# 밸브 그룹핑 실패시에도 계속 진행
pass
# 볼트 그룹별로 대표 볼트 하나만 추가 (그룹핑된 정보로)
logger.info(f"bolt_groups 수: {len(bolt_groups)}")
try:
for bolt_key, group_info in bolt_groups.items():
if group_info["materials"]:
representative_bolt = group_info["materials"][0].copy()
representative_bolt['quantity'] = group_info["total_quantity"]
if 'bolt_details' in representative_bolt:
representative_bolt['bolt_details']['group_total_quantity'] = group_info["total_quantity"]
material_list.append(representative_bolt)
logger.info(f"볼트 추가됨 - {bolt_key}, 수량: {group_info['total_quantity']}")
except Exception as bolt_error:
logger.error(f"볼트 그룹핑 실패 - {bolt_error}")
# 볼트 그룹핑 실패시에도 계속 진행
pass
# 가스켓 그룹별로 대표 가스켓 하나만 추가 (그룹핑된 정보로)
logger.info(f"gasket_groups 수: {len(gasket_groups)}")
try:
for gasket_key, group_info in gasket_groups.items():
if group_info["materials"]:
representative_gasket = group_info["materials"][0].copy()
representative_gasket['quantity'] = group_info["total_quantity"]
if 'gasket_details' in representative_gasket:
representative_gasket['gasket_details']['group_total_quantity'] = group_info["total_quantity"]
material_list.append(representative_gasket)
logger.info(f"가스켓 추가됨 - {gasket_key}, 수량: {group_info['total_quantity']}")
except Exception as gasket_error:
logger.error(f"가스켓 그룹핑 실패 - {gasket_error}")
# 가스켓 그룹핑 실패시에도 계속 진행
pass
# UNKNOWN 그룹별로 대표 항목 하나만 추가 (그룹핑된 정보로)
logger.info(f"unknown_groups 수: {len(unknown_groups)}")
try:
for unknown_key, group_info in unknown_groups.items():
if group_info["materials"]:
representative_unknown = group_info["materials"][0].copy()
representative_unknown['quantity'] = group_info["total_quantity"]
material_list.append(representative_unknown)
logger.info(f"UNKNOWN 추가됨 - {unknown_key[:50]}, 수량: {group_info['total_quantity']}")
except Exception as unknown_error:
logger.error(f"UNKNOWN 그룹핑 실패 - {unknown_error}")
# UNKNOWN 그룹핑 실패시에도 계속 진행
pass
return {
"success": True,
"total_count": total_count,
"returned_count": len(materials),
"skip": skip,
"limit": limit,
"materials": material_list
}
except Exception as e:
raise HTTPException(status_code=500, detail=f"자재 조회 실패: {str(e)}")
@router.get("/materials/summary")
async def get_materials_summary(
    project_id: Optional[int] = None,
    file_id: Optional[int] = None,
    db: Session = Depends(get_db),
    current_user: dict = Depends(get_current_user)
):
    """Return aggregate statistics over stored materials.

    Optional ``project_id`` / ``file_id`` query parameters narrow the
    aggregation; with neither given, all material rows are summarised.

    Raises:
        HTTPException: 500 when the aggregation query fails.
    """
    try:
        base_sql = """
        SELECT
            COUNT(*) as total_items,
            COUNT(DISTINCT m.original_description) as unique_descriptions,
            COUNT(DISTINCT m.size_spec) as unique_sizes,
            COUNT(DISTINCT m.material_grade) as unique_materials,
            SUM(m.quantity) as total_quantity,
            AVG(m.quantity) as avg_quantity,
            MIN(m.created_at) as earliest_upload,
            MAX(m.created_at) as latest_upload
        FROM materials m
        LEFT JOIN files f ON m.file_id = f.id
        WHERE 1=1
        """
        # Collect optional filter clauses; parameters stay bound, never inlined.
        filters = []
        params = {}
        if project_id:
            filters.append(" AND f.project_id = :project_id")
            params["project_id"] = project_id
        if file_id:
            filters.append(" AND m.file_id = :file_id")
            params["file_id"] = file_id
        row = db.execute(text(base_sql + "".join(filters)), params).fetchone()
        return {
            "success": True,
            "summary": {
                "total_items": row.total_items,
                "unique_descriptions": row.unique_descriptions,
                "unique_sizes": row.unique_sizes,
                "unique_materials": row.unique_materials,
                # SUM/AVG come back as NULL on an empty result set → report 0.
                "total_quantity": float(row.total_quantity) if row.total_quantity else 0,
                "avg_quantity": round(float(row.avg_quantity), 2) if row.avg_quantity else 0,
                "earliest_upload": row.earliest_upload,
                "latest_upload": row.latest_upload
            }
        }
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"요약 조회 실패: {str(e)}")
@router.get("/materials/compare-revisions")
async def compare_revisions(
    job_no: str,
    filename: str,
    old_revision: str,
    new_revision: str,
    db: Session = Depends(get_db),
    current_user: dict = Depends(get_current_user)
):
    """Compare materials between two revisions of the same uploaded file.

    Materials are matched by a composite key (drawing name or line number
    when available, plus description, size and material grade) and
    classified as added, removed, or changed (quantity / material / size /
    category differences).  Drawings present only in the old revision are
    reported as missing and require user confirmation downstream.

    Args:
        job_no: Job number both revisions belong to.
        filename: Original filename shared by both revisions.
        old_revision: Baseline revision identifier.
        new_revision: Revision identifier to compare against the baseline.

    Raises:
        HTTPException: 500 when querying or analysing the revisions fails.
    """
    try:
        # Materials of the baseline (old) revision.
        old_materials_query = text("""
            SELECT m.original_description, m.quantity, m.unit, m.size_spec,
                   m.material_grade, m.classified_category, m.classification_confidence,
                   m.main_nom, m.red_nom, m.drawing_name, m.line_no
            FROM materials m
            JOIN files f ON m.file_id = f.id
            WHERE f.job_no = :job_no
            AND f.original_filename = :filename
            AND f.revision = :old_revision
        """)
        old_result = db.execute(old_materials_query, {
            "job_no": job_no,
            "filename": filename,
            "old_revision": old_revision
        })
        old_materials = old_result.fetchall()
        # Materials of the new revision.
        new_materials_query = text("""
            SELECT m.original_description, m.quantity, m.unit, m.size_spec,
                   m.material_grade, m.classified_category, m.classification_confidence,
                   m.main_nom, m.red_nom, m.drawing_name, m.line_no
            FROM materials m
            JOIN files f ON m.file_id = f.id
            WHERE f.job_no = :job_no
            AND f.original_filename = :filename
            AND f.revision = :new_revision
        """)
        new_result = db.execute(new_materials_query, {
            "job_no": job_no,
            "filename": filename,
            "new_revision": new_revision
        })
        new_materials = new_result.fetchall()

        def create_material_key(material):
            # Prefer drawing name (a drawing may contain several materials,
            # so the description/size/grade are appended for uniqueness),
            # then line number, and finally description + size + grade alone.
            if hasattr(material, 'drawing_name') and material.drawing_name:
                return f"{material.drawing_name}|{material.original_description}|{material.size_spec or ''}|{material.material_grade or ''}"
            elif hasattr(material, 'line_no') and material.line_no:
                return f"{material.line_no}|{material.original_description}|{material.size_spec or ''}|{material.material_grade or ''}"
            else:
                return f"{material.original_description}|{material.size_spec or ''}|{material.material_grade or ''}"

        # Aggregate old materials by key, summing quantities of duplicates.
        old_materials_dict = {}
        for material in old_materials:
            key = create_material_key(material)
            if key in old_materials_dict:
                old_materials_dict[key]["quantity"] += float(material.quantity) if material.quantity else 0
            else:
                old_materials_dict[key] = {
                    "original_description": material.original_description,
                    "quantity": float(material.quantity) if material.quantity else 0,
                    "unit": material.unit,
                    "size_spec": material.size_spec,
                    "material_grade": material.material_grade,
                    "classified_category": material.classified_category,
                    "classification_confidence": material.classification_confidence,
                    "main_nom": material.main_nom,
                    "red_nom": material.red_nom,
                    "drawing_name": material.drawing_name,
                    "line_no": material.line_no
                }
        # Aggregate new materials the same way.
        new_materials_dict = {}
        for material in new_materials:
            key = create_material_key(material)
            if key in new_materials_dict:
                new_materials_dict[key]["quantity"] += float(material.quantity) if material.quantity else 0
            else:
                new_materials_dict[key] = {
                    "original_description": material.original_description,
                    "quantity": float(material.quantity) if material.quantity else 0,
                    "unit": material.unit,
                    "size_spec": material.size_spec,
                    "material_grade": material.material_grade,
                    "classified_category": material.classified_category,
                    "classification_confidence": material.classification_confidence,
                    "main_nom": material.main_nom,
                    "red_nom": material.red_nom,
                    "drawing_name": material.drawing_name,
                    "line_no": material.line_no
                }

        # Diff the two keyed dictionaries.
        all_keys = set(old_materials_dict.keys()) | set(new_materials_dict.keys())
        added_items = []
        removed_items = []
        changed_items = []
        for key in all_keys:
            old_item = old_materials_dict.get(key)
            new_item = new_materials_dict.get(key)
            if old_item and not new_item:
                # Present only in the old revision.
                removed_items.append({
                    "key": key,
                    "item": old_item,
                    "change_type": "removed"
                })
            elif not old_item and new_item:
                # Present only in the new revision.
                added_items.append({
                    "key": key,
                    "item": new_item,
                    "change_type": "added"
                })
            elif old_item and new_item:
                # Detect quantity / material / size / category differences.
                changes_detected = []
                old_qty = old_item["quantity"]
                new_qty = new_item["quantity"]
                qty_diff = new_qty - old_qty
                # 1. Quantity change (epsilon guards float-summed quantities).
                if abs(qty_diff) > 0.001:
                    changes_detected.append({
                        "type": "quantity",
                        "old_value": old_qty,
                        "new_value": new_qty,
                        "diff": qty_diff
                    })
                # 2. Material grade change.
                if old_item.get("material_grade") != new_item.get("material_grade"):
                    changes_detected.append({
                        "type": "material",
                        "old_value": old_item.get("material_grade", "-"),
                        "new_value": new_item.get("material_grade", "-")
                    })
                # 3. Size change (nominal size or size spec).
                if old_item.get("main_nom") != new_item.get("main_nom") or old_item.get("size_spec") != new_item.get("size_spec"):
                    changes_detected.append({
                        "type": "size",
                        "old_value": old_item.get("main_nom") or old_item.get("size_spec", "-"),
                        "new_value": new_item.get("main_nom") or new_item.get("size_spec", "-")
                    })
                # 4. Category change (the material's classified kind changed).
                if old_item.get("classified_category") != new_item.get("classified_category"):
                    changes_detected.append({
                        "type": "category",
                        "old_value": old_item.get("classified_category", "-"),
                        "new_value": new_item.get("classified_category", "-")
                    })
                if changes_detected:
                    # Specification changes take precedence over pure
                    # quantity changes when labelling the overall change.
                    has_qty_change = any(c["type"] == "quantity" for c in changes_detected)
                    has_spec_change = any(c["type"] in ["material", "size", "category"] for c in changes_detected)
                    if has_spec_change:
                        change_type = "specification_changed"
                    elif has_qty_change:
                        change_type = "quantity_changed"
                    else:
                        change_type = "modified"
                    changed_items.append({
                        "key": key,
                        "old_item": old_item,
                        "new_item": new_item,
                        "changes": changes_detected,
                        "change_type": change_type,
                        "quantity_change": qty_diff if has_qty_change else 0,
                        "drawing_name": new_item.get("drawing_name") or old_item.get("drawing_name"),
                        "line_no": new_item.get("line_no") or old_item.get("line_no")
                    })

        # Per-category statistics.  Added/removed entries carry the material
        # under "item", but changed entries carry it under "new_item" /
        # "old_item" — fall back accordingly (previously every changed item
        # was mis-counted as category OTHER with quantity 0).
        def calculate_category_stats(items):
            stats = {}
            for entry in items:
                data = entry.get("item") or entry.get("new_item") or entry.get("old_item") or {}
                category = data.get("classified_category") or "OTHER"
                if category not in stats:
                    stats[category] = {"count": 0, "total_quantity": 0}
                stats[category]["count"] += 1
                stats[category]["total_quantity"] += data.get("quantity", 0)
            return stats

        added_stats = calculate_category_stats(added_items)
        removed_stats = calculate_category_stats(removed_items)
        changed_stats = calculate_category_stats(changed_items)

        # Detect drawings missing from the new revision (and newly added ones);
        # a material is attributed to its drawing name or, failing that, its line no.
        old_drawings = set()
        new_drawings = set()
        for material in old_materials:
            if hasattr(material, 'drawing_name') and material.drawing_name:
                old_drawings.add(material.drawing_name)
            elif hasattr(material, 'line_no') and material.line_no:
                old_drawings.add(material.line_no)
        for material in new_materials:
            if hasattr(material, 'drawing_name') and material.drawing_name:
                new_drawings.add(material.drawing_name)
            elif hasattr(material, 'line_no') and material.line_no:
                new_drawings.add(material.line_no)
        missing_drawings = old_drawings - new_drawings  # in old file only
        new_only_drawings = new_drawings - old_drawings  # in new file only
        # Materials belonging to the missing drawings, for user confirmation.
        missing_drawing_materials = []
        if missing_drawings:
            for material in old_materials:
                drawing = getattr(material, 'drawing_name', None) or getattr(material, 'line_no', None)
                if drawing in missing_drawings:
                    missing_drawing_materials.append({
                        "drawing_name": drawing,
                        "description": material.original_description,
                        "quantity": float(material.quantity) if material.quantity else 0,
                        "category": material.classified_category,
                        "size": material.main_nom or material.size_spec,
                        "material_grade": material.material_grade
                    })
        return {
            "success": True,
            "comparison": {
                "old_revision": old_revision,
                "new_revision": new_revision,
                "filename": filename,
                "job_no": job_no,
                "summary": {
                    "added_count": len(added_items),
                    "removed_count": len(removed_items),
                    "changed_count": len(changed_items),
                    "total_changes": len(added_items) + len(removed_items) + len(changed_items),
                    "missing_drawings_count": len(missing_drawings),
                    "new_drawings_count": len(new_only_drawings)
                },
                "missing_drawings": {
                    "drawings": list(missing_drawings),
                    "materials": missing_drawing_materials,
                    "count": len(missing_drawings),
                    "requires_confirmation": len(missing_drawings) > 0
                },
                "changes": {
                    "added": added_items,
                    "removed": removed_items,
                    "changed": changed_items
                },
                "category_stats": {
                    "added": added_stats,
                    "removed": removed_stats,
                    "changed": changed_stats
                }
            }
        }
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"리비전 비교 실패: {str(e)}")
@router.get("/pipe-details")
async def get_pipe_details(
    file_id: Optional[int] = None,
    job_no: Optional[str] = None,
    db: Session = Depends(get_db),
    current_user: dict = Depends(get_current_user)
):
    """Return PIPE detail rows, optionally filtered by file id or job number.

    Raises:
        HTTPException: 500 when the lookup query fails.
    """
    try:
        # NOTE(review): materials are joined on file_id only (plus category),
        # so a file containing several PIPE materials fans out into duplicate
        # rows — confirm whether a material_id join was intended.
        query = """
        SELECT pd.*, f.original_filename, f.job_no, f.revision,
               m.original_description, m.quantity, m.unit
        FROM pipe_details pd
        LEFT JOIN files f ON pd.file_id = f.id
        LEFT JOIN materials m ON pd.file_id = m.file_id
            AND m.classified_category = 'PIPE'
        WHERE 1=1
        """
        params = {}
        if file_id:
            query += " AND pd.file_id = :file_id"
            params["file_id"] = file_id
        if job_no:
            query += " AND f.job_no = :job_no"
            params["job_no"] = job_no
        query += " ORDER BY pd.created_at DESC"
        result = db.execute(text(query), params)
        pipe_details = result.fetchall()
        # Loop variable renamed to `row`: the original used `pd`, which
        # shadowed the module-level pandas alias `pd`.
        return [
            {
                "id": row.id,
                "file_id": row.file_id,
                "original_filename": row.original_filename,
                "job_no": row.job_no,
                "revision": row.revision,
                "original_description": row.original_description,
                "quantity": row.quantity,
                "unit": row.unit,
                "material_spec": row.material_spec,
                "manufacturing_method": row.manufacturing_method,
                "end_preparation": row.end_preparation,
                "schedule": row.schedule,
                "outer_diameter": row.outer_diameter,
                "length_mm": row.length_mm,
                "created_at": row.created_at,
                "updated_at": row.updated_at
            }
            for row in pipe_details
        ]
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"PIPE 상세 정보 조회 실패: {str(e)}")
@router.get("/fitting-details")
async def get_fitting_details(
    file_id: Optional[int] = None,
    job_no: Optional[str] = None,
    db: Session = Depends(get_db),
    current_user: dict = Depends(get_current_user)
):
    """Return FITTING detail rows, optionally filtered by file id or job number.

    Raises:
        HTTPException: 500 when the lookup query fails.
    """
    try:
        sql = """
        SELECT fd.*, f.original_filename, f.job_no, f.revision,
               m.original_description, m.quantity, m.unit
        FROM fitting_details fd
        LEFT JOIN files f ON fd.file_id = f.id
        LEFT JOIN materials m ON fd.material_id = m.id
        WHERE 1=1
        """
        bind_params = {}
        if file_id:
            sql += " AND fd.file_id = :file_id"
            bind_params["file_id"] = file_id
        if job_no:
            sql += " AND f.job_no = :job_no"
            bind_params["job_no"] = job_no
        sql += " ORDER BY fd.created_at DESC"
        rows = db.execute(text(sql), bind_params).fetchall()
        # Shape each DB row into the response payload.
        payload = []
        for row in rows:
            payload.append({
                "id": row.id,
                "file_id": row.file_id,
                "fitting_type": row.fitting_type,
                "fitting_subtype": row.fitting_subtype,
                "connection_method": row.connection_method,
                "pressure_rating": row.pressure_rating,
                "material_standard": row.material_standard,
                "material_grade": row.material_grade,
                "main_size": row.main_size,
                "reduced_size": row.reduced_size,
                "classification_confidence": row.classification_confidence,
                "original_description": row.original_description,
                "quantity": row.quantity
            })
        return payload
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"FITTING 상세 정보 조회 실패: {str(e)}")
@router.get("/valve-details")
async def get_valve_details(
    file_id: Optional[int] = None,
    job_no: Optional[str] = None,
    db: Session = Depends(get_db),
    current_user: dict = Depends(get_current_user)
):
    """Return VALVE detail rows, optionally filtered by file id or job number.

    Raises:
        HTTPException: 500 when the lookup query fails.
    """
    try:
        sql = """
        SELECT vd.*, f.original_filename, f.job_no, f.revision,
               m.original_description, m.quantity, m.unit
        FROM valve_details vd
        LEFT JOIN files f ON vd.file_id = f.id
        LEFT JOIN materials m ON vd.material_id = m.id
        WHERE 1=1
        """
        # Table-driven filter assembly: (param name, value, SQL column).
        optional_filters = (
            ("file_id", file_id, "vd.file_id"),
            ("job_no", job_no, "f.job_no"),
        )
        bind_params = {}
        for name, value, column in optional_filters:
            if value:
                sql += f" AND {column} = :{name}"
                bind_params[name] = value
        sql += " ORDER BY vd.created_at DESC"
        rows = db.execute(text(sql), bind_params).fetchall()
        return [
            {
                "id": row.id,
                "file_id": row.file_id,
                "valve_type": row.valve_type,
                "valve_subtype": row.valve_subtype,
                "actuator_type": row.actuator_type,
                "connection_method": row.connection_method,
                "pressure_rating": row.pressure_rating,
                "body_material": row.body_material,
                "size_inches": row.size_inches,
                "fire_safe": row.fire_safe,
                "classification_confidence": row.classification_confidence,
                "original_description": row.original_description,
                "quantity": row.quantity
            }
            for row in rows
        ]
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"VALVE 상세 정보 조회 실패: {str(e)}")
@router.get("/{file_id}/user-requirements")
async def get_user_requirements(
    file_id: int,
    job_no: Optional[str] = None,
    status: Optional[str] = None,
    db: Session = Depends(get_db),
    current_user: dict = Depends(get_current_user)
):
    """Return user requirements attached to a file.

    Optional ``job_no`` / ``status`` query parameters narrow the result.

    Raises:
        HTTPException: 500 when the lookup query fails.
    """
    try:
        sql = """
        SELECT ur.*, f.original_filename, f.job_no, f.revision,
               rt.type_name, rt.category
        FROM user_requirements ur
        LEFT JOIN files f ON ur.file_id = f.id
        LEFT JOIN requirement_types rt ON ur.requirement_type = rt.type_code
        WHERE 1=1
        """
        bind_params = {}
        if file_id:
            sql += " AND ur.file_id = :file_id"
            bind_params["file_id"] = file_id
        if job_no:
            sql += " AND f.job_no = :job_no"
            bind_params["job_no"] = job_no
        if status:
            sql += " AND ur.status = :status"
            bind_params["status"] = status
        sql += " ORDER BY ur.created_at DESC"
        rows = db.execute(text(sql), bind_params).fetchall()
        payload = []
        for row in rows:
            payload.append({
                "id": row.id,
                "file_id": row.file_id,
                "material_id": row.material_id,
                "original_filename": row.original_filename,
                "job_no": row.job_no,
                "revision": row.revision,
                "requirement_type": row.requirement_type,
                "type_name": row.type_name,
                "category": row.category,
                "requirement_title": row.requirement_title,
                "requirement_description": row.requirement_description,
                "requirement_spec": row.requirement_spec,
                "status": row.status,
                "priority": row.priority,
                "assigned_to": row.assigned_to,
                "due_date": row.due_date,
                "created_at": row.created_at,
                "updated_at": row.updated_at
            })
        return payload
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"사용자 요구사항 조회 실패: {str(e)}")
class UserRequirementCreate(BaseModel):
    """Request payload for creating a user requirement attached to a file.

    Consumed by POST /user-requirements; fields map onto the columns of the
    user_requirements table used by that endpoint's INSERT.
    """

    file_id: int  # id of the uploaded file the requirement belongs to
    material_id: Optional[int] = None  # optional link to a specific material row
    requirement_type: str  # code joined against requirement_types.type_code
    requirement_title: str
    requirement_description: Optional[str] = None
    requirement_spec: Optional[str] = None
    priority: str = "NORMAL"  # priority label; stored as-is
    assigned_to: Optional[str] = None
    due_date: Optional[str] = None  # date string; passed through to the DB unchanged
@router.post("/user-requirements")
async def create_user_requirement(
    requirement: UserRequirementCreate,
    db: Session = Depends(get_db),
    current_user: dict = Depends(get_current_user)
):
    """Insert a new user requirement row and return its generated id.

    Raises:
        HTTPException: 500 when the insert fails (transaction is rolled back).
    """
    try:
        insert_query = text("""
        INSERT INTO user_requirements (
            file_id, material_id, requirement_type, requirement_title, requirement_description,
            requirement_spec, priority, assigned_to, due_date
        )
        VALUES (
            :file_id, :material_id, :requirement_type, :requirement_title, :requirement_description,
            :requirement_spec, :priority, :assigned_to, :due_date
        )
        RETURNING id
        """)
        # Bind parameter names match the model field names one-to-one.
        bound_fields = (
            "file_id", "material_id", "requirement_type", "requirement_title",
            "requirement_description", "requirement_spec", "priority",
            "assigned_to", "due_date",
        )
        result = db.execute(
            insert_query,
            {name: getattr(requirement, name) for name in bound_fields},
        )
        new_id = result.fetchone()[0]
        db.commit()
        return {
            "success": True,
            "message": "요구사항이 생성되었습니다",
            "requirement_id": new_id
        }
    except Exception as e:
        db.rollback()
        raise HTTPException(status_code=500, detail=f"요구사항 생성 실패: {str(e)}")
@router.delete("/user-requirements")
async def delete_user_requirements(
file_id: Optional[int] = None,
material_id: Optional[int] = None,
db: Session = Depends(get_db),
current_user: dict = Depends(get_current_user)
):
"""
사용자 요구사항 삭제 (파일별 또는 자재별)
"""
try:
if file_id:
# 파일별 삭제
delete_query = text("DELETE FROM user_requirements WHERE file_id = :file_id")
result = db.execute(delete_query, {"file_id": file_id})
deleted_count = result.rowcount
elif material_id:
# 자재별 삭제
delete_query = text("DELETE FROM user_requirements WHERE material_id = :material_id")
result = db.execute(delete_query, {"material_id": material_id})
deleted_count = result.rowcount
else:
raise HTTPException(status_code=400, detail="file_id 또는 material_id가 필요합니다")
db.commit()
return {
"success": True,
"message": f"{deleted_count}개의 요구사항이 삭제되었습니다",
"deleted_count": deleted_count
}
except Exception as e:
db.rollback()
raise HTTPException(status_code=500, detail=f"요구사항 삭제 실패: {str(e)}")
@router.post("/materials/{material_id}/verify")
async def verify_material_classification(
material_id: int,
request: Request,
verified_category: Optional[str] = None,
db: Session = Depends(get_db),
current_user: dict = Depends(get_current_user)
):
"""
자재 분류 결과 검증
"""
try:
username = current_user.get('username', 'unknown')
# 자재 존재 확인
material_query = text("SELECT * FROM materials WHERE id = :material_id")
material_result = db.execute(material_query, {"material_id": material_id})
material = material_result.fetchone()
if not material:
raise HTTPException(status_code=404, detail="자재를 찾을 수 없습니다")
# 검증 정보 업데이트
update_query = text("""
UPDATE materials
SET is_verified = TRUE,
verified_by = :username,
verified_at = CURRENT_TIMESTAMP,
classified_category = COALESCE(:verified_category, classified_category)
WHERE id = :material_id
""")
db.execute(update_query, {
"material_id": material_id,
"username": username,
"verified_category": verified_category
})
# 활동 로그 기록
try:
from ..services.activity_logger import log_activity_from_request
log_activity_from_request(
db, request, username,
"MATERIAL_VERIFY",
f"자재 분류 검증: {material.original_description}"
)
except Exception as e:
logger.error(f"활동 로그 기록 실패: {str(e)}")
db.commit()
return {
"success": True,
"message": "자재 분류가 검증되었습니다",
"material_id": material_id,
"verified_by": username
}
except Exception as e:
db.rollback()
raise HTTPException(status_code=500, detail=f"자재 검증 실패: {str(e)}")
@router.put("/materials/{material_id}/update-classification")
async def update_material_classification(
material_id: int,
request: Request,
classified_category: str = Form(...),
classified_subcategory: str = Form(None),
material_grade: str = Form(None),
schedule: str = Form(None),
size_spec: str = Form(None),
db: Session = Depends(get_db),
current_user: dict = Depends(get_current_user)
):
"""BOM 관리 페이지에서 사용자가 분류를 수정하는 API"""
try:
username = current_user.get("username", "unknown")
# 자재 존재 확인
check_query = text("SELECT id, original_description FROM materials WHERE id = :material_id")
result = db.execute(check_query, {"material_id": material_id})
material = result.fetchone()
if not material:
raise HTTPException(status_code=404, detail="자재를 찾을 수 없습니다")
# 분류 정보 업데이트
update_query = text("""
UPDATE materials
SET classified_category = :classified_category,
classified_subcategory = :classified_subcategory,
material_grade = :material_grade,
schedule = :schedule,
size_spec = :size_spec,
is_verified = true,
verified_by = :verified_by,
verified_at = NOW(),
updated_at = NOW()
WHERE id = :material_id
""")
db.execute(update_query, {
"material_id": material_id,
"classified_category": classified_category,
"classified_subcategory": classified_subcategory or "",
"material_grade": material_grade or "",
"schedule": schedule or "",
"size_spec": size_spec or "",
"verified_by": username
})
db.commit()
# 활동 로그 기록
await log_activity_from_request(
request,
db,
"material_classification_update",
f"자재 분류 수정: {material.original_description} -> {classified_category}",
{"material_id": material_id, "category": classified_category}
)
return {
"success": True,
"message": "자재 분류가 성공적으로 업데이트되었습니다",
"material_id": material_id,
"classified_category": classified_category
}
except Exception as e:
db.rollback()
logger.error(f"자재 분류 업데이트 실패: {str(e)}")
raise HTTPException(status_code=500, detail=f"자재 분류 업데이트 실패: {str(e)}")
@router.post("/materials/confirm-purchase")
async def confirm_material_purchase_api(
request: Request,
job_no: str = Query(...),
revision: str = Query(...),
confirmed_by: str = Query("user"),
confirmations_data: List[Dict] = Body(...),
db: Session = Depends(get_db),
current_user: dict = Depends(get_current_user)
):
"""자재 구매수량 확정 API (프론트엔드 호환)"""
try:
# 입력 데이터 검증
if not job_no or not revision:
raise HTTPException(status_code=400, detail="Job 번호와 리비전은 필수입니다")
if not confirmations_data:
raise HTTPException(status_code=400, detail="확정할 자재가 없습니다")
# 각 확정 항목 검증
for i, confirmation in enumerate(confirmations_data):
if not confirmation.get("material_hash"):
raise HTTPException(status_code=400, detail=f"{i+1}번째 항목의 material_hash가 없습니다")
confirmed_qty = confirmation.get("confirmed_quantity")
if confirmed_qty is None or confirmed_qty < 0:
raise HTTPException(status_code=400, detail=f"{i+1}번째 항목의 확정 수량이 유효하지 않습니다")
confirmed_items = []
for confirmation in confirmations_data:
# 발주 추적 테이블에 저장/업데이트
upsert_query = text("""
INSERT INTO material_purchase_tracking (
job_no, material_hash, revision, description, size_spec, unit,
bom_quantity, calculated_quantity, confirmed_quantity,
purchase_status, supplier_name, unit_price, total_price,
confirmed_by, confirmed_at
)
SELECT
:job_no, m.material_hash, :revision, m.original_description,
m.size_spec, m.unit, m.quantity, :calculated_qty, :confirmed_qty,
'CONFIRMED', :supplier_name, :unit_price, :total_price,
:confirmed_by, CURRENT_TIMESTAMP
FROM materials m
WHERE m.material_hash = :material_hash
AND m.file_id = (
SELECT id FROM files
WHERE job_no = :job_no AND revision = :revision
ORDER BY upload_date DESC LIMIT 1
)
LIMIT 1
ON CONFLICT (job_no, material_hash, revision)
DO UPDATE SET
confirmed_quantity = :confirmed_qty,
purchase_status = 'CONFIRMED',
supplier_name = :supplier_name,
unit_price = :unit_price,
total_price = :total_price,
confirmed_by = :confirmed_by,
confirmed_at = CURRENT_TIMESTAMP,
updated_at = CURRENT_TIMESTAMP
RETURNING id, description, confirmed_quantity
""")
calculated_qty = confirmation.get("calculated_quantity", confirmation["confirmed_quantity"])
total_price = confirmation["confirmed_quantity"] * confirmation.get("unit_price", 0)
result = db.execute(upsert_query, {
"job_no": job_no,
"revision": revision,
"material_hash": confirmation["material_hash"],
"calculated_qty": calculated_qty,
"confirmed_qty": confirmation["confirmed_quantity"],
"supplier_name": confirmation.get("supplier_name", ""),
"unit_price": confirmation.get("unit_price", 0),
"total_price": total_price,
"confirmed_by": confirmed_by
})
confirmed_item = result.fetchone()
if confirmed_item:
confirmed_items.append({
"id": confirmed_item[0],
"description": confirmed_item[1],
"confirmed_quantity": confirmed_item[2]
})
db.commit()
# 활동 로그 기록
await log_activity_from_request(
request,
db,
"material_purchase_confirm",
f"구매수량 확정: {job_no} {revision} - {len(confirmed_items)}개 품목",
{"job_no": job_no, "revision": revision, "items_count": len(confirmed_items)}
)
return {
"success": True,
"message": f"{len(confirmed_items)}개 품목의 구매수량이 확정되었습니다",
"job_no": job_no,
"revision": revision,
"confirmed_items": confirmed_items
}
except Exception as e:
db.rollback()
logger.error(f"구매수량 확정 실패: {str(e)}")
raise HTTPException(status_code=500, detail=f"구매수량 확정 실패: {str(e)}")
@router.put("/{file_id}")
async def update_file_info(
file_id: int,
bom_name: Optional[str] = Body(None),
description: Optional[str] = Body(None),
db: Session = Depends(get_db),
current_user: dict = Depends(get_current_user)
):
"""파일 정보 수정"""
try:
# 파일 존재 확인 및 관련 정보 조회
file_query = text("""
SELECT id, bom_name, description, job_no, original_filename
FROM files
WHERE id = :file_id
""")
file_result = db.execute(file_query, {"file_id": file_id}).fetchone()
if not file_result:
raise HTTPException(status_code=404, detail="파일을 찾을 수 없습니다")
# 업데이트할 필드 준비
update_fields = []
params = {}
if bom_name is not None:
update_fields.append("bom_name = :bom_name")
params["bom_name"] = bom_name
if description is not None:
update_fields.append("description = :description")
params["description"] = description
if not update_fields:
raise HTTPException(status_code=400, detail="수정할 정보가 없습니다")
# BOM 이름 수정인 경우, 같은 job_no의 모든 리비전을 함께 업데이트
if bom_name is not None and file_result.job_no:
# 같은 job_no의 모든 파일 업데이트
update_query = text(f"""
UPDATE files
SET {', '.join(update_fields)}, updated_at = CURRENT_TIMESTAMP
WHERE job_no = :job_no
""")
params["job_no"] = file_result.job_no
else:
# 단일 파일만 업데이트
update_query = text(f"""
UPDATE files
SET {', '.join(update_fields)}, updated_at = CURRENT_TIMESTAMP
WHERE id = :file_id
""")
params["file_id"] = file_id
db.execute(update_query, params)
db.commit()
# 활동 로그 기록 - 간단하게 처리
logger.info(f"파일 정보 수정 완료: 사용자={current_user['username']}, 파일ID={file_id}, BOM명={bom_name or 'N/A'}")
return {"message": "파일 정보가 성공적으로 수정되었습니다"}
except Exception as e:
logger.error(f"파일 정보 수정 실패: {str(e)}")
db.rollback()
raise HTTPException(status_code=500, detail=f"파일 정보 수정 실패: {str(e)}")
@router.get("/{file_id}/export-excel")
async def export_materials_to_excel(
file_id: int,
db: Session = Depends(get_db),
current_user: dict = Depends(get_current_user)
):
"""자재 목록을 엑셀로 내보내기"""
try:
# 파일 정보 조회
file_query = text("""
SELECT f.id, f.original_filename, f.bom_name, f.job_no, f.revision,
p.project_name, p.official_project_code
FROM files f
LEFT JOIN projects p ON f.project_id = p.id
WHERE f.id = :file_id
""")
file_result = db.execute(file_query, {"file_id": file_id}).fetchone()
if not file_result:
raise HTTPException(status_code=404, detail="파일을 찾을 수 없습니다")
# 자재 목록 조회
materials_query = text("""
SELECT
m.line_number,
m.original_description,
m.quantity,
m.unit,
m.size_spec,
m.main_nom,
m.red_nom,
m.material_grade,
m.classified_category,
m.classification_confidence,
m.is_verified,
m.verified_by,
m.created_at
FROM materials m
WHERE m.file_id = :file_id
ORDER BY m.line_number ASC
""")
materials_result = db.execute(materials_query, {"file_id": file_id}).fetchall()
# 엑셀 데이터 준비
excel_data = []
for material in materials_result:
excel_data.append({
"라인번호": material.line_number,
"품명": material.original_description,
"수량": material.quantity,
"단위": material.unit,
"사이즈": material.size_spec,
"주요NOM": material.main_nom,
"축소NOM": material.red_nom,
"재질등급": material.material_grade,
"분류": material.classified_category,
"신뢰도": material.classification_confidence,
"검증여부": "검증완료" if material.is_verified else "미검증",
"검증자": material.verified_by or "",
"등록일": material.created_at.strftime("%Y-%m-%d %H:%M:%S") if material.created_at else ""
})
# 활동 로그 기록
activity_logger = ActivityLogger(db)
activity_logger.log_activity(
username=current_user["username"],
activity_type="엑셀 내보내기",
activity_description=f"자재 목록 엑셀 내보내기: {file_result.original_filename}",
target_type="file",
target_id=file_id,
metadata={
"file_name": file_result.original_filename,
"bom_name": file_result.bom_name,
"job_no": file_result.job_no,
"revision": file_result.revision,
"materials_count": len(excel_data)
}
)
return {
"message": "엑셀 내보내기 준비 완료",
"file_info": {
"filename": file_result.original_filename,
"bom_name": file_result.bom_name,
"job_no": file_result.job_no,
"revision": file_result.revision,
"project_name": file_result.project_name
},
"materials": excel_data,
"total_count": len(excel_data)
}
except Exception as e:
logger.error(f"엑셀 내보내기 실패: {str(e)}")
raise HTTPException(status_code=500, detail=f"엑셀 내보내기 실패: {str(e)}")
@router.get("/{file_id}/materials/view-log")
async def log_materials_view(
file_id: int,
db: Session = Depends(get_db),
current_user: dict = Depends(get_current_user)
):
"""자재 목록 조회 로그 기록"""
try:
# 파일 정보 조회
file_query = text("SELECT original_filename, bom_name FROM files WHERE id = :file_id")
file_result = db.execute(file_query, {"file_id": file_id}).fetchone()
if file_result:
# 활동 로그 기록
activity_logger = ActivityLogger(db)
activity_logger.log_activity(
username=current_user["username"],
activity_type="자재 목록 조회",
activity_description=f"자재 목록 조회: {file_result.original_filename}",
target_type="file",
target_id=file_id,
metadata={
"file_name": file_result.original_filename,
"bom_name": file_result.bom_name
}
)
return {"message": "조회 로그 기록 완료"}
except Exception as e:
logger.error(f"조회 로그 기록 실패: {str(e)}")
return {"message": "조회 로그 기록 실패"}
@router.post("/{file_id}/process-missing-drawings")
async def process_missing_drawings(
file_id: int,
action: str = "delete",
drawings: List[str] = [],
db: Session = Depends(get_db),
current_user: dict = Depends(get_current_user)
):
"""
누락된 도면 처리
- action='delete': 누락된 도면의 이전 리비전 자재 삭제 처리
- action='keep': 누락된 도면의 이전 리비전 자재 유지
"""
try:
# 현재 파일 정보 확인
from pydantic import BaseModel
class MissingDrawingsRequest(BaseModel):
action: str
drawings: List[str]
# parent_file_id 조회는 files 테이블에 없을 수 있으므로 revision으로 판단
file_query = text("""
SELECT f.id, f.job_no, f.revision, f.original_filename
FROM files f
WHERE f.id = :file_id
""")
file_result = db.execute(file_query, {"file_id": file_id}).fetchone()
if not file_result:
raise HTTPException(status_code=404, detail="파일을 찾을 수 없습니다")
# 이전 리비전 파일 찾기
prev_revision_query = text("""
SELECT id FROM files
WHERE job_no = :job_no
AND original_filename = :filename
AND revision < :current_revision
ORDER BY revision DESC
LIMIT 1
""")
prev_result = db.execute(prev_revision_query, {
"job_no": file_result.job_no,
"filename": file_result.original_filename,
"current_revision": file_result.revision
}).fetchone()
if not prev_result:
raise HTTPException(status_code=400, detail="이전 리비전을 찾을 수 없습니다")
parent_file_id = prev_result.id
if action == "delete":
# 누락된 도면의 자재를 deleted_not_purchased 상태로 변경
for drawing in drawings:
update_query = text("""
UPDATE materials
SET revision_status = 'deleted_not_purchased'
WHERE file_id = :parent_file_id
AND (drawing_name = :drawing OR line_no = :drawing)
""")
db.execute(update_query, {
"parent_file_id": parent_file_id,
"drawing": drawing
})
db.commit()
return {
"success": True,
"message": f"{len(drawings)}개 도면의 자재가 삭제 처리되었습니다",
"action": "deleted",
"drawings_count": len(drawings)
}
else:
# keep - 이미 처리됨 (inventory 또는 active 상태 유지)
return {
"success": True,
"message": "누락된 도면의 자재가 유지됩니다",
"action": "kept",
"drawings_count": len(drawings)
}
except Exception as e:
db.rollback()
raise HTTPException(status_code=500, detail=f"도면 처리 실패: {str(e)}")
@router.post("/save-excel")
async def save_excel_file(
request: ExcelSaveRequest,
current_user: dict = Depends(get_current_user),
db: Session = Depends(get_db)
):
"""
엑셀 파일을 서버에 저장하고 메타데이터를 기록
"""
try:
# 엑셀 저장 디렉토리 생성
excel_dir = Path("uploads/excel_exports")
excel_dir.mkdir(parents=True, exist_ok=True)
# 파일 경로 생성
timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
safe_filename = f"{request.category}_{timestamp}_{request.filename}"
file_path = excel_dir / safe_filename
# 엑셀 파일 생성 (openpyxl 사용)
import openpyxl
from openpyxl.styles import Font, PatternFill, Alignment
wb = openpyxl.Workbook()
ws = wb.active
ws.title = request.category
# 헤더 설정
headers = ['TAGNO', '품목명', '수량', '통화구분', '단가', '크기', '압력등급', '스케줄',
'재질', '상세내역', '사용자요구', '관리항목1', '관리항목2', '관리항목3',
'관리항목4', '납기일(YYYY-MM-DD)']
# 헤더 스타일
header_font = Font(bold=True, color="FFFFFF")
header_fill = PatternFill(start_color="366092", end_color="366092", fill_type="solid")
header_alignment = Alignment(horizontal="center", vertical="center")
# 헤더 작성
for col, header in enumerate(headers, 1):
cell = ws.cell(row=1, column=col, value=header)
cell.font = header_font
cell.fill = header_fill
cell.alignment = header_alignment
# 데이터 작성
for row_idx, material in enumerate(request.materials, 2):
# 기본 데이터
data = [
'', # TAGNO
request.category, # 품목명
material.get('quantity', 0), # 수량
'KRW', # 통화구분
1, # 단가
material.get('size_spec', '-'), # 크기
'-', # 압력등급
material.get('schedule', '-'), # 스케줄
material.get('full_material_grade', material.get('material_grade', '-')), # 재질
'-', # 상세내역
material.get('user_requirement', ''), # 사용자요구
'', '', '', '', # 관리항목들
datetime.now().strftime('%Y-%m-%d') # 납기일
]
# 데이터 입력
for col, value in enumerate(data, 1):
ws.cell(row=row_idx, column=col, value=value)
# 엑셀 파일 저장
wb.save(file_path)
# 데이터베이스에 메타데이터 저장 (테이블이 없으면 무시)
try:
save_query = text("""
INSERT INTO excel_exports (
file_id, category, filename, file_path,
material_count, created_by, created_at
) VALUES (
:file_id, :category, :filename, :file_path,
:material_count, :user_id, :created_at
)
""")
db.execute(save_query, {
"file_id": request.file_id,
"category": request.category,
"filename": safe_filename,
"file_path": str(file_path),
"material_count": len(request.materials),
"user_id": current_user.get('id'),
"created_at": datetime.now()
})
db.commit()
except Exception as db_error:
logger.warning(f"엑셀 메타데이터 저장 실패 (파일은 저장됨): {str(db_error)}")
# 메타데이터 저장 실패해도 파일은 저장되었으므로 계속 진행
logger.info(f"엑셀 파일 저장 완료: {safe_filename}")
return {
"success": True,
"message": "엑셀 파일이 성공적으로 저장되었습니다.",
"filename": safe_filename,
"file_path": str(file_path),
"material_count": len(request.materials)
}
except Exception as e:
logger.error(f"엑셀 파일 저장 실패: {str(e)}")
raise HTTPException(status_code=500, detail=f"엑셀 파일 저장 실패: {str(e)}")
@router.get("/excel-exports")
async def get_excel_exports(
file_id: Optional[int] = None,
category: Optional[str] = None,
current_user: dict = Depends(get_current_user),
db: Session = Depends(get_db)
):
"""
저장된 엑셀 파일 목록 조회
"""
try:
query = text("""
SELECT
id, file_id, category, filename, file_path,
material_count, created_by, created_at
FROM excel_exports
WHERE 1=1
""")
params = {}
if file_id:
query = text(str(query) + " AND file_id = :file_id")
params["file_id"] = file_id
if category:
query = text(str(query) + " AND category = :category")
params["category"] = category
query = text(str(query) + " ORDER BY created_at DESC")
result = db.execute(query, params).fetchall()
exports = []
for row in result:
exports.append({
"id": row.id,
"file_id": row.file_id,
"category": row.category,
"filename": row.filename,
"file_path": row.file_path,
"material_count": row.material_count,
"created_by": row.created_by,
"created_at": row.created_at.isoformat() if row.created_at else None
})
return {
"success": True,
"exports": exports
}
except Exception as e:
logger.error(f"엑셀 내보내기 목록 조회 실패: {str(e)}")
return {
"success": False,
"exports": [],
"message": "엑셀 내보내기 목록을 조회할 수 없습니다."
}
def perform_simple_revision_comparison(db: Session, job_no: str, parent_file_id: int, new_materials: List[Dict]) -> Dict:
    """
    Improved revision-comparison logic.

    Requirements:
    1. Purchase-confirmed materials are excluded entirely (regardless of
       quantity changes)
    2. Removed items are returned as a separate list
    3. Only newly added items become classification candidates

    Args:
        db: database session
        job_no: project (job) number
        parent_file_id: id of the previous revision's file
        new_materials: list of materials from the new revision

    Returns:
        Comparison result {
            has_purchased_materials: bool,
            purchased_count: int,
            unpurchased_count: int,
            new_count: int,
            removed_count: int,
            materials_to_classify: List[Dict],  # new materials only
            removed_materials: List[Dict],      # removed materials
            total_previous: int,
            total_new: int
        }
        On error (or when the previous file has no materials) a reduced dict
        with a "message" key is returned instead; the whole new_materials
        list then becomes the classification set.
    """
    try:
        # 1. Load every material of the previous file (with purchase-confirmation info).
        previous_materials_query = text("""
            SELECT original_description, classified_category, size_spec, material_grade,
                   main_nom, red_nom, drawing_name, line_no,
                   COALESCE(total_quantity, quantity, 0) as quantity, unit,
                   purchase_confirmed, purchase_confirmed_at, purchase_confirmed_by
            FROM materials
            WHERE file_id = :parent_file_id
            ORDER BY id
        """)
        previous_result = db.execute(previous_materials_query, {"parent_file_id": parent_file_id})
        previous_materials = previous_result.fetchall()
        if not previous_materials:
            # No baseline to compare against -> classify everything.
            logger.info("이전 자료가 없음 - 전체 자재 분류")
            return {
                "has_purchased_materials": False,
                "message": "이전 자료가 없습니다.",
                "materials_to_classify": new_materials,
                "new_count": len(new_materials),
                "removed_materials": [],
                "removed_count": 0
            }
        # 2. Group previous materials by identity key.
        previous_dict = {}
        purchased_dict = {}  # purchase-confirmed materials, tracked separately
        unpurchased_dict = {}  # not-yet-purchased materials, tracked separately
        for material in previous_materials:
            # Identity key = normalized description + size.
            key = f"{material.original_description.strip().upper()}|{material.size_spec or ''}"
            material_data = {
                "original_description": material.original_description,
                "classified_category": material.classified_category,
                "size_spec": material.size_spec,
                "material_grade": material.material_grade,
                "main_nom": material.main_nom,
                "red_nom": material.red_nom,
                "drawing_name": material.drawing_name,
                "line_no": material.line_no,
                "quantity": float(material.quantity or 0),
                "unit": material.unit,
                "purchase_confirmed": material.purchase_confirmed,
                "purchase_confirmed_at": material.purchase_confirmed_at,
                "purchase_confirmed_by": material.purchase_confirmed_by
            }
            if key in previous_dict:
                # Duplicate key: accumulate quantity onto the existing entry.
                previous_dict[key]["quantity"] += float(material.quantity or 0)
            else:
                previous_dict[key] = material_data
            # Classify by the current row's purchase-confirmation flag.
            # NOTE(review): if duplicate rows of the same key disagree on
            # purchase_confirmed, the key ends up in both dicts; the
            # purchased check below then takes precedence -- confirm intended.
            if material.purchase_confirmed:
                purchased_dict[key] = previous_dict[key]
            else:
                unpurchased_dict[key] = previous_dict[key]
        # 3. Group the new revision's materials by the same identity key.
        new_dict = {}
        for material in new_materials:
            key = f"{material.get('original_description', '').strip().upper()}|{material.get('size_spec', '') or ''}"
            # Prefer total_quantity over quantity.
            quantity = float(material.get("total_quantity") or material.get("quantity", 0))
            if key in new_dict:
                new_dict[key]["quantity"] += quantity
            else:
                new_dict[key] = material.copy()
                new_dict[key]["quantity"] = quantity
        # 4. Perform the comparison.
        materials_to_classify = []  # only new materials that need classification
        removed_materials = []  # present previously but absent from the new revision
        new_count = 0
        excluded_purchased_count = 0
        logger.info(f"\n{'='*60}")
        logger.info(f"리비전 비교 시작")
        logger.info(f"{'='*60}")
        logger.info(f"이전 자재: {len(previous_dict)}개 (구매확정: {len(purchased_dict)}개, 미구매: {len(unpurchased_dict)}개)")
        logger.info(f"신규 자재: {len(new_dict)}")
        # 4-1. Walk the new materials (purchase-confirmed ones are excluded).
        for key, new_material in new_dict.items():
            if key in purchased_dict:
                # Purchase-confirmed material: excluded entirely.
                excluded_purchased_count += 1
                logger.info(f"구매확정 자재 제외: {new_material.get('original_description', '')[:50]}...")
            elif key in unpurchased_dict:
                # Unpurchased material that still exists in the new revision.
                previous_material = unpurchased_dict[key]
                # Special case for PIPE: compare in 6,000 mm stick units.
                if previous_material.get("classified_category") == "PIPE":
                    import math
                    # Sticks required before vs. after the revision.
                    prev_qty = previous_material.get("quantity", 0)
                    new_qty = new_material.get("quantity", 0)
                    prev_pipes_needed = math.ceil(prev_qty / 6000)
                    new_pipes_needed = math.ceil(new_qty / 6000)
                    if prev_pipes_needed != new_pipes_needed:
                        # Stick count changed -> needs reclassification.
                        logger.info(f"PIPE 본수 변경: {new_material.get('original_description', '')[:40]}... "
                              f"{prev_qty}mm({prev_pipes_needed}본) -> {new_qty}mm({new_pipes_needed}본)")
                        materials_to_classify.append(new_material)
                        new_count += 1
                    else:
                        # Same stick count -> keep the existing classification.
                        logger.info(f"PIPE 본수 유지: {new_material.get('original_description', '')[:40]}... "
                              f"{prev_qty}mm -> {new_qty}mm ({new_pipes_needed}본)")
                else:
                    # Non-PIPE material: keep the existing classification.
                    logger.info(f"기존 미구매 자재 유지: {new_material.get('original_description', '')[:50]}...")
            else:
                # Genuinely new material -> needs classification.
                logger.info(f"신규 자재: {new_material.get('original_description', '')[:50]}...")
                materials_to_classify.append(new_material)
                new_count += 1
        # 4-2. Detect removed materials (among unpurchased ones only).
        for key, previous_material in unpurchased_dict.items():
            if key not in new_dict:
                # Present in the previous revision but gone from the new one.
                removed_materials.append(previous_material)
                logger.info(f"삭제된 자재: {previous_material.get('original_description', '')[:50]}...")
        logger.info(f"\n{'='*60}")
        logger.info(f"비교 결과")
        logger.info(f"{'='*60}")
        logger.info(f"신규 자재: {new_count}개 (분류 필요)")
        logger.info(f"삭제 자재: {len(removed_materials)}")
        logger.info(f"구매확정 제외: {excluded_purchased_count}")
        logger.info(f"{'='*60}\n")
        return {
            "has_purchased_materials": len(purchased_dict) > 0,
            "purchased_count": len(purchased_dict),
            "unpurchased_count": len(unpurchased_dict),
            "new_count": new_count,
            "removed_count": len(removed_materials),
            "excluded_purchased_count": excluded_purchased_count,
            "materials_to_classify": materials_to_classify,  # new materials only
            "removed_materials": removed_materials,  # removed materials
            "purchased_materials_map": purchased_dict,  # mapping info for purchase-confirmed materials
            "total_previous": len(previous_dict),
            "total_new": len(new_dict)
        }
    except Exception as e:
        # Fail open: classify everything from the new revision.
        logger.error(f"리비전 비교 실패: {str(e)}")
        import traceback
        traceback.print_exc()
        return {
            "has_purchased_materials": False,
            "message": f"비교 실패: {str(e)}",
            "materials_to_classify": new_materials,
            "removed_materials": [],
            "new_count": len(new_materials),
            "removed_count": 0
        }