✅ 완성된 기능: - Job 관리 CRUD API 구현 (생성/조회/수정/삭제) - PostgreSQL jobs 테이블 생성 및 더미 데이터 - files.py project_id → job_no 변경으로 완전 통합 - Job 검증 로직으로 업로드 시 유효성 확인 - Job-Files-Materials 3단계 데이터 연동 완료 📁 추가된 파일: - scripts/create_jobs.sql: jobs 테이블 스키마 - scripts/insert_dummy_jobs.py: 더미 데이터 생성 - app/routers/jobs.py: Job 관리 API - app/routers/files.py: BOM 업로드 (job_no 연동) 🚀 다음 단계: - 자재 분류 시스템 통합 (classification.py) - 검토 시스템 구현 (행별 분류 확인/수정) - Job별 자재 통계 및 진행률 API - 프론트엔드 UI 개발 🎯 테스트 완료: - J24-001 Job에 BOM 파일 업로드 성공 - Job 검증 및 오류 처리 작동 확인 - PostgreSQL 데이터 저장 및 조회 정상
400 lines
14 KiB
Plaintext
400 lines
14 KiB
Plaintext
from fastapi import APIRouter, Depends, HTTPException, UploadFile, File, Form
|
|
from sqlalchemy.orm import Session
|
|
from sqlalchemy import text
|
|
from typing import List, Optional
|
|
import os
|
|
import shutil
|
|
from datetime import datetime
|
|
import uuid
|
|
import pandas as pd
|
|
import re
|
|
from pathlib import Path
|
|
|
|
from ..database import get_db
|
|
|
|
# Router for the file-management endpoints defined below.
router = APIRouter()

# Directory where uploaded BOM spreadsheets are stored; created on import if absent.
UPLOAD_DIR = Path("uploads")
UPLOAD_DIR.mkdir(exist_ok=True)

# Spreadsheet formats accepted by the upload endpoint.
ALLOWED_EXTENSIONS = {".xlsx", ".xls", ".csv"}
|
|
|
|
@router.get("/")
|
|
async def get_files_info():
|
|
return {
|
|
"message": "파일 관리 API",
|
|
"allowed_extensions": list(ALLOWED_EXTENSIONS),
|
|
"upload_directory": str(UPLOAD_DIR)
|
|
}
|
|
|
|
@router.get("/test")
|
|
async def test_endpoint():
|
|
return {"status": "파일 API가 정상 작동합니다!"}
|
|
|
|
@router.post("/add-missing-columns")
|
|
async def add_missing_columns(db: Session = Depends(get_db)):
|
|
"""누락된 컬럼들 추가"""
|
|
try:
|
|
db.execute(text("ALTER TABLE files ADD COLUMN IF NOT EXISTS parsed_count INTEGER DEFAULT 0"))
|
|
db.execute(text("ALTER TABLE materials ADD COLUMN IF NOT EXISTS row_number INTEGER"))
|
|
db.commit()
|
|
|
|
return {
|
|
"success": True,
|
|
"message": "누락된 컬럼들이 추가되었습니다",
|
|
"added_columns": ["files.parsed_count", "materials.row_number"]
|
|
}
|
|
except Exception as e:
|
|
db.rollback()
|
|
return {"success": False, "error": f"컬럼 추가 실패: {str(e)}"}
|
|
|
|
def validate_file_extension(filename: str) -> bool:
    """Return True if *filename* has one of the allowed spreadsheet extensions."""
    _, ext = os.path.splitext(filename)
    return ext.lower() in ALLOWED_EXTENSIONS
|
|
|
|
def generate_unique_filename(original_filename: str) -> str:
    """Build a collision-resistant name: <stem>_<YYYYmmdd_HHMMSS>_<uuid8><suffix>."""
    original = Path(original_filename)
    stamp = datetime.now().strftime("%Y%m%d_%H%M%S")
    token = uuid.uuid4().hex[:8]
    return f"{original.stem}_{stamp}_{token}{original.suffix}"
|
|
|
|
def parse_dataframe(df):
    """Normalize a raw BOM spreadsheet into a list of material dicts.

    Drops fully-empty rows, maps vendor-specific column headers onto a
    standard schema, coerces quantities to float (falling back to 0),
    extracts an ASTM grade token from the description, and combines
    main/reduced pipe sizes into one ``size_spec`` string.

    Args:
        df: pandas.DataFrame read from an uploaded CSV/Excel file.

    Returns:
        list[dict]: one dict per row with a usable description, with keys
        original_description, quantity, unit, size_spec, material_grade,
        line_number, row_number.
    """
    df = df.dropna(how='all')
    # Normalize headers so the alias lookup below is case/whitespace-insensitive.
    df.columns = df.columns.str.strip().str.lower()

    # Standard field -> known header aliases (English and Korean).
    column_mapping = {
        'description': ['description', 'item', 'material', '품명', '자재명'],
        'quantity': ['qty', 'quantity', 'ea', '수량'],
        'main_size': ['main_nom', 'nominal_diameter', 'nd', '주배관'],
        'red_size': ['red_nom', 'reduced_diameter', '축소배관'],
        'length': ['length', 'len', '길이'],
        'weight': ['weight', 'wt', '중량'],
        'dwg_name': ['dwg_name', 'drawing', '도면명'],
        'line_num': ['line_num', 'line_number', '라인번호']
    }

    # First alias found in the sheet wins for each standard field.
    mapped_columns = {}
    for standard_col, possible_names in column_mapping.items():
        for possible_name in possible_names:
            if possible_name in df.columns:
                mapped_columns[standard_col] = possible_name
                break

    materials = []
    for index, row in df.iterrows():
        description = str(row.get(mapped_columns.get('description', ''), ''))
        quantity_raw = row.get(mapped_columns.get('quantity', ''), 0)

        # BUGFIX: was a bare `except:` that also swallowed SystemExit /
        # KeyboardInterrupt; only conversion failures should fall back to 0.
        try:
            quantity = float(quantity_raw) if pd.notna(quantity_raw) else 0
        except (TypeError, ValueError):
            quantity = 0

        # Pull an "ASTM ..." grade token out of the free-text description.
        material_grade = ""
        if "ASTM" in description.upper():
            astm_match = re.search(r'ASTM\s+([A-Z0-9\s]+)', description.upper())
            if astm_match:
                material_grade = astm_match.group(0).strip()

        main_size = str(row.get(mapped_columns.get('main_size', ''), ''))
        red_size = str(row.get(mapped_columns.get('red_size', ''), ''))

        # "main x reduced" for reducers, plain main size otherwise.
        # str(NaN) == 'nan', hence the string comparisons.
        if main_size != 'nan' and red_size != 'nan' and red_size != '':
            size_spec = f"{main_size} x {red_size}"
        elif main_size != 'nan' and main_size != '':
            size_spec = main_size
        else:
            size_spec = ""

        # Skip rows whose description cell was blank/NaN/None.
        if description and description not in ['nan', 'None', '']:
            materials.append({
                'original_description': description,
                'quantity': quantity,
                'unit': "EA",  # sheets rarely carry a unit column; default to "each"
                'size_spec': size_spec,
                'material_grade': material_grade,
                # NOTE(review): uses the DataFrame index, which may be
                # non-contiguous after dropna — confirm this is intended.
                'line_number': index + 1,
                'row_number': index + 1
            })

    return materials
|
|
|
|
def parse_file_data(file_path):
    """Load a BOM spreadsheet from disk and parse it into material dicts.

    Args:
        file_path: path to a .csv, .xlsx or .xls file.

    Returns:
        list[dict]: parsed materials (see parse_dataframe).

    Raises:
        HTTPException: 400 for unsupported formats or unparseable files.
    """
    file_extension = Path(file_path).suffix.lower()

    try:
        if file_extension == ".csv":
            df = pd.read_csv(file_path, encoding='utf-8')
        elif file_extension in [".xlsx", ".xls"]:
            df = pd.read_excel(file_path, sheet_name=0)
        else:
            raise HTTPException(status_code=400, detail="지원하지 않는 파일 형식")

        return parse_dataframe(df)
    except HTTPException:
        # BUGFIX: the "unsupported format" HTTPException above used to be
        # caught by the generic handler below and re-wrapped with the wrong
        # "parse failure" message; propagate it unchanged.
        raise
    except Exception as e:
        raise HTTPException(status_code=400, detail=f"파일 파싱 실패: {str(e)}")
|
|
|
|
@router.post("/upload")
|
|
async def upload_file(
|
|
file: UploadFile = File(...),
|
|
job_no: str = Form(...),
|
|
revision: str = Form("Rev.0"),
|
|
db: Session = Depends(get_db)
|
|
):
|
|
if not validate_file_extension(file.filename):
|
|
raise HTTPException(
|
|
status_code=400,
|
|
detail=f"지원하지 않는 파일 형식입니다. 허용된 확장자: {', '.join(ALLOWED_EXTENSIONS)}"
|
|
)
|
|
|
|
if file.size and file.size > 10 * 1024 * 1024:
|
|
raise HTTPException(status_code=400, detail="파일 크기는 10MB를 초과할 수 없습니다")
|
|
|
|
unique_filename = generate_unique_filename(file.filename)
|
|
file_path = UPLOAD_DIR / unique_filename
|
|
|
|
try:
|
|
with open(file_path, "wb") as buffer:
|
|
shutil.copyfileobj(file.file, buffer)
|
|
except Exception as e:
|
|
raise HTTPException(status_code=500, detail=f"파일 저장 실패: {str(e)}")
|
|
|
|
try:
|
|
materials_data = parse_file_data(str(file_path))
|
|
parsed_count = len(materials_data)
|
|
|
|
# 파일 정보 저장
|
|
file_insert_query = text("""
|
|
INSERT INTO files (filename, original_filename, file_path, job_no, revision, description, file_size, parsed_count, is_active)
|
|
VALUES (:filename, :original_filename, :file_path, :job_no, :revision, :description, :file_size, :parsed_count, :is_active)
|
|
RETURNING id
|
|
""")
|
|
|
|
file_result = db.execute(file_insert_query, {
|
|
"filename": unique_filename,
|
|
"original_filename": file.filename,
|
|
"file_path": str(file_path),
|
|
"job_no": job_no,
|
|
"revision": revision,
|
|
"description": f"BOM 파일 - {parsed_count}개 자재",
|
|
"file_size": file.size,
|
|
"parsed_count": parsed_count,
|
|
"is_active": True
|
|
})
|
|
|
|
file_id = file_result.fetchone()[0]
|
|
|
|
# 자재 데이터 저장
|
|
materials_inserted = 0
|
|
for material_data in materials_data:
|
|
material_insert_query = text("""
|
|
INSERT INTO materials (
|
|
file_id, original_description, quantity, unit, size_spec,
|
|
material_grade, line_number, row_number, classified_category,
|
|
classification_confidence, is_verified, created_at
|
|
)
|
|
VALUES (
|
|
:file_id, :original_description, :quantity, :unit, :size_spec,
|
|
:material_grade, :line_number, :row_number, :classified_category,
|
|
:classification_confidence, :is_verified, :created_at
|
|
)
|
|
""")
|
|
|
|
db.execute(material_insert_query, {
|
|
"file_id": file_id,
|
|
"original_description": material_data["original_description"],
|
|
"quantity": material_data["quantity"],
|
|
"unit": material_data["unit"],
|
|
"size_spec": material_data["size_spec"],
|
|
"material_grade": material_data["material_grade"],
|
|
"line_number": material_data["line_number"],
|
|
"row_number": material_data["row_number"],
|
|
"classified_category": None,
|
|
"classification_confidence": None,
|
|
"is_verified": False,
|
|
"created_at": datetime.now()
|
|
})
|
|
materials_inserted += 1
|
|
|
|
db.commit()
|
|
|
|
return {
|
|
"success": True,
|
|
"message": f"완전한 DB 저장 성공! {materials_inserted}개 자재 저장됨",
|
|
"original_filename": file.filename,
|
|
"file_id": file_id,
|
|
"parsed_materials_count": parsed_count,
|
|
"saved_materials_count": materials_inserted,
|
|
"sample_materials": materials_data[:3] if materials_data else []
|
|
}
|
|
|
|
except Exception as e:
|
|
db.rollback()
|
|
if os.path.exists(file_path):
|
|
os.remove(file_path)
|
|
raise HTTPException(status_code=500, detail=f"파일 처리 실패: {str(e)}")
|
|
@router.get("/materials")
|
|
async def get_materials(
|
|
job_no: Optional[str] = None,
|
|
file_id: Optional[str] = None,
|
|
skip: int = 0,
|
|
limit: int = 100,
|
|
db: Session = Depends(get_db)
|
|
):
|
|
"""저장된 자재 목록 조회"""
|
|
try:
|
|
query = """
|
|
SELECT m.id, m.file_id, m.original_description, m.quantity, m.unit,
|
|
m.size_spec, m.material_grade, m.line_number, m.row_number,
|
|
m.created_at,
|
|
f.original_filename, f.job_no,
|
|
j.job_no, j.job_name
|
|
FROM materials m
|
|
LEFT JOIN files f ON m.file_id = f.id
|
|
LEFT JOIN jobs j ON f.job_no = j.job_no
|
|
WHERE 1=1
|
|
"""
|
|
|
|
params = {}
|
|
|
|
if job_no:
|
|
query += " AND f.job_no = :job_no"
|
|
params["job_no"] = job_no
|
|
|
|
if file_id:
|
|
query += " AND m.file_id = :file_id"
|
|
params["file_id"] = file_id
|
|
|
|
query += " ORDER BY m.line_number ASC LIMIT :limit OFFSET :skip"
|
|
params["limit"] = limit
|
|
params["skip"] = skip
|
|
|
|
result = db.execute(text(query), params)
|
|
materials = result.fetchall()
|
|
|
|
# 전체 개수 조회
|
|
count_query = """
|
|
SELECT COUNT(*) as total
|
|
FROM materials m
|
|
LEFT JOIN files f ON m.file_id = f.id
|
|
WHERE 1=1
|
|
"""
|
|
count_params = {}
|
|
|
|
if job_no:
|
|
count_query += " AND f.job_no = :job_no"
|
|
count_params["job_no"] = job_no
|
|
|
|
if file_id:
|
|
count_query += " AND m.file_id = :file_id"
|
|
count_params["file_id"] = file_id
|
|
|
|
count_result = db.execute(text(count_query), count_params)
|
|
total_count = count_result.fetchone()[0]
|
|
|
|
return {
|
|
"success": True,
|
|
"total_count": total_count,
|
|
"returned_count": len(materials),
|
|
"skip": skip,
|
|
"limit": limit,
|
|
"materials": [
|
|
{
|
|
"id": m.id,
|
|
"file_id": m.file_id,
|
|
"filename": m.original_filename,
|
|
"job_no": m.job_no,
|
|
"project_code": m.official_project_code,
|
|
"project_name": m.project_name,
|
|
"original_description": m.original_description,
|
|
"quantity": float(m.quantity) if m.quantity else 0,
|
|
"unit": m.unit,
|
|
"size_spec": m.size_spec,
|
|
"material_grade": m.material_grade,
|
|
"line_number": m.line_number,
|
|
"row_number": m.row_number,
|
|
"created_at": m.created_at
|
|
}
|
|
for m in materials
|
|
]
|
|
}
|
|
|
|
except Exception as e:
|
|
raise HTTPException(status_code=500, detail=f"자재 조회 실패: {str(e)}")
|
|
|
|
@router.get("/materials/summary")
|
|
async def get_materials_summary(
|
|
job_no: Optional[str] = None,
|
|
file_id: Optional[str] = None,
|
|
db: Session = Depends(get_db)
|
|
):
|
|
"""자재 요약 통계"""
|
|
try:
|
|
query = """
|
|
SELECT
|
|
COUNT(*) as total_items,
|
|
COUNT(DISTINCT m.original_description) as unique_descriptions,
|
|
COUNT(DISTINCT m.size_spec) as unique_sizes,
|
|
COUNT(DISTINCT m.material_grade) as unique_materials,
|
|
SUM(m.quantity) as total_quantity,
|
|
AVG(m.quantity) as avg_quantity,
|
|
MIN(m.created_at) as earliest_upload,
|
|
MAX(m.created_at) as latest_upload
|
|
FROM materials m
|
|
LEFT JOIN files f ON m.file_id = f.id
|
|
WHERE 1=1
|
|
"""
|
|
|
|
params = {}
|
|
|
|
if job_no:
|
|
query += " AND f.job_no = :job_no"
|
|
params["job_no"] = job_no
|
|
|
|
if file_id:
|
|
query += " AND m.file_id = :file_id"
|
|
params["file_id"] = file_id
|
|
|
|
result = db.execute(text(query), params)
|
|
summary = result.fetchone()
|
|
|
|
return {
|
|
"success": True,
|
|
"summary": {
|
|
"total_items": summary.total_items,
|
|
"unique_descriptions": summary.unique_descriptions,
|
|
"unique_sizes": summary.unique_sizes,
|
|
"unique_materials": summary.unique_materials,
|
|
"total_quantity": float(summary.total_quantity) if summary.total_quantity else 0,
|
|
"avg_quantity": round(float(summary.avg_quantity), 2) if summary.avg_quantity else 0,
|
|
"earliest_upload": summary.earliest_upload,
|
|
"latest_upload": summary.latest_upload
|
|
}
|
|
}
|
|
|
|
except Exception as e:
|
|
raise HTTPException(status_code=500, detail=f"요약 조회 실패: {str(e)}")
|
|
# Job validation helper (NOTE: intended to be appended at the end of the file)
|
|
async def validate_job_exists(job_no: str, db: Session):
    """Check that a job exists, is active, and is not already finished.

    Returns {"valid": True, "job": {...}} on success, otherwise
    {"valid": False, "error": "..."} — never raises.
    """
    try:
        row = db.execute(
            text("SELECT job_no, job_name, status FROM jobs WHERE job_no = :job_no AND is_active = true"),
            {"job_no": job_no},
        ).fetchone()

        if row is None:
            return {"valid": False, "error": f"Job No. '{job_no}'를 찾을 수 없습니다"}

        # Finished jobs must not receive new uploads.
        if row.status == '완료':
            return {"valid": False, "error": f"완료된 Job '{row.job_name}'에는 파일을 업로드할 수 없습니다"}

        return {
            "valid": True,
            "job": {
                "job_no": row.job_no,
                "job_name": row.job_name,
                "status": row.status,
            },
        }

    except Exception as e:
        return {"valid": False, "error": f"Job 검증 실패: {str(e)}"}
|