diff --git a/backend/app/main.py b/backend/app/main.py index bc70673..5b961e7 100644 --- a/backend/app/main.py +++ b/backend/app/main.py @@ -1,25 +1,14 @@ -from fastapi import FastAPI, Depends, HTTPException +from fastapi import FastAPI from fastapi.middleware.cors import CORSMiddleware -from sqlalchemy.orm import Session -from sqlalchemy import text -from typing import List -from datetime import datetime - -from .database import get_db, engine -from .models import Base, Project -from .schemas import ProjectCreate, ProjectResponse -from .api import files - -Base.metadata.create_all(bind=engine) +# FastAPI 앱 생성 app = FastAPI( - title="TK-MP-Project API", - description="BOM 시스템 개발 프로젝트 - Phase 3: 파일 처리 시스템", - version="1.0.0", - docs_url="/docs", - redoc_url="/redoc" + title="TK-MP BOM Management API", + description="자재 분류 및 프로젝트 관리 시스템", + version="1.0.0" ) +# CORS 설정 app.add_middleware( CORSMiddleware, allow_origins=["*"], @@ -28,100 +17,30 @@ app.add_middleware( allow_headers=["*"], ) -app.include_router(files.router, prefix="/api/files", tags=["파일 관리"]) +# 라우터들 import 및 등록 +try: + from .routers import files + app.include_router(files.router, prefix="/files", tags=["files"]) +except ImportError: + print("files 라우터를 찾을 수 없습니다") + +try: + from .routers import jobs + app.include_router(jobs.router, prefix="/jobs", tags=["jobs"]) +except ImportError: + print("jobs 라우터를 찾을 수 없습니다") @app.get("/") async def root(): return { - "message": "TK-MP-Project API Server", - "version": "1.0.0 - Phase 3", - "status": "running", - "timestamp": datetime.now().isoformat(), - "new_features": [ - "✅ Phase 1: 기반 시스템 구축", - "✅ Phase 2: 데이터베이스 연동", - "🔄 Phase 3: 파일 처리 시스템 개발 중" - ] + "message": "TK-MP BOM Management API", + "version": "1.0.0", + "endpoints": ["/docs", "/jobs", "/files"] } @app.get("/health") -async def health_check(db: Session = Depends(get_db)): - try: - result = db.execute(text("SELECT 1 as test")) - test_value = result.fetchone()[0] - - return { - "status": "healthy", 
- "database": "connected", - "test_query": test_value == 1, - "timestamp": datetime.now().isoformat(), - "phase": "Phase 3 - 파일 처리 시스템" - } - except Exception as e: - raise HTTPException(status_code=500, detail=f"데이터베이스 연결 실패: {str(e)}") - -@app.get("/api/projects", response_model=List[ProjectResponse]) -async def get_projects(db: Session = Depends(get_db)): - try: - result = db.execute(text(""" - SELECT id, official_project_code, project_name, design_project_code, - is_code_matched, status, created_at, updated_at - FROM projects - ORDER BY created_at DESC - """)) - projects = result.fetchall() - - return [ - ProjectResponse( - id=project.id, - official_project_code=project.official_project_code, - project_name=project.project_name, - design_project_code=project.design_project_code, - is_code_matched=project.is_code_matched, - status=project.status, - created_at=project.created_at, - updated_at=project.updated_at - ) - for project in projects - ] - except Exception as e: - raise HTTPException(status_code=500, detail=f"프로젝트 조회 실패: {str(e)}") - -@app.post("/api/projects", response_model=ProjectResponse) -async def create_project(project: ProjectCreate, db: Session = Depends(get_db)): - try: - insert_query = text(""" - INSERT INTO projects (official_project_code, project_name, design_project_code, is_code_matched, status, created_at) - VALUES (:official_code, :project_name, :design_code, :is_matched, :status, :created_at) - RETURNING id, official_project_code, project_name, design_project_code, is_code_matched, status, created_at, updated_at - """) - - result = db.execute(insert_query, { - "official_code": project.official_project_code, - "project_name": project.project_name, - "design_code": project.design_project_code, - "is_matched": project.is_code_matched, - "status": project.status, - "created_at": datetime.now() - }) - - new_project = result.fetchone() - db.commit() - - return ProjectResponse( - id=new_project.id, - 
official_project_code=new_project.official_project_code, - project_name=new_project.project_name, - design_project_code=new_project.design_project_code, - is_code_matched=new_project.is_code_matched, - status=new_project.status, - created_at=new_project.created_at, - updated_at=new_project.updated_at - ) - - except Exception as e: - db.rollback() - raise HTTPException(status_code=500, detail=f"프로젝트 생성 실패: {str(e)}") +async def health_check(): + return {"status": "healthy", "timestamp": "2024-07-15"} if __name__ == "__main__": import uvicorn diff --git a/backend/app/routers/__init__.py b/backend/app/routers/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/backend/app/routers/files.py b/backend/app/routers/files.py new file mode 100644 index 0000000..096520c --- /dev/null +++ b/backend/app/routers/files.py @@ -0,0 +1,327 @@ +from fastapi import APIRouter, Depends, HTTPException, UploadFile, File, Form +from sqlalchemy.orm import Session +from sqlalchemy import text +from typing import List, Optional +import os +import shutil +from datetime import datetime +import uuid +import pandas as pd +import re +from pathlib import Path + +from ..database import get_db + +router = APIRouter() + +UPLOAD_DIR = Path("uploads") +UPLOAD_DIR.mkdir(exist_ok=True) +ALLOWED_EXTENSIONS = {".xlsx", ".xls", ".csv"} + +@router.get("/") +async def get_files_info(): + return { + "message": "파일 관리 API", + "allowed_extensions": list(ALLOWED_EXTENSIONS), + "upload_directory": str(UPLOAD_DIR) + } + +@router.get("/test") +async def test_endpoint(): + return {"status": "파일 API가 정상 작동합니다!"} + +@router.post("/add-missing-columns") +async def add_missing_columns(db: Session = Depends(get_db)): + """누락된 컬럼들 추가""" + try: + db.execute(text("ALTER TABLE files ADD COLUMN IF NOT EXISTS parsed_count INTEGER DEFAULT 0")) + db.execute(text("ALTER TABLE materials ADD COLUMN IF NOT EXISTS row_number INTEGER")) + db.commit() + + return { + "success": True, + "message": "누락된 컬럼들이 추가되었습니다", + 
"added_columns": ["files.parsed_count", "materials.row_number"] + } + except Exception as e: + db.rollback() + return {"success": False, "error": f"컬럼 추가 실패: {str(e)}"} + +def validate_file_extension(filename: str) -> bool: + return Path(filename).suffix.lower() in ALLOWED_EXTENSIONS + +def generate_unique_filename(original_filename: str) -> str: + timestamp = datetime.now().strftime("%Y%m%d_%H%M%S") + unique_id = str(uuid.uuid4())[:8] + stem = Path(original_filename).stem + suffix = Path(original_filename).suffix + return f"{stem}_{timestamp}_{unique_id}{suffix}" + +def parse_dataframe(df): + df = df.dropna(how='all') + df.columns = df.columns.str.strip().str.lower() + + column_mapping = { + 'description': ['description', 'item', 'material', '품명', '자재명'], + 'quantity': ['qty', 'quantity', 'ea', '수량'], + 'main_size': ['main_nom', 'nominal_diameter', 'nd', '주배관'], + 'red_size': ['red_nom', 'reduced_diameter', '축소배관'], + 'length': ['length', 'len', '길이'], + 'weight': ['weight', 'wt', '중량'], + 'dwg_name': ['dwg_name', 'drawing', '도면명'], + 'line_num': ['line_num', 'line_number', '라인번호'] + } + + mapped_columns = {} + for standard_col, possible_names in column_mapping.items(): + for possible_name in possible_names: + if possible_name in df.columns: + mapped_columns[standard_col] = possible_name + break + + materials = [] + for index, row in df.iterrows(): + description = str(row.get(mapped_columns.get('description', ''), '')) + quantity_raw = row.get(mapped_columns.get('quantity', ''), 0) + + try: + quantity = float(quantity_raw) if pd.notna(quantity_raw) else 0 + except: + quantity = 0 + + material_grade = "" + if "ASTM" in description.upper(): + astm_match = re.search(r'ASTM\s+([A-Z0-9\s]+)', description.upper()) + if astm_match: + material_grade = astm_match.group(0).strip() + + main_size = str(row.get(mapped_columns.get('main_size', ''), '')) + red_size = str(row.get(mapped_columns.get('red_size', ''), '')) + + if main_size != 'nan' and red_size != 'nan' and red_size 
!= '': + size_spec = f"{main_size} x {red_size}" + elif main_size != 'nan' and main_size != '': + size_spec = main_size + else: + size_spec = "" + + if description and description not in ['nan', 'None', '']: + materials.append({ + 'original_description': description, + 'quantity': quantity, + 'unit': "EA", + 'size_spec': size_spec, + 'material_grade': material_grade, + 'line_number': index + 1, + 'row_number': index + 1 + }) + + return materials + +def parse_file_data(file_path): + file_extension = Path(file_path).suffix.lower() + + try: + if file_extension == ".csv": + df = pd.read_csv(file_path, encoding='utf-8') + elif file_extension in [".xlsx", ".xls"]: + df = pd.read_excel(file_path, sheet_name=0) + else: + raise HTTPException(status_code=400, detail="지원하지 않는 파일 형식") + + return parse_dataframe(df) + except Exception as e: + raise HTTPException(status_code=400, detail=f"파일 파싱 실패: {str(e)}") + +@router.post("/upload") +async def upload_file( + file: UploadFile = File(...), + job_no: str = Form(...), + revision: str = Form("Rev.0"), + db: Session = Depends(get_db) +): + # 1. Job 검증 + job_validation = await validate_job_exists(job_no, db) + if not job_validation["valid"]: + raise HTTPException( + status_code=400, + detail=f"Job 오류: {job_validation['error']}" + ) + + job_info = job_validation["job"] + + # 2. 파일 검증 + if not validate_file_extension(file.filename): + raise HTTPException( + status_code=400, + detail=f"지원하지 않는 파일 형식입니다. 허용된 확장자: {', '.join(ALLOWED_EXTENSIONS)}" + ) + + if file.size and file.size > 10 * 1024 * 1024: + raise HTTPException(status_code=400, detail="파일 크기는 10MB를 초과할 수 없습니다") + + # 3. 파일 저장 + unique_filename = generate_unique_filename(file.filename) + file_path = UPLOAD_DIR / unique_filename + + try: + with open(file_path, "wb") as buffer: + shutil.copyfileobj(file.file, buffer) + except Exception as e: + raise HTTPException(status_code=500, detail=f"파일 저장 실패: {str(e)}") + + # 4. 
파일 파싱 및 자재 추출 + try: + materials_data = parse_file_data(str(file_path)) + parsed_count = len(materials_data) + + # 파일 정보 저장 + file_insert_query = text(""" + INSERT INTO files (filename, original_filename, file_path, job_no, revision, description, file_size, parsed_count, is_active) + VALUES (:filename, :original_filename, :file_path, :job_no, :revision, :description, :file_size, :parsed_count, :is_active) + RETURNING id + """) + + file_result = db.execute(file_insert_query, { + "filename": unique_filename, + "original_filename": file.filename, + "file_path": str(file_path), + "job_no": job_no, + "revision": revision, + "description": f"BOM 파일 - {parsed_count}개 자재 ({job_info['job_name']})", + "file_size": file.size, + "parsed_count": parsed_count, + "is_active": True + }) + + file_id = file_result.fetchone()[0] + + # 자재 데이터 저장 + materials_inserted = 0 + for material_data in materials_data: + material_insert_query = text(""" + INSERT INTO materials ( + file_id, original_description, quantity, unit, size_spec, + material_grade, line_number, row_number, classified_category, + classification_confidence, is_verified, created_at + ) + VALUES ( + :file_id, :original_description, :quantity, :unit, :size_spec, + :material_grade, :line_number, :row_number, :classified_category, + :classification_confidence, :is_verified, :created_at + ) + """) + + db.execute(material_insert_query, { + "file_id": file_id, + "original_description": material_data["original_description"], + "quantity": material_data["quantity"], + "unit": material_data["unit"], + "size_spec": material_data["size_spec"], + "material_grade": material_data["material_grade"], + "line_number": material_data["line_number"], + "row_number": material_data["row_number"], + "classified_category": None, + "classification_confidence": None, + "is_verified": False, + "created_at": datetime.now() + }) + materials_inserted += 1 + + db.commit() + + return { + "success": True, + "message": f"Job '{job_info['job_name']}'에 BOM 파일 
업로드 완료!", + "job": job_info, + "file": { + "id": file_id, + "original_filename": file.filename, + "parsed_count": parsed_count, + "saved_count": materials_inserted + }, + "sample_materials": materials_data[:3] if materials_data else [] + } + + except Exception as e: + db.rollback() + if os.path.exists(file_path): + os.remove(file_path) + raise HTTPException(status_code=500, detail=f"파일 처리 실패: {str(e)}") + +@router.get("/materials/summary") +async def get_materials_summary( + job_no: Optional[str] = None, + file_id: Optional[int] = None, + db: Session = Depends(get_db) +): + """자재 요약 통계""" + try: + query = """ + SELECT + COUNT(*) as total_items, + COUNT(DISTINCT m.original_description) as unique_descriptions, + COUNT(DISTINCT m.size_spec) as unique_sizes, + COUNT(DISTINCT m.material_grade) as unique_materials, + SUM(m.quantity) as total_quantity, + AVG(m.quantity) as avg_quantity, + MIN(m.created_at) as earliest_upload, + MAX(m.created_at) as latest_upload + FROM materials m + LEFT JOIN files f ON m.file_id = f.id + WHERE 1=1 + """ + + params = {} + + if job_no: + query += " AND f.job_no = :job_no" + params["job_no"] = job_no + + if file_id: + query += " AND m.file_id = :file_id" + params["file_id"] = file_id + + result = db.execute(text(query), params) + summary = result.fetchone() + + return { + "success": True, + "summary": { + "total_items": summary.total_items, + "unique_descriptions": summary.unique_descriptions, + "unique_sizes": summary.unique_sizes, + "unique_materials": summary.unique_materials, + "total_quantity": float(summary.total_quantity) if summary.total_quantity else 0, + "avg_quantity": round(float(summary.avg_quantity), 2) if summary.avg_quantity else 0, + "earliest_upload": summary.earliest_upload, + "latest_upload": summary.latest_upload + } + } + + except Exception as e: + raise HTTPException(status_code=500, detail=f"요약 조회 실패: {str(e)}") +# Job 검증 함수 (파일 끝에 추가할 예정) +async def validate_job_exists(job_no: str, db: Session): + """Job 존재 여부 및 활성
상태 확인""" + try: + query = text("SELECT job_no, job_name, status FROM jobs WHERE job_no = :job_no AND is_active = true") + job = db.execute(query, {"job_no": job_no}).fetchone() + + if not job: + return {"valid": False, "error": f"Job No. '{job_no}'를 찾을 수 없습니다"} + + if job.status == '완료': + return {"valid": False, "error": f"완료된 Job '{job.job_name}'에는 파일을 업로드할 수 없습니다"} + + return { + "valid": True, + "job": { + "job_no": job.job_no, + "job_name": job.job_name, + "status": job.status + } + } + + except Exception as e: + return {"valid": False, "error": f"Job 검증 실패: {str(e)}"} diff --git a/backend/app/routers/files.py.backup2 b/backend/app/routers/files.py.backup2 new file mode 100644 index 0000000..3af406b --- /dev/null +++ b/backend/app/routers/files.py.backup2 @@ -0,0 +1,399 @@ +from fastapi import APIRouter, Depends, HTTPException, UploadFile, File, Form +from sqlalchemy.orm import Session +from sqlalchemy import text +from typing import List, Optional +import os +import shutil +from datetime import datetime +import uuid +import pandas as pd +import re +from pathlib import Path + +from ..database import get_db + +router = APIRouter() + +UPLOAD_DIR = Path("uploads") +UPLOAD_DIR.mkdir(exist_ok=True) +ALLOWED_EXTENSIONS = {".xlsx", ".xls", ".csv"} + +@router.get("/") +async def get_files_info(): + return { + "message": "파일 관리 API", + "allowed_extensions": list(ALLOWED_EXTENSIONS), + "upload_directory": str(UPLOAD_DIR) + } + +@router.get("/test") +async def test_endpoint(): + return {"status": "파일 API가 정상 작동합니다!"} + +@router.post("/add-missing-columns") +async def add_missing_columns(db: Session = Depends(get_db)): + """누락된 컬럼들 추가""" + try: + db.execute(text("ALTER TABLE files ADD COLUMN IF NOT EXISTS parsed_count INTEGER DEFAULT 0")) + db.execute(text("ALTER TABLE materials ADD COLUMN IF NOT EXISTS row_number INTEGER")) + db.commit() + + return { + "success": True, + "message": "누락된 컬럼들이 추가되었습니다", + "added_columns": ["files.parsed_count", "materials.row_number"] + } 
+ except Exception as e: + db.rollback() + return {"success": False, "error": f"컬럼 추가 실패: {str(e)}"} + +def validate_file_extension(filename: str) -> bool: + return Path(filename).suffix.lower() in ALLOWED_EXTENSIONS + +def generate_unique_filename(original_filename: str) -> str: + timestamp = datetime.now().strftime("%Y%m%d_%H%M%S") + unique_id = str(uuid.uuid4())[:8] + stem = Path(original_filename).stem + suffix = Path(original_filename).suffix + return f"{stem}_{timestamp}_{unique_id}{suffix}" + +def parse_dataframe(df): + df = df.dropna(how='all') + df.columns = df.columns.str.strip().str.lower() + + column_mapping = { + 'description': ['description', 'item', 'material', '품명', '자재명'], + 'quantity': ['qty', 'quantity', 'ea', '수량'], + 'main_size': ['main_nom', 'nominal_diameter', 'nd', '주배관'], + 'red_size': ['red_nom', 'reduced_diameter', '축소배관'], + 'length': ['length', 'len', '길이'], + 'weight': ['weight', 'wt', '중량'], + 'dwg_name': ['dwg_name', 'drawing', '도면명'], + 'line_num': ['line_num', 'line_number', '라인번호'] + } + + mapped_columns = {} + for standard_col, possible_names in column_mapping.items(): + for possible_name in possible_names: + if possible_name in df.columns: + mapped_columns[standard_col] = possible_name + break + + materials = [] + for index, row in df.iterrows(): + description = str(row.get(mapped_columns.get('description', ''), '')) + quantity_raw = row.get(mapped_columns.get('quantity', ''), 0) + + try: + quantity = float(quantity_raw) if pd.notna(quantity_raw) else 0 + except: + quantity = 0 + + material_grade = "" + if "ASTM" in description.upper(): + astm_match = re.search(r'ASTM\s+([A-Z0-9\s]+)', description.upper()) + if astm_match: + material_grade = astm_match.group(0).strip() + + main_size = str(row.get(mapped_columns.get('main_size', ''), '')) + red_size = str(row.get(mapped_columns.get('red_size', ''), '')) + + if main_size != 'nan' and red_size != 'nan' and red_size != '': + size_spec = f"{main_size} x {red_size}" + elif main_size 
!= 'nan' and main_size != '': + size_spec = main_size + else: + size_spec = "" + + if description and description not in ['nan', 'None', '']: + materials.append({ + 'original_description': description, + 'quantity': quantity, + 'unit': "EA", + 'size_spec': size_spec, + 'material_grade': material_grade, + 'line_number': index + 1, + 'row_number': index + 1 + }) + + return materials + +def parse_file_data(file_path): + file_extension = Path(file_path).suffix.lower() + + try: + if file_extension == ".csv": + df = pd.read_csv(file_path, encoding='utf-8') + elif file_extension in [".xlsx", ".xls"]: + df = pd.read_excel(file_path, sheet_name=0) + else: + raise HTTPException(status_code=400, detail="지원하지 않는 파일 형식") + + return parse_dataframe(df) + except Exception as e: + raise HTTPException(status_code=400, detail=f"파일 파싱 실패: {str(e)}") + +@router.post("/upload") +async def upload_file( + file: UploadFile = File(...), + job_no: str = Form(...), + revision: str = Form("Rev.0"), + db: Session = Depends(get_db) +): + if not validate_file_extension(file.filename): + raise HTTPException( + status_code=400, + detail=f"지원하지 않는 파일 형식입니다. 
허용된 확장자: {', '.join(ALLOWED_EXTENSIONS)}" + ) + + if file.size and file.size > 10 * 1024 * 1024: + raise HTTPException(status_code=400, detail="파일 크기는 10MB를 초과할 수 없습니다") + + unique_filename = generate_unique_filename(file.filename) + file_path = UPLOAD_DIR / unique_filename + + try: + with open(file_path, "wb") as buffer: + shutil.copyfileobj(file.file, buffer) + except Exception as e: + raise HTTPException(status_code=500, detail=f"파일 저장 실패: {str(e)}") + + try: + materials_data = parse_file_data(str(file_path)) + parsed_count = len(materials_data) + + # 파일 정보 저장 + file_insert_query = text(""" + INSERT INTO files (filename, original_filename, file_path, job_no, revision, description, file_size, parsed_count, is_active) + VALUES (:filename, :original_filename, :file_path, :job_no, :revision, :description, :file_size, :parsed_count, :is_active) + RETURNING id + """) + + file_result = db.execute(file_insert_query, { + "filename": unique_filename, + "original_filename": file.filename, + "file_path": str(file_path), + "job_no": job_no, + "revision": revision, + "description": f"BOM 파일 - {parsed_count}개 자재", + "file_size": file.size, + "parsed_count": parsed_count, + "is_active": True + }) + + file_id = file_result.fetchone()[0] + + # 자재 데이터 저장 + materials_inserted = 0 + for material_data in materials_data: + material_insert_query = text(""" + INSERT INTO materials ( + file_id, original_description, quantity, unit, size_spec, + material_grade, line_number, row_number, classified_category, + classification_confidence, is_verified, created_at + ) + VALUES ( + :file_id, :original_description, :quantity, :unit, :size_spec, + :material_grade, :line_number, :row_number, :classified_category, + :classification_confidence, :is_verified, :created_at + ) + """) + + db.execute(material_insert_query, { + "file_id": file_id, + "original_description": material_data["original_description"], + "quantity": material_data["quantity"], + "unit": material_data["unit"], + "size_spec": 
material_data["size_spec"], + "material_grade": material_data["material_grade"], + "line_number": material_data["line_number"], + "row_number": material_data["row_number"], + "classified_category": None, + "classification_confidence": None, + "is_verified": False, + "created_at": datetime.now() + }) + materials_inserted += 1 + + db.commit() + + return { + "success": True, + "message": f"완전한 DB 저장 성공! {materials_inserted}개 자재 저장됨", + "original_filename": file.filename, + "file_id": file_id, + "parsed_materials_count": parsed_count, + "saved_materials_count": materials_inserted, + "sample_materials": materials_data[:3] if materials_data else [] + } + + except Exception as e: + db.rollback() + if os.path.exists(file_path): + os.remove(file_path) + raise HTTPException(status_code=500, detail=f"파일 처리 실패: {str(e)}") +@router.get("/materials") +async def get_materials( + job_no: Optional[str] = None, + file_id: Optional[str] = None, + skip: int = 0, + limit: int = 100, + db: Session = Depends(get_db) +): + """저장된 자재 목록 조회""" + try: + query = """ + SELECT m.id, m.file_id, m.original_description, m.quantity, m.unit, + m.size_spec, m.material_grade, m.line_number, m.row_number, + m.created_at, + f.original_filename, f.job_no, + j.job_no, j.job_name + FROM materials m + LEFT JOIN files f ON m.file_id = f.id + LEFT JOIN jobs j ON f.job_no = j.job_no + WHERE 1=1 + """ + + params = {} + + if job_no: + query += " AND f.job_no = :job_no" + params["job_no"] = job_no + + if file_id: + query += " AND m.file_id = :file_id" + params["file_id"] = file_id + + query += " ORDER BY m.line_number ASC LIMIT :limit OFFSET :skip" + params["limit"] = limit + params["skip"] = skip + + result = db.execute(text(query), params) + materials = result.fetchall() + + # 전체 개수 조회 + count_query = """ + SELECT COUNT(*) as total + FROM materials m + LEFT JOIN files f ON m.file_id = f.id + WHERE 1=1 + """ + count_params = {} + + if job_no: + count_query += " AND f.job_no = :job_no" + count_params["job_no"] = 
job_no + + if file_id: + count_query += " AND m.file_id = :file_id" + count_params["file_id"] = file_id + + count_result = db.execute(text(count_query), count_params) + total_count = count_result.fetchone()[0] + + return { + "success": True, + "total_count": total_count, + "returned_count": len(materials), + "skip": skip, + "limit": limit, + "materials": [ + { + "id": m.id, + "file_id": m.file_id, + "filename": m.original_filename, + "job_no": m.job_no, + # NOTE(review): projects schema was replaced by jobs; SELECT above only provides job_name + "job_name": m.job_name, + "original_description": m.original_description, + "quantity": float(m.quantity) if m.quantity else 0, + "unit": m.unit, + "size_spec": m.size_spec, + "material_grade": m.material_grade, + "line_number": m.line_number, + "row_number": m.row_number, + "created_at": m.created_at + } + for m in materials + ] + } + + except Exception as e: + raise HTTPException(status_code=500, detail=f"자재 조회 실패: {str(e)}") + +@router.get("/materials/summary") +async def get_materials_summary( + job_no: Optional[str] = None, + file_id: Optional[str] = None, + db: Session = Depends(get_db) +): + """자재 요약 통계""" + try: + query = """ + SELECT + COUNT(*) as total_items, + COUNT(DISTINCT m.original_description) as unique_descriptions, + COUNT(DISTINCT m.size_spec) as unique_sizes, + COUNT(DISTINCT m.material_grade) as unique_materials, + SUM(m.quantity) as total_quantity, + AVG(m.quantity) as avg_quantity, + MIN(m.created_at) as earliest_upload, + MAX(m.created_at) as latest_upload + FROM materials m + LEFT JOIN files f ON m.file_id = f.id + WHERE 1=1 + """ + + params = {} + + if job_no: + query += " AND f.job_no = :job_no" + params["job_no"] = job_no + + if file_id: + query += " AND m.file_id = :file_id" + params["file_id"] = file_id + + result = db.execute(text(query), params) + summary = result.fetchone() + + return { + "success": True, + "summary": { + "total_items": summary.total_items, + "unique_descriptions": summary.unique_descriptions, + "unique_sizes": 
summary.unique_sizes, + "unique_materials": summary.unique_materials, + "total_quantity": float(summary.total_quantity) if summary.total_quantity else 0, + "avg_quantity": round(float(summary.avg_quantity), 2) if summary.avg_quantity else 0, + "earliest_upload": summary.earliest_upload, + "latest_upload": summary.latest_upload + } + } + + except Exception as e: + raise HTTPException(status_code=500, detail=f"요약 조회 실패: {str(e)}") +# Job 검증 함수 (파일 끝에 추가할 예정) +async def validate_job_exists(job_no: str, db: Session): + """Job 존재 여부 및 활성 상태 확인""" + try: + query = text("SELECT job_no, job_name, status FROM jobs WHERE job_no = :job_no AND is_active = true") + job = db.execute(query, {"job_no": job_no}).fetchone() + + if not job: + return {"valid": False, "error": f"Job No. '{job_no}'를 찾을 수 없습니다"} + + if job.status == '완료': + return {"valid": False, "error": f"완료된 Job '{job.job_name}'에는 파일을 업로드할 수 없습니다"} + + return { + "valid": True, + "job": { + "job_no": job.job_no, + "job_name": job.job_name, + "status": job.status + } + } + + except Exception as e: + return {"valid": False, "error": f"Job 검증 실패: {str(e)}"} diff --git a/backend/app/routers/jobs.py b/backend/app/routers/jobs.py new file mode 100644 index 0000000..779f573 --- /dev/null +++ b/backend/app/routers/jobs.py @@ -0,0 +1,176 @@ +from fastapi import APIRouter, Depends, HTTPException, Query +from sqlalchemy.orm import Session +from sqlalchemy import text +from typing import Optional +from datetime import datetime, date +from pydantic import BaseModel + +from ..database import get_db + +router = APIRouter() + +# Pydantic 모델들 +class JobCreate(BaseModel): + job_no: str + job_name: str + client_name: str + end_user: Optional[str] = None + epc_company: Optional[str] = None + project_site: Optional[str] = None + contract_date: Optional[date] = None + delivery_date: Optional[date] = None + delivery_terms: Optional[str] = None + description: Optional[str] = None + +@router.get("/") +async def get_jobs( + skip: int = 
Query(0, ge=0), + limit: int = Query(100, ge=1, le=1000), + search: Optional[str] = Query(None), + db: Session = Depends(get_db) +): + """Job 목록 조회""" + try: + query = """ + SELECT job_no, job_name, client_name, end_user, epc_company, + project_site, contract_date, delivery_date, delivery_terms, + status, description, created_by, created_at, updated_at, is_active + FROM jobs + WHERE is_active = true + """ + + params = {} + + if search: + query += " AND (job_no ILIKE :search OR job_name ILIKE :search OR client_name ILIKE :search)" + params["search"] = f"%{search}%" + + query += " ORDER BY created_at DESC LIMIT :limit OFFSET :skip" + params["limit"] = limit + params["skip"] = skip + + result = db.execute(text(query), params) + jobs = result.fetchall() + + return { + "success": True, + "total_count": len(jobs), + "jobs": [ + { + "job_no": job.job_no, + "job_name": job.job_name, + "client_name": job.client_name, + "end_user": job.end_user, + "epc_company": job.epc_company, + "project_site": job.project_site, + "contract_date": job.contract_date, + "delivery_date": job.delivery_date, + "delivery_terms": job.delivery_terms, + "status": job.status, + "description": job.description, + "created_at": job.created_at + } + for job in jobs + ] + } + + except Exception as e: + raise HTTPException(status_code=500, detail=f"Job 목록 조회 실패: {str(e)}") + +@router.get("/{job_no}") +async def get_job(job_no: str, db: Session = Depends(get_db)): + """Job 상세 정보 조회""" + try: + query = text(""" + SELECT job_no, job_name, client_name, end_user, epc_company, + project_site, contract_date, delivery_date, delivery_terms, + status, description, created_by, created_at, updated_at, is_active + FROM jobs + WHERE job_no = :job_no AND is_active = true + """) + + result = db.execute(query, {"job_no": job_no}) + job = result.fetchone() + + if not job: + raise HTTPException(status_code=404, detail="Job을 찾을 수 없습니다") + + return { + "success": True, + "job": { + "job_no": job.job_no, + "job_name": 
job.job_name, + "client_name": job.client_name, + "end_user": job.end_user, + "epc_company": job.epc_company, + "project_site": job.project_site, + "contract_date": job.contract_date, + "delivery_date": job.delivery_date, + "delivery_terms": job.delivery_terms, + "status": job.status, + "description": job.description, + "created_by": job.created_by, + "created_at": job.created_at + } + } + + except HTTPException: + raise + except Exception as e: + raise HTTPException(status_code=500, detail=f"Job 조회 실패: {str(e)}") + +@router.post("/") +async def create_job(job: JobCreate, db: Session = Depends(get_db)): + """새 Job 생성""" + try: + # Job No. 중복 확인 + check_query = text("SELECT job_no FROM jobs WHERE job_no = :job_no") + existing = db.execute(check_query, {"job_no": job.job_no}).fetchone() + + if existing: + raise HTTPException( + status_code=400, + detail=f"Job No. '{job.job_no}'가 이미 존재합니다" + ) + + # 새 Job 생성 + insert_query = text(""" + INSERT INTO jobs ( + job_no, job_name, client_name, end_user, epc_company, + project_site, contract_date, delivery_date, delivery_terms, + description, created_by, status, is_active + ) + VALUES ( + :job_no, :job_name, :client_name, :end_user, :epc_company, + :project_site, :contract_date, :delivery_date, :delivery_terms, + :description, :created_by, :status, :is_active + ) + RETURNING job_no, job_name, client_name + """) + + result = db.execute(insert_query, { + **job.dict(), + "created_by": "admin", + "status": "진행중", + "is_active": True + }) + + new_job = result.fetchone() + db.commit() + + return { + "success": True, + "message": "Job이 성공적으로 생성되었습니다", + "job": { + "job_no": new_job.job_no, + "job_name": new_job.job_name, + "client_name": new_job.client_name + } + } + + except HTTPException: + db.rollback() + raise + except Exception as e: + db.rollback() + raise HTTPException(status_code=500, detail=f"Job 생성 실패: {str(e)}") diff --git a/backend/test_bom.csv b/backend/test_bom.csv new file mode 100644 index 0000000..15b983f --- 
/dev/null +++ b/backend/test_bom.csv @@ -0,0 +1,5 @@ +Description,Quantity,Unit,Size +"PIPE ASTM A106 GR.B",10,EA,4" +"ELBOW 90° ASTM A234",5,EA,4" +"VALVE GATE ASTM A216",2,EA,4" +"FLANGE WELD NECK",8,EA,4"