Compare commits
2 Commits
cddccccf50
...
c9e0d90de4
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
c9e0d90de4 | ||
|
|
ffe4f0f969 |
@@ -1,25 +1,14 @@
|
||||
from fastapi import FastAPI, Depends, HTTPException
|
||||
from fastapi import FastAPI
|
||||
from fastapi.middleware.cors import CORSMiddleware
|
||||
from sqlalchemy.orm import Session
|
||||
from sqlalchemy import text
|
||||
from typing import List
|
||||
from datetime import datetime
|
||||
|
||||
from .database import get_db, engine
|
||||
from .models import Base, Project
|
||||
from .schemas import ProjectCreate, ProjectResponse
|
||||
from .api import files
|
||||
|
||||
Base.metadata.create_all(bind=engine)
|
||||
|
||||
# FastAPI 앱 생성
|
||||
app = FastAPI(
|
||||
title="TK-MP-Project API",
|
||||
description="BOM 시스템 개발 프로젝트 - Phase 3: 파일 처리 시스템",
|
||||
version="1.0.0",
|
||||
docs_url="/docs",
|
||||
redoc_url="/redoc"
|
||||
title="TK-MP BOM Management API",
|
||||
description="자재 분류 및 프로젝트 관리 시스템",
|
||||
version="1.0.0"
|
||||
)
|
||||
|
||||
# CORS 설정
|
||||
app.add_middleware(
|
||||
CORSMiddleware,
|
||||
allow_origins=["*"],
|
||||
@@ -28,100 +17,30 @@ app.add_middleware(
|
||||
allow_headers=["*"],
|
||||
)
|
||||
|
||||
app.include_router(files.router, prefix="/api/files", tags=["파일 관리"])
|
||||
# 라우터들 import 및 등록
|
||||
try:
|
||||
from .routers import files
|
||||
app.include_router(files.router, prefix="/files", tags=["files"])
|
||||
except ImportError:
|
||||
print("files 라우터를 찾을 수 없습니다")
|
||||
|
||||
try:
|
||||
from .routers import jobs
|
||||
app.include_router(jobs.router, prefix="/jobs", tags=["jobs"])
|
||||
except ImportError:
|
||||
print("jobs 라우터를 찾을 수 없습니다")
|
||||
|
||||
@app.get("/")
|
||||
async def root():
|
||||
return {
|
||||
"message": "TK-MP-Project API Server",
|
||||
"version": "1.0.0 - Phase 3",
|
||||
"status": "running",
|
||||
"timestamp": datetime.now().isoformat(),
|
||||
"new_features": [
|
||||
"✅ Phase 1: 기반 시스템 구축",
|
||||
"✅ Phase 2: 데이터베이스 연동",
|
||||
"🔄 Phase 3: 파일 처리 시스템 개발 중"
|
||||
]
|
||||
"message": "TK-MP BOM Management API",
|
||||
"version": "1.0.0",
|
||||
"endpoints": ["/docs", "/jobs", "/files"]
|
||||
}
|
||||
|
||||
@app.get("/health")
|
||||
async def health_check(db: Session = Depends(get_db)):
|
||||
try:
|
||||
result = db.execute(text("SELECT 1 as test"))
|
||||
test_value = result.fetchone()[0]
|
||||
|
||||
return {
|
||||
"status": "healthy",
|
||||
"database": "connected",
|
||||
"test_query": test_value == 1,
|
||||
"timestamp": datetime.now().isoformat(),
|
||||
"phase": "Phase 3 - 파일 처리 시스템"
|
||||
}
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=f"데이터베이스 연결 실패: {str(e)}")
|
||||
|
||||
@app.get("/api/projects", response_model=List[ProjectResponse])
|
||||
async def get_projects(db: Session = Depends(get_db)):
|
||||
try:
|
||||
result = db.execute(text("""
|
||||
SELECT id, official_project_code, project_name, design_project_code,
|
||||
is_code_matched, status, created_at, updated_at
|
||||
FROM projects
|
||||
ORDER BY created_at DESC
|
||||
"""))
|
||||
projects = result.fetchall()
|
||||
|
||||
return [
|
||||
ProjectResponse(
|
||||
id=project.id,
|
||||
official_project_code=project.official_project_code,
|
||||
project_name=project.project_name,
|
||||
design_project_code=project.design_project_code,
|
||||
is_code_matched=project.is_code_matched,
|
||||
status=project.status,
|
||||
created_at=project.created_at,
|
||||
updated_at=project.updated_at
|
||||
)
|
||||
for project in projects
|
||||
]
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=f"프로젝트 조회 실패: {str(e)}")
|
||||
|
||||
@app.post("/api/projects", response_model=ProjectResponse)
|
||||
async def create_project(project: ProjectCreate, db: Session = Depends(get_db)):
|
||||
try:
|
||||
insert_query = text("""
|
||||
INSERT INTO projects (official_project_code, project_name, design_project_code, is_code_matched, status, created_at)
|
||||
VALUES (:official_code, :project_name, :design_code, :is_matched, :status, :created_at)
|
||||
RETURNING id, official_project_code, project_name, design_project_code, is_code_matched, status, created_at, updated_at
|
||||
""")
|
||||
|
||||
result = db.execute(insert_query, {
|
||||
"official_code": project.official_project_code,
|
||||
"project_name": project.project_name,
|
||||
"design_code": project.design_project_code,
|
||||
"is_matched": project.is_code_matched,
|
||||
"status": project.status,
|
||||
"created_at": datetime.now()
|
||||
})
|
||||
|
||||
new_project = result.fetchone()
|
||||
db.commit()
|
||||
|
||||
return ProjectResponse(
|
||||
id=new_project.id,
|
||||
official_project_code=new_project.official_project_code,
|
||||
project_name=new_project.project_name,
|
||||
design_project_code=new_project.design_project_code,
|
||||
is_code_matched=new_project.is_code_matched,
|
||||
status=new_project.status,
|
||||
created_at=new_project.created_at,
|
||||
updated_at=new_project.updated_at
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
db.rollback()
|
||||
raise HTTPException(status_code=500, detail=f"프로젝트 생성 실패: {str(e)}")
|
||||
async def health_check():
|
||||
return {"status": "healthy", "timestamp": "2024-07-15"}
|
||||
|
||||
if __name__ == "__main__":
|
||||
import uvicorn
|
||||
|
||||
0
backend/app/routers/__init__.py
Normal file
0
backend/app/routers/__init__.py
Normal file
327
backend/app/routers/files.py
Normal file
327
backend/app/routers/files.py
Normal file
@@ -0,0 +1,327 @@
|
||||
from fastapi import APIRouter, Depends, HTTPException, UploadFile, File, Form
|
||||
from sqlalchemy.orm import Session
|
||||
from sqlalchemy import text
|
||||
from typing import List, Optional
|
||||
import os
|
||||
import shutil
|
||||
from datetime import datetime
|
||||
import uuid
|
||||
import pandas as pd
|
||||
import re
|
||||
from pathlib import Path
|
||||
|
||||
from ..database import get_db
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
UPLOAD_DIR = Path("uploads")
|
||||
UPLOAD_DIR.mkdir(exist_ok=True)
|
||||
ALLOWED_EXTENSIONS = {".xlsx", ".xls", ".csv"}
|
||||
|
||||
@router.get("/")
|
||||
async def get_files_info():
|
||||
return {
|
||||
"message": "파일 관리 API",
|
||||
"allowed_extensions": list(ALLOWED_EXTENSIONS),
|
||||
"upload_directory": str(UPLOAD_DIR)
|
||||
}
|
||||
|
||||
@router.get("/test")
|
||||
async def test_endpoint():
|
||||
return {"status": "파일 API가 정상 작동합니다!"}
|
||||
|
||||
@router.post("/add-missing-columns")
|
||||
async def add_missing_columns(db: Session = Depends(get_db)):
|
||||
"""누락된 컬럼들 추가"""
|
||||
try:
|
||||
db.execute(text("ALTER TABLE files ADD COLUMN IF NOT EXISTS parsed_count INTEGER DEFAULT 0"))
|
||||
db.execute(text("ALTER TABLE materials ADD COLUMN IF NOT EXISTS row_number INTEGER"))
|
||||
db.commit()
|
||||
|
||||
return {
|
||||
"success": True,
|
||||
"message": "누락된 컬럼들이 추가되었습니다",
|
||||
"added_columns": ["files.parsed_count", "materials.row_number"]
|
||||
}
|
||||
except Exception as e:
|
||||
db.rollback()
|
||||
return {"success": False, "error": f"컬럼 추가 실패: {str(e)}"}
|
||||
|
||||
def validate_file_extension(filename: str) -> bool:
    """Return True if *filename* carries one of the allowed upload extensions."""
    extension = Path(filename).suffix.lower()
    return extension in ALLOWED_EXTENSIONS
|
||||
|
||||
def generate_unique_filename(original_filename: str) -> str:
    """Build a collision-resistant name: <stem>_<YYYYmmdd_HHMMSS>_<uuid8><suffix>."""
    original = Path(original_filename)
    stamp = datetime.now().strftime("%Y%m%d_%H%M%S")
    # First 8 chars of a random UUID are enough to disambiguate same-second uploads.
    token = str(uuid.uuid4())[:8]
    return f"{original.stem}_{stamp}_{token}{original.suffix}"
|
||||
|
||||
def parse_dataframe(df):
    """Extract BOM material rows from a raw spreadsheet DataFrame.

    Normalizes headers, maps known column aliases (English/Korean) onto
    standard fields, and returns a list of material dicts. Rows without a
    usable description are skipped.

    Args:
        df: pandas DataFrame as read from an uploaded csv/xlsx sheet.

    Returns:
        list[dict]: one dict per material row with keys original_description,
        quantity, unit, size_spec, material_grade, line_number, row_number.
    """
    # Drop fully-empty rows, then normalize headers for alias matching.
    df = df.dropna(how='all')
    df.columns = df.columns.str.strip().str.lower()

    # Standard field -> accepted header aliases (English + Korean).
    column_mapping = {
        'description': ['description', 'item', 'material', '품명', '자재명'],
        'quantity': ['qty', 'quantity', 'ea', '수량'],
        'main_size': ['main_nom', 'nominal_diameter', 'nd', '주배관'],
        'red_size': ['red_nom', 'reduced_diameter', '축소배관'],
        'length': ['length', 'len', '길이'],
        'weight': ['weight', 'wt', '중량'],
        'dwg_name': ['dwg_name', 'drawing', '도면명'],
        'line_num': ['line_num', 'line_number', '라인번호']
    }

    # First alias present in the sheet wins for each standard field.
    mapped_columns = {}
    for standard_col, possible_names in column_mapping.items():
        for possible_name in possible_names:
            if possible_name in df.columns:
                mapped_columns[standard_col] = possible_name
                break

    materials = []
    for index, row in df.iterrows():
        description = str(row.get(mapped_columns.get('description', ''), ''))
        quantity_raw = row.get(mapped_columns.get('quantity', ''), 0)

        # Fall back to 0 on non-numeric quantities instead of aborting the row.
        # BUG FIX: was a bare `except:` which also swallowed KeyboardInterrupt
        # and SystemExit; only conversion errors are expected here.
        try:
            quantity = float(quantity_raw) if pd.notna(quantity_raw) else 0
        except (TypeError, ValueError):
            quantity = 0

        # Pull an ASTM grade (e.g. "ASTM A106 GR B") out of the description.
        material_grade = ""
        if "ASTM" in description.upper():
            astm_match = re.search(r'ASTM\s+([A-Z0-9\s]+)', description.upper())
            if astm_match:
                material_grade = astm_match.group(0).strip()

        # str() turns missing cells into the literal 'nan', hence the checks.
        main_size = str(row.get(mapped_columns.get('main_size', ''), ''))
        red_size = str(row.get(mapped_columns.get('red_size', ''), ''))

        # Size spec: "main x reduced" for reducers, otherwise just the main size.
        if main_size != 'nan' and red_size != 'nan' and red_size != '':
            size_spec = f"{main_size} x {red_size}"
        elif main_size != 'nan' and main_size != '':
            size_spec = main_size
        else:
            size_spec = ""

        # Skip rows whose description cell was blank / NaN / None.
        if description and description not in ['nan', 'None', '']:
            materials.append({
                'original_description': description,
                'quantity': quantity,
                'unit': "EA",
                'size_spec': size_spec,
                'material_grade': material_grade,
                # NOTE(review): uses the DataFrame index label + 1; after
                # dropna the original labels are kept, so numbering can be
                # sparse unless the sheet had a default RangeIndex — confirm.
                'line_number': index + 1,
                'row_number': index + 1
            })

    return materials
|
||||
|
||||
def parse_file_data(file_path):
    """Read a BOM file (.csv/.xlsx/.xls) and return parsed material dicts.

    Args:
        file_path: path to the saved upload on disk.

    Raises:
        HTTPException: 400 for unsupported extensions or any parse failure.
    """
    file_extension = Path(file_path).suffix.lower()

    try:
        if file_extension == ".csv":
            df = pd.read_csv(file_path, encoding='utf-8')
        elif file_extension in [".xlsx", ".xls"]:
            df = pd.read_excel(file_path, sheet_name=0)
        else:
            raise HTTPException(status_code=400, detail="지원하지 않는 파일 형식")

        return parse_dataframe(df)
    except HTTPException:
        # BUG FIX: the broad handler below used to catch our own 400 for
        # unsupported extensions and re-wrap it as a generic parse failure,
        # hiding the real reason from the client. Re-raise it untouched.
        raise
    except Exception as e:
        raise HTTPException(status_code=400, detail=f"파일 파싱 실패: {str(e)}")
|
||||
|
||||
@router.post("/upload")
|
||||
async def upload_file(
|
||||
file: UploadFile = File(...),
|
||||
job_no: str = Form(...),
|
||||
revision: str = Form("Rev.0"),
|
||||
db: Session = Depends(get_db)
|
||||
):
|
||||
# 1. Job 검증
|
||||
job_validation = await validate_job_exists(job_no, db)
|
||||
if not job_validation["valid"]:
|
||||
raise HTTPException(
|
||||
status_code=400,
|
||||
detail=f"Job 오류: {job_validation['error']}"
|
||||
)
|
||||
|
||||
job_info = job_validation["job"]
|
||||
|
||||
# 2. 파일 검증
|
||||
if not validate_file_extension(file.filename):
|
||||
raise HTTPException(
|
||||
status_code=400,
|
||||
detail=f"지원하지 않는 파일 형식입니다. 허용된 확장자: {', '.join(ALLOWED_EXTENSIONS)}"
|
||||
)
|
||||
|
||||
if file.size and file.size > 10 * 1024 * 1024:
|
||||
raise HTTPException(status_code=400, detail="파일 크기는 10MB를 초과할 수 없습니다")
|
||||
|
||||
# 3. 파일 저장
|
||||
unique_filename = generate_unique_filename(file.filename)
|
||||
file_path = UPLOAD_DIR / unique_filename
|
||||
|
||||
try:
|
||||
with open(file_path, "wb") as buffer:
|
||||
shutil.copyfileobj(file.file, buffer)
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=f"파일 저장 실패: {str(e)}")
|
||||
|
||||
# 4. 파일 파싱 및 자재 추출
|
||||
try:
|
||||
materials_data = parse_file_data(str(file_path))
|
||||
parsed_count = len(materials_data)
|
||||
|
||||
# 파일 정보 저장
|
||||
file_insert_query = text("""
|
||||
INSERT INTO files (filename, original_filename, file_path, job_no, revision, description, file_size, parsed_count, is_active)
|
||||
VALUES (:filename, :original_filename, :file_path, :job_no, :revision, :description, :file_size, :parsed_count, :is_active)
|
||||
RETURNING id
|
||||
""")
|
||||
|
||||
file_result = db.execute(file_insert_query, {
|
||||
"filename": unique_filename,
|
||||
"original_filename": file.filename,
|
||||
"file_path": str(file_path),
|
||||
"job_no": job_no,
|
||||
"revision": revision,
|
||||
"description": f"BOM 파일 - {parsed_count}개 자재 ({job_info['job_name']})",
|
||||
"file_size": file.size,
|
||||
"parsed_count": parsed_count,
|
||||
"is_active": True
|
||||
})
|
||||
|
||||
file_id = file_result.fetchone()[0]
|
||||
|
||||
# 자재 데이터 저장
|
||||
materials_inserted = 0
|
||||
for material_data in materials_data:
|
||||
material_insert_query = text("""
|
||||
INSERT INTO materials (
|
||||
file_id, original_description, quantity, unit, size_spec,
|
||||
material_grade, line_number, row_number, classified_category,
|
||||
classification_confidence, is_verified, created_at
|
||||
)
|
||||
VALUES (
|
||||
:file_id, :original_description, :quantity, :unit, :size_spec,
|
||||
:material_grade, :line_number, :row_number, :classified_category,
|
||||
:classification_confidence, :is_verified, :created_at
|
||||
)
|
||||
""")
|
||||
|
||||
db.execute(material_insert_query, {
|
||||
"file_id": file_id,
|
||||
"original_description": material_data["original_description"],
|
||||
"quantity": material_data["quantity"],
|
||||
"unit": material_data["unit"],
|
||||
"size_spec": material_data["size_spec"],
|
||||
"material_grade": material_data["material_grade"],
|
||||
"line_number": material_data["line_number"],
|
||||
"row_number": material_data["row_number"],
|
||||
"classified_category": None,
|
||||
"classification_confidence": None,
|
||||
"is_verified": False,
|
||||
"created_at": datetime.now()
|
||||
})
|
||||
materials_inserted += 1
|
||||
|
||||
db.commit()
|
||||
|
||||
return {
|
||||
"success": True,
|
||||
"message": f"Job '{job_info['job_name']}'에 BOM 파일 업로드 완료!",
|
||||
"job": job_info,
|
||||
"file": {
|
||||
"id": file_id,
|
||||
"original_filename": file.filename,
|
||||
"parsed_count": parsed_count,
|
||||
"saved_count": materials_inserted
|
||||
},
|
||||
"sample_materials": materials_data[:3] if materials_data else []
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
db.rollback()
|
||||
if os.path.exists(file_path):
|
||||
os.remove(file_path)
|
||||
raise HTTPException(status_code=500, detail=f"파일 처리 실패: {str(e)}")
|
||||
|
||||
@router.get("/materials/summary")
|
||||
async def get_materials_summary(
|
||||
job_no: Optional[str] = None,
|
||||
file_id: Optional[str] = None,
|
||||
db: Session = Depends(get_db)
|
||||
):
|
||||
"""자재 요약 통계"""
|
||||
try:
|
||||
query = """
|
||||
SELECT
|
||||
COUNT(*) as total_items,
|
||||
COUNT(DISTINCT m.original_description) as unique_descriptions,
|
||||
COUNT(DISTINCT m.size_spec) as unique_sizes,
|
||||
COUNT(DISTINCT m.material_grade) as unique_materials,
|
||||
SUM(m.quantity) as total_quantity,
|
||||
AVG(m.quantity) as avg_quantity,
|
||||
MIN(m.created_at) as earliest_upload,
|
||||
MAX(m.created_at) as latest_upload
|
||||
FROM materials m
|
||||
LEFT JOIN files f ON m.file_id = f.id
|
||||
WHERE 1=1
|
||||
"""
|
||||
|
||||
params = {}
|
||||
|
||||
if job_no:
|
||||
query += " AND f.job_no = :job_no"
|
||||
params["job_no"] = job_no
|
||||
|
||||
if file_id:
|
||||
query += " AND m.file_id = :file_id"
|
||||
params["file_id"] = file_id
|
||||
|
||||
result = db.execute(text(query), params)
|
||||
summary = result.fetchone()
|
||||
|
||||
return {
|
||||
"success": True,
|
||||
"summary": {
|
||||
"total_items": summary.total_items,
|
||||
"unique_descriptions": summary.unique_descriptions,
|
||||
"unique_sizes": summary.unique_sizes,
|
||||
"unique_materials": summary.unique_materials,
|
||||
"total_quantity": float(summary.total_quantity) if summary.total_quantity else 0,
|
||||
"avg_quantity": round(float(summary.avg_quantity), 2) if summary.avg_quantity else 0,
|
||||
"earliest_upload": summary.earliest_upload,
|
||||
"latest_upload": summary.latest_upload
|
||||
}
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=f"요약 조회 실패: {str(e)}")
|
||||
async def validate_job_exists(job_no: str, db: Session):
    """Check that *job_no* refers to an active, not-yet-completed job.

    Returns {"valid": True, "job": {...}} on success, otherwise
    {"valid": False, "error": <message>}. Never raises — DB errors are
    folded into the error payload.
    """
    try:
        query = text("SELECT job_no, job_name, status FROM jobs WHERE job_no = :job_no AND is_active = true")
        job = db.execute(query, {"job_no": job_no}).fetchone()

        # Guard clauses: unknown job, then completed job.
        if not job:
            return {"valid": False, "error": f"Job No. '{job_no}'를 찾을 수 없습니다"}
        if job.status == '완료':
            return {"valid": False, "error": f"완료된 Job '{job.job_name}'에는 파일을 업로드할 수 없습니다"}

        job_payload = {
            "job_no": job.job_no,
            "job_name": job.job_name,
            "status": job.status,
        }
        return {"valid": True, "job": job_payload}

    except Exception as e:
        return {"valid": False, "error": f"Job 검증 실패: {str(e)}"}
|
||||
399
backend/app/routers/files.py.backup2
Normal file
399
backend/app/routers/files.py.backup2
Normal file
@@ -0,0 +1,399 @@
|
||||
from fastapi import APIRouter, Depends, HTTPException, UploadFile, File, Form
|
||||
from sqlalchemy.orm import Session
|
||||
from sqlalchemy import text
|
||||
from typing import List, Optional
|
||||
import os
|
||||
import shutil
|
||||
from datetime import datetime
|
||||
import uuid
|
||||
import pandas as pd
|
||||
import re
|
||||
from pathlib import Path
|
||||
|
||||
from ..database import get_db
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
UPLOAD_DIR = Path("uploads")
|
||||
UPLOAD_DIR.mkdir(exist_ok=True)
|
||||
ALLOWED_EXTENSIONS = {".xlsx", ".xls", ".csv"}
|
||||
|
||||
@router.get("/")
|
||||
async def get_files_info():
|
||||
return {
|
||||
"message": "파일 관리 API",
|
||||
"allowed_extensions": list(ALLOWED_EXTENSIONS),
|
||||
"upload_directory": str(UPLOAD_DIR)
|
||||
}
|
||||
|
||||
@router.get("/test")
|
||||
async def test_endpoint():
|
||||
return {"status": "파일 API가 정상 작동합니다!"}
|
||||
|
||||
@router.post("/add-missing-columns")
|
||||
async def add_missing_columns(db: Session = Depends(get_db)):
|
||||
"""누락된 컬럼들 추가"""
|
||||
try:
|
||||
db.execute(text("ALTER TABLE files ADD COLUMN IF NOT EXISTS parsed_count INTEGER DEFAULT 0"))
|
||||
db.execute(text("ALTER TABLE materials ADD COLUMN IF NOT EXISTS row_number INTEGER"))
|
||||
db.commit()
|
||||
|
||||
return {
|
||||
"success": True,
|
||||
"message": "누락된 컬럼들이 추가되었습니다",
|
||||
"added_columns": ["files.parsed_count", "materials.row_number"]
|
||||
}
|
||||
except Exception as e:
|
||||
db.rollback()
|
||||
return {"success": False, "error": f"컬럼 추가 실패: {str(e)}"}
|
||||
|
||||
def validate_file_extension(filename: str) -> bool:
|
||||
return Path(filename).suffix.lower() in ALLOWED_EXTENSIONS
|
||||
|
||||
def generate_unique_filename(original_filename: str) -> str:
|
||||
timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
|
||||
unique_id = str(uuid.uuid4())[:8]
|
||||
stem = Path(original_filename).stem
|
||||
suffix = Path(original_filename).suffix
|
||||
return f"{stem}_{timestamp}_{unique_id}{suffix}"
|
||||
|
||||
def parse_dataframe(df):
|
||||
df = df.dropna(how='all')
|
||||
df.columns = df.columns.str.strip().str.lower()
|
||||
|
||||
column_mapping = {
|
||||
'description': ['description', 'item', 'material', '품명', '자재명'],
|
||||
'quantity': ['qty', 'quantity', 'ea', '수량'],
|
||||
'main_size': ['main_nom', 'nominal_diameter', 'nd', '주배관'],
|
||||
'red_size': ['red_nom', 'reduced_diameter', '축소배관'],
|
||||
'length': ['length', 'len', '길이'],
|
||||
'weight': ['weight', 'wt', '중량'],
|
||||
'dwg_name': ['dwg_name', 'drawing', '도면명'],
|
||||
'line_num': ['line_num', 'line_number', '라인번호']
|
||||
}
|
||||
|
||||
mapped_columns = {}
|
||||
for standard_col, possible_names in column_mapping.items():
|
||||
for possible_name in possible_names:
|
||||
if possible_name in df.columns:
|
||||
mapped_columns[standard_col] = possible_name
|
||||
break
|
||||
|
||||
materials = []
|
||||
for index, row in df.iterrows():
|
||||
description = str(row.get(mapped_columns.get('description', ''), ''))
|
||||
quantity_raw = row.get(mapped_columns.get('quantity', ''), 0)
|
||||
|
||||
try:
|
||||
quantity = float(quantity_raw) if pd.notna(quantity_raw) else 0
|
||||
except:
|
||||
quantity = 0
|
||||
|
||||
material_grade = ""
|
||||
if "ASTM" in description.upper():
|
||||
astm_match = re.search(r'ASTM\s+([A-Z0-9\s]+)', description.upper())
|
||||
if astm_match:
|
||||
material_grade = astm_match.group(0).strip()
|
||||
|
||||
main_size = str(row.get(mapped_columns.get('main_size', ''), ''))
|
||||
red_size = str(row.get(mapped_columns.get('red_size', ''), ''))
|
||||
|
||||
if main_size != 'nan' and red_size != 'nan' and red_size != '':
|
||||
size_spec = f"{main_size} x {red_size}"
|
||||
elif main_size != 'nan' and main_size != '':
|
||||
size_spec = main_size
|
||||
else:
|
||||
size_spec = ""
|
||||
|
||||
if description and description not in ['nan', 'None', '']:
|
||||
materials.append({
|
||||
'original_description': description,
|
||||
'quantity': quantity,
|
||||
'unit': "EA",
|
||||
'size_spec': size_spec,
|
||||
'material_grade': material_grade,
|
||||
'line_number': index + 1,
|
||||
'row_number': index + 1
|
||||
})
|
||||
|
||||
return materials
|
||||
|
||||
def parse_file_data(file_path):
|
||||
file_extension = Path(file_path).suffix.lower()
|
||||
|
||||
try:
|
||||
if file_extension == ".csv":
|
||||
df = pd.read_csv(file_path, encoding='utf-8')
|
||||
elif file_extension in [".xlsx", ".xls"]:
|
||||
df = pd.read_excel(file_path, sheet_name=0)
|
||||
else:
|
||||
raise HTTPException(status_code=400, detail="지원하지 않는 파일 형식")
|
||||
|
||||
return parse_dataframe(df)
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=400, detail=f"파일 파싱 실패: {str(e)}")
|
||||
|
||||
@router.post("/upload")
|
||||
async def upload_file(
|
||||
file: UploadFile = File(...),
|
||||
job_no: str = Form(...),
|
||||
revision: str = Form("Rev.0"),
|
||||
db: Session = Depends(get_db)
|
||||
):
|
||||
if not validate_file_extension(file.filename):
|
||||
raise HTTPException(
|
||||
status_code=400,
|
||||
detail=f"지원하지 않는 파일 형식입니다. 허용된 확장자: {', '.join(ALLOWED_EXTENSIONS)}"
|
||||
)
|
||||
|
||||
if file.size and file.size > 10 * 1024 * 1024:
|
||||
raise HTTPException(status_code=400, detail="파일 크기는 10MB를 초과할 수 없습니다")
|
||||
|
||||
unique_filename = generate_unique_filename(file.filename)
|
||||
file_path = UPLOAD_DIR / unique_filename
|
||||
|
||||
try:
|
||||
with open(file_path, "wb") as buffer:
|
||||
shutil.copyfileobj(file.file, buffer)
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=f"파일 저장 실패: {str(e)}")
|
||||
|
||||
try:
|
||||
materials_data = parse_file_data(str(file_path))
|
||||
parsed_count = len(materials_data)
|
||||
|
||||
# 파일 정보 저장
|
||||
file_insert_query = text("""
|
||||
INSERT INTO files (filename, original_filename, file_path, job_no, revision, description, file_size, parsed_count, is_active)
|
||||
VALUES (:filename, :original_filename, :file_path, :job_no, :revision, :description, :file_size, :parsed_count, :is_active)
|
||||
RETURNING id
|
||||
""")
|
||||
|
||||
file_result = db.execute(file_insert_query, {
|
||||
"filename": unique_filename,
|
||||
"original_filename": file.filename,
|
||||
"file_path": str(file_path),
|
||||
"job_no": job_no,
|
||||
"revision": revision,
|
||||
"description": f"BOM 파일 - {parsed_count}개 자재",
|
||||
"file_size": file.size,
|
||||
"parsed_count": parsed_count,
|
||||
"is_active": True
|
||||
})
|
||||
|
||||
file_id = file_result.fetchone()[0]
|
||||
|
||||
# 자재 데이터 저장
|
||||
materials_inserted = 0
|
||||
for material_data in materials_data:
|
||||
material_insert_query = text("""
|
||||
INSERT INTO materials (
|
||||
file_id, original_description, quantity, unit, size_spec,
|
||||
material_grade, line_number, row_number, classified_category,
|
||||
classification_confidence, is_verified, created_at
|
||||
)
|
||||
VALUES (
|
||||
:file_id, :original_description, :quantity, :unit, :size_spec,
|
||||
:material_grade, :line_number, :row_number, :classified_category,
|
||||
:classification_confidence, :is_verified, :created_at
|
||||
)
|
||||
""")
|
||||
|
||||
db.execute(material_insert_query, {
|
||||
"file_id": file_id,
|
||||
"original_description": material_data["original_description"],
|
||||
"quantity": material_data["quantity"],
|
||||
"unit": material_data["unit"],
|
||||
"size_spec": material_data["size_spec"],
|
||||
"material_grade": material_data["material_grade"],
|
||||
"line_number": material_data["line_number"],
|
||||
"row_number": material_data["row_number"],
|
||||
"classified_category": None,
|
||||
"classification_confidence": None,
|
||||
"is_verified": False,
|
||||
"created_at": datetime.now()
|
||||
})
|
||||
materials_inserted += 1
|
||||
|
||||
db.commit()
|
||||
|
||||
return {
|
||||
"success": True,
|
||||
"message": f"완전한 DB 저장 성공! {materials_inserted}개 자재 저장됨",
|
||||
"original_filename": file.filename,
|
||||
"file_id": file_id,
|
||||
"parsed_materials_count": parsed_count,
|
||||
"saved_materials_count": materials_inserted,
|
||||
"sample_materials": materials_data[:3] if materials_data else []
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
db.rollback()
|
||||
if os.path.exists(file_path):
|
||||
os.remove(file_path)
|
||||
raise HTTPException(status_code=500, detail=f"파일 처리 실패: {str(e)}")
|
||||
@router.get("/materials")
|
||||
async def get_materials(
|
||||
job_no: Optional[str] = None,
|
||||
file_id: Optional[str] = None,
|
||||
skip: int = 0,
|
||||
limit: int = 100,
|
||||
db: Session = Depends(get_db)
|
||||
):
|
||||
"""저장된 자재 목록 조회"""
|
||||
try:
|
||||
query = """
|
||||
SELECT m.id, m.file_id, m.original_description, m.quantity, m.unit,
|
||||
m.size_spec, m.material_grade, m.line_number, m.row_number,
|
||||
m.created_at,
|
||||
f.original_filename, f.job_no,
|
||||
j.job_no, j.job_name
|
||||
FROM materials m
|
||||
LEFT JOIN files f ON m.file_id = f.id
|
||||
LEFT JOIN jobs j ON f.job_no = j.job_no
|
||||
WHERE 1=1
|
||||
"""
|
||||
|
||||
params = {}
|
||||
|
||||
if job_no:
|
||||
query += " AND f.job_no = :job_no"
|
||||
params["job_no"] = job_no
|
||||
|
||||
if file_id:
|
||||
query += " AND m.file_id = :file_id"
|
||||
params["file_id"] = file_id
|
||||
|
||||
query += " ORDER BY m.line_number ASC LIMIT :limit OFFSET :skip"
|
||||
params["limit"] = limit
|
||||
params["skip"] = skip
|
||||
|
||||
result = db.execute(text(query), params)
|
||||
materials = result.fetchall()
|
||||
|
||||
# 전체 개수 조회
|
||||
count_query = """
|
||||
SELECT COUNT(*) as total
|
||||
FROM materials m
|
||||
LEFT JOIN files f ON m.file_id = f.id
|
||||
WHERE 1=1
|
||||
"""
|
||||
count_params = {}
|
||||
|
||||
if job_no:
|
||||
count_query += " AND f.job_no = :job_no"
|
||||
count_params["job_no"] = job_no
|
||||
|
||||
if file_id:
|
||||
count_query += " AND m.file_id = :file_id"
|
||||
count_params["file_id"] = file_id
|
||||
|
||||
count_result = db.execute(text(count_query), count_params)
|
||||
total_count = count_result.fetchone()[0]
|
||||
|
||||
return {
|
||||
"success": True,
|
||||
"total_count": total_count,
|
||||
"returned_count": len(materials),
|
||||
"skip": skip,
|
||||
"limit": limit,
|
||||
"materials": [
|
||||
{
|
||||
"id": m.id,
|
||||
"file_id": m.file_id,
|
||||
"filename": m.original_filename,
|
||||
"job_no": m.job_no,
|
||||
"project_code": m.official_project_code,
|
||||
"project_name": m.project_name,
|
||||
"original_description": m.original_description,
|
||||
"quantity": float(m.quantity) if m.quantity else 0,
|
||||
"unit": m.unit,
|
||||
"size_spec": m.size_spec,
|
||||
"material_grade": m.material_grade,
|
||||
"line_number": m.line_number,
|
||||
"row_number": m.row_number,
|
||||
"created_at": m.created_at
|
||||
}
|
||||
for m in materials
|
||||
]
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=f"자재 조회 실패: {str(e)}")
|
||||
|
||||
@router.get("/materials/summary")
|
||||
async def get_materials_summary(
|
||||
job_no: Optional[str] = None,
|
||||
file_id: Optional[str] = None,
|
||||
db: Session = Depends(get_db)
|
||||
):
|
||||
"""자재 요약 통계"""
|
||||
try:
|
||||
query = """
|
||||
SELECT
|
||||
COUNT(*) as total_items,
|
||||
COUNT(DISTINCT m.original_description) as unique_descriptions,
|
||||
COUNT(DISTINCT m.size_spec) as unique_sizes,
|
||||
COUNT(DISTINCT m.material_grade) as unique_materials,
|
||||
SUM(m.quantity) as total_quantity,
|
||||
AVG(m.quantity) as avg_quantity,
|
||||
MIN(m.created_at) as earliest_upload,
|
||||
MAX(m.created_at) as latest_upload
|
||||
FROM materials m
|
||||
LEFT JOIN files f ON m.file_id = f.id
|
||||
WHERE 1=1
|
||||
"""
|
||||
|
||||
params = {}
|
||||
|
||||
if job_no:
|
||||
query += " AND f.job_no = :job_no"
|
||||
params["job_no"] = job_no
|
||||
|
||||
if file_id:
|
||||
query += " AND m.file_id = :file_id"
|
||||
params["file_id"] = file_id
|
||||
|
||||
result = db.execute(text(query), params)
|
||||
summary = result.fetchone()
|
||||
|
||||
return {
|
||||
"success": True,
|
||||
"summary": {
|
||||
"total_items": summary.total_items,
|
||||
"unique_descriptions": summary.unique_descriptions,
|
||||
"unique_sizes": summary.unique_sizes,
|
||||
"unique_materials": summary.unique_materials,
|
||||
"total_quantity": float(summary.total_quantity) if summary.total_quantity else 0,
|
||||
"avg_quantity": round(float(summary.avg_quantity), 2) if summary.avg_quantity else 0,
|
||||
"earliest_upload": summary.earliest_upload,
|
||||
"latest_upload": summary.latest_upload
|
||||
}
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=f"요약 조회 실패: {str(e)}")
|
||||
# Job 검증 함수 (파일 끝에 추가할 예정)
|
||||
async def validate_job_exists(job_no: str, db: Session):
|
||||
"""Job 존재 여부 및 활성 상태 확인"""
|
||||
try:
|
||||
query = text("SELECT job_no, job_name, status FROM jobs WHERE job_no = :job_no AND is_active = true")
|
||||
job = db.execute(query, {"job_no": job_no}).fetchone()
|
||||
|
||||
if not job:
|
||||
return {"valid": False, "error": f"Job No. '{job_no}'를 찾을 수 없습니다"}
|
||||
|
||||
if job.status == '완료':
|
||||
return {"valid": False, "error": f"완료된 Job '{job.job_name}'에는 파일을 업로드할 수 없습니다"}
|
||||
|
||||
return {
|
||||
"valid": True,
|
||||
"job": {
|
||||
"job_no": job.job_no,
|
||||
"job_name": job.job_name,
|
||||
"status": job.status
|
||||
}
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
return {"valid": False, "error": f"Job 검증 실패: {str(e)}"}
|
||||
176
backend/app/routers/jobs.py
Normal file
176
backend/app/routers/jobs.py
Normal file
@@ -0,0 +1,176 @@
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query
|
||||
from sqlalchemy.orm import Session
|
||||
from sqlalchemy import text
|
||||
from typing import Optional
|
||||
from datetime import datetime, date
|
||||
from pydantic import BaseModel
|
||||
|
||||
from ..database import get_db
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
# Pydantic 모델들
|
||||
class JobCreate(BaseModel):
|
||||
job_no: str
|
||||
job_name: str
|
||||
client_name: str
|
||||
end_user: Optional[str] = None
|
||||
epc_company: Optional[str] = None
|
||||
project_site: Optional[str] = None
|
||||
contract_date: Optional[date] = None
|
||||
delivery_date: Optional[date] = None
|
||||
delivery_terms: Optional[str] = None
|
||||
description: Optional[str] = None
|
||||
|
||||
@router.get("/")
async def get_jobs(
    skip: int = Query(0, ge=0),
    limit: int = Query(100, ge=1, le=1000),
    search: Optional[str] = Query(None),
    db: Session = Depends(get_db)
):
    """List active jobs, newest first, with optional text search and paging.

    ``search`` matches job_no, job_name or client_name (case-insensitive).
    Note: ``total_count`` is the size of the returned page, not the table.
    """
    try:
        sql = """
            SELECT job_no, job_name, client_name, end_user, epc_company,
                   project_site, contract_date, delivery_date, delivery_terms,
                   status, description, created_by, created_at, updated_at, is_active
            FROM jobs
            WHERE is_active = true
        """
        bind = {"limit": limit, "skip": skip}

        if search:
            # bound parameter — no SQL injection risk
            sql += " AND (job_no ILIKE :search OR job_name ILIKE :search OR client_name ILIKE :search)"
            bind["search"] = f"%{search}%"

        sql += " ORDER BY created_at DESC LIMIT :limit OFFSET :skip"

        rows = db.execute(text(sql), bind).fetchall()

        def serialize(row):
            # shape each row for the JSON response (created_by is not exposed here)
            return {
                "job_no": row.job_no,
                "job_name": row.job_name,
                "client_name": row.client_name,
                "end_user": row.end_user,
                "epc_company": row.epc_company,
                "project_site": row.project_site,
                "contract_date": row.contract_date,
                "delivery_date": row.delivery_date,
                "delivery_terms": row.delivery_terms,
                "status": row.status,
                "description": row.description,
                "created_at": row.created_at
            }

        return {
            "success": True,
            "total_count": len(rows),
            "jobs": [serialize(r) for r in rows]
        }

    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Job 목록 조회 실패: {str(e)}")
|
||||
|
||||
@router.get("/{job_no}")
async def get_job(job_no: str, db: Session = Depends(get_db)):
    """Fetch one active job by its job number; 404 when not found."""
    try:
        stmt = text("""
            SELECT job_no, job_name, client_name, end_user, epc_company,
                   project_site, contract_date, delivery_date, delivery_terms,
                   status, description, created_by, created_at, updated_at, is_active
            FROM jobs
            WHERE job_no = :job_no AND is_active = true
        """)
        row = db.execute(stmt, {"job_no": job_no}).fetchone()

        if not row:
            raise HTTPException(status_code=404, detail="Job을 찾을 수 없습니다")

        # columns exposed in the detail view (updated_at / is_active stay internal)
        exposed = (
            "job_no", "job_name", "client_name", "end_user", "epc_company",
            "project_site", "contract_date", "delivery_date", "delivery_terms",
            "status", "description", "created_by", "created_at",
        )
        return {"success": True, "job": {name: getattr(row, name) for name in exposed}}

    except HTTPException:
        # let the 404 pass through untouched
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Job 조회 실패: {str(e)}")
|
||||
|
||||
@router.post("/")
async def create_job(job: JobCreate, db: Session = Depends(get_db)):
    """Create a new job record.

    Raises 400 when the job_no already exists, 500 on any DB failure;
    new jobs start as active with status '진행중' and created_by 'admin'.
    """
    try:
        # reject duplicate job numbers up front
        dup = db.execute(
            text("SELECT job_no FROM jobs WHERE job_no = :job_no"),
            {"job_no": job.job_no},
        ).fetchone()
        if dup:
            raise HTTPException(
                status_code=400,
                detail=f"Job No. '{job.job_no}'가 이미 존재합니다"
            )

        stmt = text("""
            INSERT INTO jobs (
                job_no, job_name, client_name, end_user, epc_company,
                project_site, contract_date, delivery_date, delivery_terms,
                description, created_by, status, is_active
            )
            VALUES (
                :job_no, :job_name, :client_name, :end_user, :epc_company,
                :project_site, :contract_date, :delivery_date, :delivery_terms,
                :description, :created_by, :status, :is_active
            )
            RETURNING job_no, job_name, client_name
        """)

        # payload fields plus server-assigned defaults
        bind = dict(job.dict(), created_by="admin", status="진행중", is_active=True)
        created = db.execute(stmt, bind).fetchone()
        db.commit()

        return {
            "success": True,
            "message": "Job이 성공적으로 생성되었습니다",
            "job": {
                "job_no": created.job_no,
                "job_name": created.job_name,
                "client_name": created.client_name
            }
        }

    except HTTPException:
        db.rollback()
        raise
    except Exception as e:
        db.rollback()
        raise HTTPException(status_code=500, detail=f"Job 생성 실패: {str(e)}")
|
||||
36
backend/scripts/01_create_jobs_table.sql
Normal file
36
backend/scripts/01_create_jobs_table.sql
Normal file
@@ -0,0 +1,36 @@
|
||||
-- Create the jobs table.
-- NOTE(review): the API layer uses ILIKE and RETURNING, i.e. PostgreSQL.
-- The original updated_at definition used MySQL-only
-- "ON UPDATE CURRENT_TIMESTAMP", which is a syntax error in PostgreSQL
-- (and the sibling create_jobs.sql also omits it). Keep a plain default
-- here and maintain updated_at from the application or a trigger.
CREATE TABLE IF NOT EXISTS jobs (
    -- identity
    job_no VARCHAR(50) PRIMARY KEY,
    job_name VARCHAR(200) NOT NULL,

    -- contractual relationship (core)
    client_name VARCHAR(100) NOT NULL,

    -- project info
    end_user VARCHAR(100),
    epc_company VARCHAR(100),
    project_site VARCHAR(200),

    -- commercial info
    contract_date DATE,
    delivery_date DATE,
    delivery_terms VARCHAR(100),

    -- status tracking (core)
    status VARCHAR(20) DEFAULT '진행중',
    delivery_completed_date DATE,
    project_closed_date DATE,

    -- bookkeeping
    description TEXT,
    created_by VARCHAR(50),
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    is_active BOOLEAN DEFAULT true
);

-- Indexes for the most common filters
CREATE INDEX IF NOT EXISTS idx_jobs_status ON jobs(status);
CREATE INDEX IF NOT EXISTS idx_jobs_client ON jobs(client_name);
CREATE INDEX IF NOT EXISTS idx_jobs_created_at ON jobs(created_at);
|
||||
19
backend/scripts/02_modify_files_table.sql
Normal file
19
backend/scripts/02_modify_files_table.sql
Normal file
@@ -0,0 +1,19 @@
|
||||
-- Add a job_no column to the files table (eventually replacing project_id).

-- New column
ALTER TABLE files ADD COLUMN IF NOT EXISTS job_no VARCHAR(50);

-- Foreign-key constraint (MySQL / PostgreSQL syntax), left disabled for now:
-- MySQL:
-- ALTER TABLE files ADD CONSTRAINT fk_files_job_no FOREIGN KEY (job_no) REFERENCES jobs(job_no);

-- PostgreSQL:
-- ALTER TABLE files ADD CONSTRAINT fk_files_job_no FOREIGN KEY (job_no) REFERENCES jobs(job_no);

-- SQLite cannot easily add foreign keys after table creation, so it is skipped there.

-- Index for lookups by job
CREATE INDEX IF NOT EXISTS idx_files_job_no ON files(job_no);

-- The legacy project_id column is kept for now (compatibility);
-- it will be dropped once the migration is fully complete.
|
||||
108
backend/scripts/03_insert_dummy_data.py
Normal file
108
backend/scripts/03_insert_dummy_data.py
Normal file
@@ -0,0 +1,108 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
더미 프로젝트 데이터 생성 스크립트
|
||||
"""
|
||||
|
||||
import sys
|
||||
import os
|
||||
from datetime import datetime, date
|
||||
from sqlalchemy import create_engine, text
|
||||
|
||||
# 프로젝트 루트를 Python path에 추가
|
||||
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||
|
||||
def create_dummy_jobs():
    """Insert two dummy Job rows (J24-001, J24-002) for development.

    Previous rows with those job numbers are deleted first, so the script
    is safe to re-run. Returns True on success, False on any failure.
    """

    # simple connection setup (adjust to the real DB configuration)
    try:
        # prefer the actual project's database.py engine
        from app.database import engine
        print("✅ 데이터베이스 연결 성공")
    except ImportError:
        # direct connection (development only)
        DATABASE_URL = "sqlite:///./test.db"  # change to the real DB URL
        engine = create_engine(DATABASE_URL)
        print("⚠️ 직접 데이터베이스 연결")

    # dummy data definition
    # NOTE(review): dates are ISO-8601 strings, not date objects — presumably
    # coerced by the driver into DATE columns; confirm on PostgreSQL.
    dummy_jobs = [
        {
            'job_no': 'J24-001',
            'job_name': '울산 SK에너지 정유시설 증설 배관공사',
            'client_name': '삼성엔지니어링',
            'end_user': 'SK에너지',
            'epc_company': '삼성엔지니어링',
            'project_site': '울산광역시 온산공단 SK에너지 정유공장',
            'contract_date': '2024-03-15',
            'delivery_date': '2024-08-30',
            'delivery_terms': 'FOB 울산항',
            'status': '진행중',
            'description': '정유시설 증설을 위한 배관 자재 공급 프로젝트. 고온고압 배관 및 특수 밸브 포함.',
            'created_by': 'admin'
        },
        {
            'job_no': 'J24-002',
            'job_name': '포스코 광양 제철소 배관 정비공사',
            'client_name': '포스코',
            'end_user': '포스코',
            'epc_company': None,
            'project_site': '전남 광양시 포스코 광양제철소',
            'contract_date': '2024-04-02',
            'delivery_date': '2024-07-15',
            'delivery_terms': 'DDP 광양제철소 현장',
            'status': '진행중',
            'description': '제철소 정기 정비를 위한 배관 부품 교체. 내열성 특수강 배관 포함.',
            'created_by': 'admin'
        }
    ]

    try:
        with engine.connect() as conn:
            # delete existing dummy rows (development convenience)
            print("🧹 기존 더미 데이터 정리...")
            conn.execute(text("DELETE FROM jobs WHERE job_no IN ('J24-001', 'J24-002')"))

            # insert the fresh dummy rows
            print("📝 더미 데이터 삽입 중...")

            for job in dummy_jobs:
                insert_query = text("""
                    INSERT INTO jobs (
                        job_no, job_name, client_name, end_user, epc_company,
                        project_site, contract_date, delivery_date, delivery_terms,
                        status, description, created_by, is_active
                    ) VALUES (
                        :job_no, :job_name, :client_name, :end_user, :epc_company,
                        :project_site, :contract_date, :delivery_date, :delivery_terms,
                        :status, :description, :created_by, :is_active
                    )
                """)

                conn.execute(insert_query, {**job, 'is_active': True})
                print(f"✅ {job['job_no']}: {job['job_name']}")

            # commit the whole batch at once
            conn.commit()

            # read back what was written as a sanity check
            result = conn.execute(text("""
                SELECT job_no, job_name, client_name, status
                FROM jobs
                WHERE job_no IN ('J24-001', 'J24-002')
            """))
            jobs = result.fetchall()

            print(f"\n🎉 총 {len(jobs)}개 더미 Job 생성 완료!")
            print("\n📋 생성된 더미 데이터:")
            for job in jobs:
                print(f" • {job[0]}: {job[1]} ({job[2]}) - {job[3]}")

            return True

    except Exception as e:
        # best-effort script: report and signal failure instead of raising
        print(f"❌ 더미 데이터 생성 실패: {e}")
        return False
|
||||
|
||||
# Allow running directly: python scripts/03_insert_dummy_data.py
if __name__ == "__main__":
    create_dummy_jobs()
|
||||
19
backend/scripts/create_jobs.sql
Normal file
19
backend/scripts/create_jobs.sql
Normal file
@@ -0,0 +1,19 @@
|
||||
-- Minimal jobs table schema (standalone variant; 01_create_jobs_table.sql
-- holds the commented version with indexes).
CREATE TABLE IF NOT EXISTS jobs (
    job_no VARCHAR(50) PRIMARY KEY,
    job_name VARCHAR(200) NOT NULL,
    client_name VARCHAR(100) NOT NULL,
    end_user VARCHAR(100),
    epc_company VARCHAR(100),
    project_site VARCHAR(200),
    contract_date DATE,
    delivery_date DATE,
    delivery_terms VARCHAR(100),
    status VARCHAR(20) DEFAULT '진행중',
    delivery_completed_date DATE,
    project_closed_date DATE,
    description TEXT,
    created_by VARCHAR(50),
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    is_active BOOLEAN DEFAULT true
);
|
||||
81
backend/scripts/insert_dummy_jobs.py
Normal file
81
backend/scripts/insert_dummy_jobs.py
Normal file
@@ -0,0 +1,81 @@
|
||||
#!/usr/bin/env python3
|
||||
import sys
|
||||
import os
|
||||
from datetime import datetime, date
|
||||
|
||||
# 프로젝트 루트를 Python path에 추가
|
||||
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||
|
||||
# Import the project's engine; abort immediately if the app package cannot be
# found (the sys.path tweak above must point at the backend root).
try:
    from app.database import engine
    from sqlalchemy import text
    print("✅ 데이터베이스 연결 성공")
except ImportError as e:
    print(f"❌ 임포트 실패: {e}")
    sys.exit(1)
|
||||
|
||||
def insert_dummy_data():
    """Seed the jobs table with two sample records (J24-001, J24-002).

    Existing rows with those job numbers are removed first, so the script
    can be re-run during development; all outcomes are printed, nothing is
    raised to the caller.
    """
    dummy_jobs = [
        {
            'job_no': 'J24-001',
            'job_name': '울산 SK에너지 정유시설 증설 배관공사',
            'client_name': '삼성엔지니어링',
            'end_user': 'SK에너지',
            'epc_company': '삼성엔지니어링',
            'project_site': '울산광역시 온산공단',
            'contract_date': '2024-03-15',
            'delivery_date': '2024-08-30',
            'delivery_terms': 'FOB 울산항',
            'description': '정유시설 증설을 위한 배관 자재 공급',
            'created_by': 'admin'
        },
        {
            'job_no': 'J24-002',
            'job_name': '포스코 광양 제철소 배관 정비공사',
            'client_name': '포스코',
            'end_user': '포스코',
            'epc_company': None,
            'project_site': '전남 광양시 포스코 제철소',
            'contract_date': '2024-04-02',
            'delivery_date': '2024-07-15',
            'delivery_terms': 'DDP 광양제철소',
            'description': '제철소 정기 정비용 배관 부품',
            'created_by': 'admin'
        }
    ]

    # the statement is identical for every record, so build it once
    insert_stmt = text("""
        INSERT INTO jobs (
            job_no, job_name, client_name, end_user, epc_company,
            project_site, contract_date, delivery_date, delivery_terms,
            description, created_by, is_active
        ) VALUES (
            :job_no, :job_name, :client_name, :end_user, :epc_company,
            :project_site, :contract_date, :delivery_date, :delivery_terms,
            :description, :created_by, :is_active
        )
    """)

    try:
        with engine.connect() as connection:
            # wipe previous seed rows so re-runs stay idempotent
            connection.execute(text("DELETE FROM jobs WHERE job_no IN ('J24-001', 'J24-002')"))

            for record in dummy_jobs:
                connection.execute(insert_stmt, {**record, 'is_active': True})
                print(f"✅ {record['job_no']}: {record['job_name']}")

            connection.commit()
            print(f"\n🎉 {len(dummy_jobs)}개 더미 Job 생성 완료!")

            # echo every row currently in the table as a sanity check
            for row in connection.execute(text("SELECT job_no, job_name, client_name FROM jobs")):
                print(f" • {row[0]}: {row[1]} ({row[2]})")

    except Exception as e:
        print(f"❌ 오류: {e}")
|
||||
|
||||
# Allow running directly: python scripts/insert_dummy_jobs.py
if __name__ == "__main__":
    insert_dummy_data()
|
||||
73
backend/scripts/setup_database.py
Normal file
73
backend/scripts/setup_database.py
Normal file
@@ -0,0 +1,73 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
전체 데이터베이스 설정 및 더미 데이터 생성
|
||||
"""
|
||||
|
||||
import sys
|
||||
import os
|
||||
from sqlalchemy import create_engine, text
|
||||
|
||||
# 프로젝트 루트를 Python path에 추가
|
||||
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||
|
||||
def _run_sql_file(conn, path):
    """Execute each ';'-separated statement in the SQL file at *path*.

    Comment-only lines are stripped before the emptiness check, so a real
    statement preceded by a '--' header comment still runs. (The previous
    ``command.startswith('--')`` filter silently skipped such statements —
    the ALTER in 02_modify_files_table.sql was never executed.)
    """
    with open(path, 'r', encoding='utf-8') as f:
        script = f.read()
    for chunk in script.split(';'):
        body = '\n'.join(
            line for line in chunk.splitlines()
            if not line.strip().startswith('--')
        ).strip()
        if body:
            conn.execute(text(body))


def setup_database():
    """Create the jobs schema, patch the files table, and seed dummy data.

    Returns True on success, False on failure (errors are printed, not
    raised, so the script stays usable from a shell).
    """
    import runpy  # stdlib; replaces exec(open(...).read()) for step 3

    try:
        from app.database import engine
        print("✅ 기존 데이터베이스 연결 사용")
    except ImportError:
        # development fallback: direct SQLite connection
        DATABASE_URL = "sqlite:///./test.db"
        engine = create_engine(DATABASE_URL)
        print("⚠️ 개발용 SQLite 연결")

    try:
        with engine.connect() as conn:
            print("🏗️ 1단계: jobs 테이블 생성...")
            _run_sql_file(conn, 'scripts/01_create_jobs_table.sql')
            print("✅ jobs 테이블 생성 완료")

            print("🔧 2단계: files 테이블 수정...")
            # optional step: tolerate failures (e.g. column already exists)
            try:
                _run_sql_file(conn, 'scripts/02_modify_files_table.sql')
                print("✅ files 테이블 수정 완료")
            except Exception as e:
                print(f"⚠️ files 테이블 수정 건너뜀: {e}")

            conn.commit()

            print("🎯 3단계: 더미 데이터 생성...")
            # Run the seed script in its own namespace without leaking a file
            # handle; run_name='__main__' fires its `if __name__` guard, which
            # matches the old exec(...) behavior when this script runs as main.
            runpy.run_path('scripts/03_insert_dummy_data.py', run_name='__main__')

            print("\n🎉 전체 설정 완료!")
            print("\n📋 다음 단계:")
            print(" 1. API 서버 실행")
            print(" 2. GET /jobs 엔드포인트 테스트")
            print(" 3. Job 선택 후 BOM 파일 업로드")

            return True

    except Exception as e:
        print(f"❌ 데이터베이스 설정 실패: {e}")
        return False
|
||||
|
||||
# Allow running directly: python scripts/setup_database.py
if __name__ == "__main__":
    setup_database()
|
||||
5
backend/test_bom.csv
Normal file
5
backend/test_bom.csv
Normal file
@@ -0,0 +1,5 @@
|
||||
Description,Quantity,Unit,Size
"PIPE ASTM A106 GR.B",10,EA,"4"""
"ELBOW 90° ASTM A234",5,EA,"4"""
"VALVE GATE ASTM A216",2,EA,"4"""
"FLANGE WELD NECK",8,EA,"4"""
|
||||
|
Can't render this file because it contains an unexpected character in line 2 and column 30.
|
Reference in New Issue
Block a user