# NOTE(review): the lines below are web-page/CI residue that was accidentally
# pasted into this file; preserved as a comment so the module stays importable.
# CI status: some checks failed — "SonarQube Analysis / SonarQube Scan (push)" was cancelled.
# Commit note (translated from Korean):
#   - frontend/src/pages/revision/ folder deleted entirely
#   - EnhancedRevisionPage.css removed
#   - saving of support_details temporarily disabled due to a transaction error
#   - revision feature redesign planned
# File stats: 664 lines, 28 KiB, Python.
#!/usr/bin/env python3
|
|
"""
|
|
Docker 환경용 마이그레이션 스크립트
|
|
컨테이너 내부에서 실행되는 간단한 마이그레이션 도구
|
|
"""
|
|
|
|
import os
|
|
import sys
|
|
import psycopg2
|
|
from psycopg2.extras import RealDictCursor
|
|
from datetime import datetime
|
|
|
|
class DockerMigrator:
    """Schema-migration helper meant to run inside the app's Docker network.

    Connection parameters are read from environment variables, with
    defaults matching the docker-compose service names.
    """

    def __init__(self):
        # DB connection settings for the Docker environment; every value
        # can be overridden through the corresponding environment variable.
        env = os.getenv
        self.db_config = {
            'host': env('DB_HOST', 'tk-mp-postgres'),
            'port': int(env('DB_PORT', 5432)),
            'database': env('DB_NAME', 'tk_mp_bom'),
            'user': env('DB_USER', 'tkmp_user'),
            'password': env('DB_PASSWORD', 'tkmp_password_2025'),
        }
def check_and_fix_schema(self):
|
|
"""스키마 체크 및 수정"""
|
|
print("🔍 스키마 체크 시작...")
|
|
|
|
fixes_applied = []
|
|
|
|
try:
|
|
conn = psycopg2.connect(**self.db_config)
|
|
cursor = conn.cursor(cursor_factory=RealDictCursor)
|
|
|
|
# 1. users.status 컬럼 체크 및 추가
|
|
if self._check_and_add_users_status(cursor):
|
|
fixes_applied.append("users.status 컬럼 추가")
|
|
|
|
# 2. files 테이블 누락 컬럼들 체크 및 추가
|
|
if self._check_and_add_files_columns(cursor):
|
|
fixes_applied.append("files 테이블 누락 컬럼들 추가")
|
|
|
|
# 3. materials 테이블 누락 컬럼들 체크 및 추가
|
|
if self._check_and_add_materials_columns(cursor):
|
|
fixes_applied.append("materials 테이블 누락 컬럼들 추가")
|
|
|
|
# 3.5. material_purchase_tracking 테이블 누락 컬럼들 체크 및 추가
|
|
if self._check_and_add_mpt_columns(cursor):
|
|
fixes_applied.append("material_purchase_tracking 테이블 누락 컬럼들 추가")
|
|
|
|
# 4. 누락된 상세 테이블들 체크 및 생성
|
|
if self._check_and_create_detail_tables(cursor):
|
|
fixes_applied.append("누락된 상세 테이블들 생성")
|
|
|
|
# 5. 리비전 관리 테이블들 체크 및 생성
|
|
if self._check_and_create_revision_tables(cursor):
|
|
fixes_applied.append("리비전 관리 테이블들 생성")
|
|
|
|
# 6. PIPE 관련 테이블들 체크 및 생성
|
|
if self._check_and_create_pipe_tables(cursor):
|
|
fixes_applied.append("PIPE 관련 테이블들 생성")
|
|
|
|
# 7. 기타 필요한 수정사항들...
|
|
# 향후 추가될 수 있는 다른 스키마 수정사항들
|
|
|
|
conn.commit()
|
|
cursor.close()
|
|
conn.close()
|
|
|
|
if fixes_applied:
|
|
print("✅ 스키마 수정 완료:")
|
|
for fix in fixes_applied:
|
|
print(f" - {fix}")
|
|
else:
|
|
print("✅ 스키마가 이미 최신 상태입니다.")
|
|
|
|
return True
|
|
|
|
except Exception as e:
|
|
print(f"❌ 스키마 체크 실패: {e}")
|
|
return False
|
|
|
|
def _check_and_add_users_status(self, cursor) -> bool:
|
|
"""users.status 컬럼 체크 및 추가"""
|
|
try:
|
|
# status 컬럼이 존재하는지 확인
|
|
cursor.execute("""
|
|
SELECT column_name
|
|
FROM information_schema.columns
|
|
WHERE table_name = 'users' AND column_name = 'status'
|
|
""")
|
|
|
|
if cursor.fetchone():
|
|
return False # 이미 존재함
|
|
|
|
# status 컬럼 추가
|
|
cursor.execute("ALTER TABLE users ADD COLUMN status VARCHAR(20) DEFAULT 'active'")
|
|
|
|
# 기존 사용자들의 status를 'active'로 설정
|
|
cursor.execute("UPDATE users SET status = 'active' WHERE status IS NULL")
|
|
|
|
print(" 🔧 users.status 컬럼 추가됨")
|
|
return True
|
|
|
|
except Exception as e:
|
|
print(f" ❌ users.status 컬럼 추가 실패: {e}")
|
|
return False
|
|
|
|
def _check_and_add_files_columns(self, cursor) -> bool:
|
|
"""files 테이블 누락 컬럼들 체크 및 추가"""
|
|
try:
|
|
# 필요한 컬럼들과 그 정의
|
|
required_columns = {
|
|
'job_no': 'VARCHAR(100)',
|
|
'bom_name': 'VARCHAR(255)',
|
|
'description': 'TEXT',
|
|
'parsed_count': 'INTEGER'
|
|
}
|
|
|
|
added_columns = []
|
|
|
|
for column_name, column_type in required_columns.items():
|
|
# 컬럼이 존재하는지 확인
|
|
cursor.execute("""
|
|
SELECT column_name
|
|
FROM information_schema.columns
|
|
WHERE table_name = 'files' AND column_name = %s
|
|
""", (column_name,))
|
|
|
|
if not cursor.fetchone():
|
|
# 컬럼 추가
|
|
cursor.execute(f"ALTER TABLE files ADD COLUMN {column_name} {column_type}")
|
|
added_columns.append(column_name)
|
|
print(f" 🔧 files.{column_name} 컬럼 추가됨")
|
|
|
|
return len(added_columns) > 0
|
|
|
|
except Exception as e:
|
|
print(f" ❌ files 테이블 컬럼 추가 실패: {e}")
|
|
return False
|
|
|
|
def _check_and_add_materials_columns(self, cursor) -> bool:
|
|
"""materials 테이블 누락 컬럼들 체크 및 추가 (테스팅 서버 기준)"""
|
|
try:
|
|
# 테스팅 서버 기준 필요한 컬럼들
|
|
required_columns = {
|
|
# 사이즈 정보
|
|
'main_nom': 'VARCHAR(50)',
|
|
'red_nom': 'VARCHAR(50)',
|
|
'row_number': 'INTEGER',
|
|
|
|
# 재질 정보
|
|
'full_material_grade': 'VARCHAR(100)',
|
|
'standard': 'VARCHAR(100)',
|
|
'grade': 'VARCHAR(100)',
|
|
'subcategory': 'VARCHAR(100)',
|
|
|
|
# 사용자 입력 정보
|
|
'brand': 'VARCHAR(100)',
|
|
'user_requirement': 'TEXT',
|
|
|
|
# 메타데이터
|
|
'material_hash': 'VARCHAR(64)',
|
|
'classified_by': 'VARCHAR(100)',
|
|
'updated_by': 'VARCHAR(100)',
|
|
'revision_status': 'VARCHAR(20) DEFAULT \'current\'',
|
|
|
|
# 추가 필드들
|
|
'length': 'NUMERIC(10,3)',
|
|
'total_length': 'NUMERIC(10,3)',
|
|
'is_active': 'BOOLEAN DEFAULT true',
|
|
'purchase_confirmed': 'BOOLEAN DEFAULT false',
|
|
'confirmed_quantity': 'NUMERIC(10,3)',
|
|
'purchase_status': 'VARCHAR(20)',
|
|
'purchase_confirmed_by': 'VARCHAR(100)',
|
|
'purchase_confirmed_at': 'TIMESTAMP',
|
|
'normalized_description': 'TEXT'
|
|
}
|
|
|
|
added_columns = []
|
|
|
|
for column_name, column_type in required_columns.items():
|
|
# 컬럼이 존재하는지 확인
|
|
cursor.execute("""
|
|
SELECT column_name
|
|
FROM information_schema.columns
|
|
WHERE table_name = 'materials' AND column_name = %s
|
|
""", (column_name,))
|
|
|
|
if not cursor.fetchone():
|
|
# 컬럼 추가
|
|
cursor.execute(f"ALTER TABLE materials ADD COLUMN {column_name} {column_type}")
|
|
added_columns.append(column_name)
|
|
print(f" 🔧 materials.{column_name} 컬럼 추가됨")
|
|
|
|
if added_columns:
|
|
print(f" ✅ materials 테이블에 {len(added_columns)}개 컬럼 추가 완료")
|
|
|
|
return len(added_columns) > 0
|
|
|
|
except Exception as e:
|
|
print(f" ❌ materials 테이블 컬럼 추가 실패: {e}")
|
|
return False
|
|
|
|
def _check_and_add_mpt_columns(self, cursor) -> bool:
|
|
"""material_purchase_tracking 테이블 누락 컬럼들 체크 및 추가"""
|
|
try:
|
|
# 필요한 컬럼들
|
|
required_columns = {
|
|
'description': 'TEXT',
|
|
'purchase_status': 'VARCHAR(20)'
|
|
}
|
|
|
|
added_columns = []
|
|
|
|
for column_name, column_type in required_columns.items():
|
|
# 컬럼이 존재하는지 확인
|
|
cursor.execute("""
|
|
SELECT column_name
|
|
FROM information_schema.columns
|
|
WHERE table_name = 'material_purchase_tracking' AND column_name = %s
|
|
""", (column_name,))
|
|
|
|
if not cursor.fetchone():
|
|
# 컬럼 추가
|
|
cursor.execute(f"ALTER TABLE material_purchase_tracking ADD COLUMN {column_name} {column_type}")
|
|
added_columns.append(column_name)
|
|
print(f" 🔧 material_purchase_tracking.{column_name} 컬럼 추가됨")
|
|
|
|
if added_columns:
|
|
print(f" ✅ material_purchase_tracking 테이블에 {len(added_columns)}개 컬럼 추가 완료")
|
|
|
|
return len(added_columns) > 0
|
|
|
|
except Exception as e:
|
|
print(f" ❌ material_purchase_tracking 테이블 컬럼 추가 실패: {e}")
|
|
return False
|
|
|
|
    def _check_and_create_detail_tables(self, cursor) -> bool:
        """Create any missing per-category detail tables.

        Checks information_schema for each required table and creates it when
        absent. Dict insertion order matters: purchase_requests is created
        before purchase_request_items, which references it.

        NOTE(review): per the commit message at the top of this file, saving
        into support_details is currently disabled elsewhere due to a
        transaction error — confirm before relying on that table.

        Returns:
            bool: True when at least one table was created, False otherwise
            (including on error).
        """
        try:
            # Required detail tables and their DDL.
            required_tables = {
                'support_details': """
                    CREATE TABLE support_details (
                        id SERIAL PRIMARY KEY,
                        material_id INTEGER NOT NULL,
                        file_id INTEGER NOT NULL,
                        support_type VARCHAR(50),
                        support_subtype VARCHAR(50),
                        load_rating VARCHAR(50),
                        load_capacity VARCHAR(50),
                        material_standard VARCHAR(100),
                        material_grade VARCHAR(100),
                        pipe_size VARCHAR(50),
                        length_mm NUMERIC(10,3),
                        width_mm NUMERIC(10,3),
                        height_mm NUMERIC(10,3),
                        classification_confidence DOUBLE PRECISION,
                        created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                        FOREIGN KEY (material_id) REFERENCES materials(id) ON DELETE CASCADE,
                        FOREIGN KEY (file_id) REFERENCES files(id) ON DELETE CASCADE
                    )
                """,
                'special_material_details': """
                    CREATE TABLE special_material_details (
                        id SERIAL PRIMARY KEY,
                        material_id INTEGER NOT NULL,
                        file_id INTEGER NOT NULL,
                        special_type VARCHAR(50),
                        special_subtype VARCHAR(50),
                        material_standard VARCHAR(100),
                        material_grade VARCHAR(100),
                        size_spec VARCHAR(50),
                        classification_confidence DOUBLE PRECISION,
                        additional_info JSONB,
                        created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                        FOREIGN KEY (material_id) REFERENCES materials(id) ON DELETE CASCADE,
                        FOREIGN KEY (file_id) REFERENCES files(id) ON DELETE CASCADE
                    )
                """,
                'purchase_requests': """
                    CREATE TABLE purchase_requests (
                        request_id SERIAL PRIMARY KEY,
                        request_no VARCHAR(50) UNIQUE,
                        file_id INTEGER,
                        job_no VARCHAR(50),
                        category VARCHAR(50),
                        material_count INTEGER,
                        excel_file_path VARCHAR(500),
                        requested_by INTEGER,
                        requested_at TIMESTAMP,
                        status VARCHAR(20) DEFAULT 'requested',
                        notes TEXT,
                        created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                        FOREIGN KEY (file_id) REFERENCES files(id),
                        FOREIGN KEY (requested_by) REFERENCES users(user_id)
                    )
                """,
                'purchase_request_items': """
                    CREATE TABLE purchase_request_items (
                        item_id SERIAL PRIMARY KEY,
                        request_id INTEGER NOT NULL,
                        material_id INTEGER NOT NULL,
                        quantity INTEGER,
                        unit VARCHAR(20),
                        description TEXT,
                        user_requirement TEXT,
                        is_ordered BOOLEAN DEFAULT false,
                        is_received BOOLEAN DEFAULT false,
                        created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                        FOREIGN KEY (request_id) REFERENCES purchase_requests(request_id) ON DELETE CASCADE,
                        FOREIGN KEY (material_id) REFERENCES materials(id) ON DELETE CASCADE
                    )
                """
            }

            created_tables = []

            for table_name, create_sql in required_tables.items():
                # Does the table already exist?
                cursor.execute("""
                    SELECT EXISTS (
                        SELECT FROM information_schema.tables
                        WHERE table_name = %s
                    )
                """, (table_name,))

                # The cursor may be a RealDictCursor (dict rows) or a plain
                # cursor (tuple rows) — handle both result shapes.
                result = cursor.fetchone()
                if isinstance(result, dict):
                    table_exists = result.get('exists', False)
                else:
                    table_exists = result[0] if result else False
                if not table_exists:
                    # Create the missing table.
                    cursor.execute(create_sql)
                    created_tables.append(table_name)
                    print(f" 🏗️ {table_name} 테이블 생성됨")

            if created_tables:
                print(f" ✅ {len(created_tables)}개 상세 테이블 생성 완료")

            return len(created_tables) > 0

        except Exception as e:
            print(f" ❌ 상세 테이블 생성 실패: {e}")
            import traceback
            print(f" 상세 오류: {traceback.format_exc()}")
            return False
def verify_critical_tables(self):
|
|
"""중요 테이블들이 존재하는지 확인"""
|
|
print("🔍 중요 테이블 존재 여부 확인...")
|
|
|
|
critical_tables = [
|
|
'users', 'projects', 'files', 'materials',
|
|
'pipe_details', 'fitting_details', 'flange_details',
|
|
'valve_details', 'gasket_details', 'bolt_details'
|
|
]
|
|
|
|
try:
|
|
conn = psycopg2.connect(**self.db_config)
|
|
cursor = conn.cursor()
|
|
|
|
missing_tables = []
|
|
|
|
for table in critical_tables:
|
|
cursor.execute("""
|
|
SELECT EXISTS (
|
|
SELECT FROM information_schema.tables
|
|
WHERE table_name = %s
|
|
)
|
|
""", (table,))
|
|
|
|
if not cursor.fetchone()[0]:
|
|
missing_tables.append(table)
|
|
|
|
cursor.close()
|
|
conn.close()
|
|
|
|
if missing_tables:
|
|
print("❌ 누락된 중요 테이블들:")
|
|
for table in missing_tables:
|
|
print(f" - {table}")
|
|
return False
|
|
else:
|
|
print("✅ 모든 중요 테이블이 존재합니다.")
|
|
return True
|
|
|
|
except Exception as e:
|
|
print(f"❌ 테이블 확인 실패: {e}")
|
|
return False
|
|
|
|
    def _check_and_create_revision_tables(self, cursor) -> bool:
        """Create the simple revision-management tables when missing.

        NOTE: unlike the other fix helpers, this method has no try/except of
        its own — any database error propagates up to check_and_fix_schema().

        Returns:
            bool: True when at least one table was created, False when all
            tables already existed.
        """
        print("🔍 간단한 리비전 관리 테이블들 확인 중...")

        # Table name -> DDL (table plus its indexes).
        revision_tables = {
            'simple_revision_comparisons': '''
                CREATE TABLE simple_revision_comparisons (
                    id SERIAL PRIMARY KEY,
                    job_no VARCHAR(50) NOT NULL,
                    current_file_id INTEGER REFERENCES files(id) NOT NULL,
                    previous_file_id INTEGER REFERENCES files(id) NOT NULL,
                    category VARCHAR(50) NOT NULL,
                    added_count INTEGER DEFAULT 0,
                    removed_count INTEGER DEFAULT 0,
                    changed_count INTEGER DEFAULT 0,
                    unchanged_count INTEGER DEFAULT 0,
                    purchased_affected INTEGER DEFAULT 0,
                    unpurchased_affected INTEGER DEFAULT 0,
                    inventory_count INTEGER DEFAULT 0,
                    comparison_data JSONB,
                    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                    created_by_username VARCHAR(100)
                );
                CREATE INDEX idx_simple_revision_comparisons_job_no ON simple_revision_comparisons(job_no);
                CREATE INDEX idx_simple_revision_comparisons_category ON simple_revision_comparisons(category);
            ''',
            'simple_revision_materials': '''
                CREATE TABLE simple_revision_materials (
                    id SERIAL PRIMARY KEY,
                    comparison_id INTEGER REFERENCES simple_revision_comparisons(id) NOT NULL,
                    material_id INTEGER REFERENCES materials(id),
                    change_type VARCHAR(20) NOT NULL,
                    revision_action VARCHAR(30),
                    quantity_before NUMERIC(10,3),
                    quantity_after NUMERIC(10,3),
                    quantity_difference NUMERIC(10,3),
                    purchase_status VARCHAR(20),
                    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
                );
                CREATE INDEX idx_simple_revision_materials_comparison ON simple_revision_materials(comparison_id);
            ''',
        }

        created_tables = []

        for table_name, create_sql in revision_tables.items():
            # Does the table already exist?
            cursor.execute("""
                SELECT EXISTS (
                    SELECT FROM information_schema.tables
                    WHERE table_name = %s
                )
            """, (table_name,))

            # The cursor may be a RealDictCursor (dict rows) or a plain
            # cursor (tuple rows) — handle both result shapes.
            result = cursor.fetchone()
            if isinstance(result, dict):
                table_exists = result.get('exists', False)
            else:
                table_exists = result[0] if result else False

            if not table_exists:
                # Create the missing table (and its indexes).
                cursor.execute(create_sql)
                created_tables.append(table_name)
                print(f" 🏗️ {table_name} 테이블 생성됨")

        if created_tables:
            print(f"✅ {len(created_tables)}개 리비전 관리 테이블 생성 완료")
            return True
        else:
            print("✅ 모든 리비전 관리 테이블이 이미 존재합니다")
            return False
def _check_and_create_pipe_tables(self, cursor):
|
|
"""PIPE 관련 테이블들 체크 및 생성"""
|
|
print("🔍 PIPE 관련 테이블들 확인 중...")
|
|
|
|
pipe_tables = {
|
|
'pipe_cutting_plans': '''
|
|
CREATE TABLE pipe_cutting_plans (
|
|
id SERIAL PRIMARY KEY,
|
|
job_no VARCHAR(50) NOT NULL,
|
|
file_id INTEGER REFERENCES files(id) NOT NULL,
|
|
area VARCHAR(20) NOT NULL,
|
|
drawing_name VARCHAR(100) NOT NULL,
|
|
line_number VARCHAR(50),
|
|
pipe_info TEXT,
|
|
length NUMERIC(10,3),
|
|
end_preparation VARCHAR(20),
|
|
status VARCHAR(20) DEFAULT 'draft',
|
|
cutting_status VARCHAR(20) DEFAULT 'not_cut',
|
|
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
|
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
|
created_by VARCHAR(100)
|
|
);
|
|
CREATE INDEX idx_pipe_cutting_plans_job_no ON pipe_cutting_plans(job_no);
|
|
CREATE INDEX idx_pipe_cutting_plans_drawing ON pipe_cutting_plans(drawing_name);
|
|
''',
|
|
'pipe_revision_comparisons': '''
|
|
CREATE TABLE pipe_revision_comparisons (
|
|
id SERIAL PRIMARY KEY,
|
|
job_no VARCHAR(50) NOT NULL,
|
|
current_cutting_plan_id INTEGER REFERENCES pipe_cutting_plans(id),
|
|
previous_cutting_plan_id INTEGER REFERENCES pipe_cutting_plans(id),
|
|
drawing_name VARCHAR(100) NOT NULL,
|
|
has_changes BOOLEAN DEFAULT FALSE,
|
|
total_pipes_current INTEGER DEFAULT 0,
|
|
total_pipes_previous INTEGER DEFAULT 0,
|
|
added_pipes INTEGER DEFAULT 0,
|
|
removed_pipes INTEGER DEFAULT 0,
|
|
changed_pipes INTEGER DEFAULT 0,
|
|
unchanged_pipes INTEGER DEFAULT 0,
|
|
comparison_details JSONB,
|
|
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
|
);
|
|
CREATE INDEX idx_pipe_revision_comparisons_job_no ON pipe_revision_comparisons(job_no);
|
|
''',
|
|
'pipe_revision_changes': '''
|
|
CREATE TABLE pipe_revision_changes (
|
|
id SERIAL PRIMARY KEY,
|
|
comparison_id INTEGER REFERENCES pipe_revision_comparisons(id) NOT NULL,
|
|
change_type VARCHAR(20) NOT NULL,
|
|
drawing_name VARCHAR(100) NOT NULL,
|
|
line_number VARCHAR(50),
|
|
pipe_info TEXT,
|
|
length NUMERIC(10,3),
|
|
end_preparation VARCHAR(20),
|
|
old_data JSONB,
|
|
new_data JSONB,
|
|
cutting_status VARCHAR(20) DEFAULT 'not_cut',
|
|
impact_on_material_requirement JSONB,
|
|
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
|
);
|
|
''',
|
|
'pipe_drawing_issues': '''
|
|
CREATE TABLE pipe_drawing_issues (
|
|
id SERIAL PRIMARY KEY,
|
|
job_no VARCHAR(50) NOT NULL,
|
|
snapshot_id INTEGER REFERENCES pipe_issue_snapshots(id),
|
|
area VARCHAR(20) NOT NULL,
|
|
drawing_name VARCHAR(100) NOT NULL,
|
|
issue_description TEXT NOT NULL,
|
|
severity VARCHAR(20) DEFAULT 'normal',
|
|
status VARCHAR(20) DEFAULT 'open',
|
|
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
|
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
|
created_by VARCHAR(100),
|
|
resolved_by VARCHAR(100),
|
|
resolved_at TIMESTAMP
|
|
);
|
|
CREATE INDEX idx_pipe_drawing_issues_job_no ON pipe_drawing_issues(job_no);
|
|
''',
|
|
'pipe_segment_issues': '''
|
|
CREATE TABLE pipe_segment_issues (
|
|
id SERIAL PRIMARY KEY,
|
|
job_no VARCHAR(50) NOT NULL,
|
|
snapshot_id INTEGER REFERENCES pipe_issue_snapshots(id),
|
|
area VARCHAR(20) NOT NULL,
|
|
drawing_name VARCHAR(100) NOT NULL,
|
|
line_no VARCHAR(50) NOT NULL,
|
|
pipe_info TEXT NOT NULL,
|
|
length NUMERIC(10,3) NOT NULL,
|
|
issue_description TEXT NOT NULL,
|
|
issue_type VARCHAR(30),
|
|
severity VARCHAR(20) DEFAULT 'normal',
|
|
status VARCHAR(20) DEFAULT 'open',
|
|
resolution_action TEXT,
|
|
length_adjustment NUMERIC(10,3),
|
|
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
|
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
|
created_by VARCHAR(100),
|
|
resolved_by VARCHAR(100),
|
|
resolved_at TIMESTAMP
|
|
);
|
|
CREATE INDEX idx_pipe_segment_issues_job_no ON pipe_segment_issues(job_no);
|
|
''',
|
|
'pipe_issue_snapshots': '''
|
|
CREATE TABLE pipe_issue_snapshots (
|
|
id SERIAL PRIMARY KEY,
|
|
job_no VARCHAR(50) NOT NULL,
|
|
snapshot_name VARCHAR(200),
|
|
cutting_plan_finalized_at TIMESTAMP NOT NULL,
|
|
is_locked BOOLEAN DEFAULT TRUE,
|
|
snapshot_data JSONB,
|
|
total_segments INTEGER DEFAULT 0,
|
|
total_drawings INTEGER DEFAULT 0,
|
|
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
|
created_by VARCHAR(100),
|
|
description TEXT
|
|
);
|
|
CREATE INDEX idx_pipe_issue_snapshots_job_no ON pipe_issue_snapshots(job_no);
|
|
'''
|
|
}
|
|
|
|
created_tables = []
|
|
|
|
# pipe_issue_snapshots를 먼저 생성 (다른 테이블들이 참조하므로)
|
|
priority_tables = ['pipe_issue_snapshots', 'pipe_cutting_plans']
|
|
|
|
# 우선순위 테이블들 먼저 생성
|
|
for table_name in priority_tables:
|
|
if table_name in pipe_tables:
|
|
cursor.execute("""
|
|
SELECT EXISTS (
|
|
SELECT FROM information_schema.tables
|
|
WHERE table_name = %s
|
|
)
|
|
""", (table_name,))
|
|
|
|
result = cursor.fetchone()
|
|
if isinstance(result, dict):
|
|
table_exists = result.get('exists', False)
|
|
else:
|
|
table_exists = result[0] if result else False
|
|
|
|
if not table_exists:
|
|
cursor.execute(pipe_tables[table_name])
|
|
created_tables.append(table_name)
|
|
print(f" 🏗️ {table_name} 테이블 생성됨")
|
|
|
|
# 나머지 테이블들 생성
|
|
for table_name, create_sql in pipe_tables.items():
|
|
if table_name in priority_tables:
|
|
continue # 이미 처리됨
|
|
|
|
cursor.execute("""
|
|
SELECT EXISTS (
|
|
SELECT FROM information_schema.tables
|
|
WHERE table_name = %s
|
|
)
|
|
""", (table_name,))
|
|
|
|
result = cursor.fetchone()
|
|
if isinstance(result, dict):
|
|
table_exists = result.get('exists', False)
|
|
else:
|
|
table_exists = result[0] if result else False
|
|
|
|
if not table_exists:
|
|
cursor.execute(create_sql)
|
|
created_tables.append(table_name)
|
|
print(f" 🏗️ {table_name} 테이블 생성됨")
|
|
|
|
if created_tables:
|
|
print(f"✅ {len(created_tables)}개 PIPE 관련 테이블 생성 완료")
|
|
return True
|
|
else:
|
|
print("✅ 모든 PIPE 관련 테이블이 이미 존재합니다")
|
|
return False
|
|
|
|
def main():
    """Run the Docker-environment migration: verify tables, then fix schema.

    Returns:
        bool: True on full success, False when any step failed.
    """
    print("🚀 Docker 환경 마이그레이션 시작")

    migrator = DockerMigrator()

    # Each step is a (callable, failure message) pair, run in order;
    # the first failure aborts the migration.
    steps = (
        (migrator.verify_critical_tables, "💥 중요 테이블이 누락되어 있습니다!"),
        (migrator.check_and_fix_schema, "💥 스키마 수정 실패!"),
    )

    for step, failure_message in steps:
        if not step():
            print(failure_message)
            return False

    print("🎉 마이그레이션 완료!")
    return True
if __name__ == "__main__":
    # Propagate the migration result as the process exit code
    # (0 = success, 1 = failure) so the container runtime can react.
    sys.exit(0 if main() else 1)
|