feat: enhance revision logic with fuzzy matching, dynamic material loading, and schema automation

- Improved RevisionComparator with fuzzy matching (RapidFuzz) and dynamic DB material loading
- Enhanced regex patterns for better size/material extraction
- Initialized Alembic for schema migrations and created baseline migration
- Added entrypoint.sh for automated migrations in Docker
- Fixed SyntaxError in fitting_classifier.py
- Updated test suite with new functionality tests
This commit is contained in:
Hyungi Ahn
2026-01-09 09:36:40 +09:00
parent afea8428b2
commit f16bc662ad
11 changed files with 1575 additions and 76 deletions

116
backend/alembic.ini Normal file
View File

@@ -0,0 +1,116 @@
# A generic, single database configuration.
[alembic]
# path to migration scripts
script_location = alembic
# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
# Uncomment the line below if you want the files to be prepended with date and time
# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
# for all available tokens
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
prepend_sys_path = .
# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the python>=3.9 or backports.zoneinfo library.
# Any required deps can be installed by adding `alembic[tz]` to the pip requirements
# string value is passed to ZoneInfo()
# leave blank for localtime
# timezone =
# max length of characters to apply to the
# "slug" field
# truncate_slug_length = 40
# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false
# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false
# version location specification; This defaults
# to alembic/versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "version_path_separator" below.
# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions
# version path separator; As mentioned above, this is the character used to split
# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
# Valid values for version_path_separator are:
#
# version_path_separator = :
# version_path_separator = ;
# version_path_separator = space
version_path_separator = os # Use os.pathsep. Default configuration used for new projects.
# set to 'true' to search source files recursively
# in each "version_locations" directory
# new in Alembic version 1.10
# recursive_version_locations = false
# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8
# sqlalchemy.url = driver://user:pass@localhost/dbname
[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples
# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME
# lint with attempts to fix using "ruff" - use the exec runner, execute a binary
# hooks = ruff
# ruff.type = exec
# ruff.executable = %(here)s/.venv/bin/ruff
# ruff.options = --fix REVISION_SCRIPT_FILENAME
# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARN
handlers = console
qualname =
[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S

1
backend/alembic/README Normal file
View File

@@ -0,0 +1 @@
Generic single-database configuration.

116
backend/alembic/env.py Normal file
View File

@@ -0,0 +1,116 @@
from logging.config import fileConfig
from sqlalchemy import engine_from_config
from sqlalchemy import pool
from alembic import context

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# add your model's MetaData object here
# for 'autogenerate' support
import os
import sys
from pathlib import Path

# Add the backend root directory to sys.path so the `app` package
# (models and config) is importable when Alembic runs from this directory.
backend_path = Path(__file__).parent.parent
sys.path.append(str(backend_path))

from app.models import Base
from app.config import get_settings

target_metadata = Base.metadata

# Update config with app settings.
settings = get_settings()

# Prefer the DATABASE_URL environment variable when set (used for local
# migrations); otherwise fall back to the application settings' URL.
env_db_url = os.getenv("DATABASE_URL")
if env_db_url:
    config.set_main_option("sqlalchemy.url", env_db_url)
else:
    config.set_main_option("sqlalchemy.url", settings.get_database_url())

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.
    """
    # Prefer DATABASE_URL from the environment (local migrations);
    # otherwise use the URL already set on the Alembic config.
    # NOTE: do not print the URL itself — it may embed credentials
    # (user:password@host), and stdout often ends up in CI logs.
    env_db_url = os.getenv("DATABASE_URL")
    if env_db_url:
        print("DEBUG: Using DATABASE_URL from environment")
        url = env_db_url
    else:
        url = config.get_main_option("sqlalchemy.url")
        print("DEBUG: Using default configuration URL")
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()
def run_migrations_online() -> None:
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.
    """
    # Prefer DATABASE_URL from the environment (local migrations);
    # otherwise keep the URL already present on the Alembic config.
    # NOTE: do not print the URL itself — it may embed credentials
    # (user:password@host), and stdout often ends up in CI logs.
    env_db_url = os.getenv("DATABASE_URL")
    if env_db_url:
        print("DEBUG: Using DATABASE_URL from environment")
        config.set_main_option("sqlalchemy.url", env_db_url)
    else:
        print("DEBUG: Using default configuration URL")
    connectable = engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    with connectable.connect() as connection:
        context.configure(
            connection=connection, target_metadata=target_metadata
        )

        with context.begin_transaction():
            context.run_migrations()
# Entry point: Alembic decides offline vs. online mode based on how it
# was invoked (e.g. `alembic upgrade --sql` runs offline).
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()

View File

@@ -0,0 +1,26 @@
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
def upgrade() -> None:
${upgrades if upgrades else "pass"}
def downgrade() -> None:
${downgrades if downgrades else "pass"}

View File

@@ -0,0 +1,872 @@
"""Initial baseline
Revision ID: 8905071fdd15
Revises:
Create Date: 2026-01-09 09:29:05.123731
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision: str = '8905071fdd15'
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Auto-generated baseline migration.

    NOTE(review): although titled "Initial baseline", this revision DROPS a
    large number of tables and indexes that exist in the live database but
    are not represented in the SQLAlchemy models (Alembic autogenerate
    diffed the models against the actual schema). Confirm every drop below
    is intended before running this against a production database —
    `alembic stamp head` may be the safer way to baseline an existing DB.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # --- drop tables/indexes present in the DB but absent from the models ---
    op.drop_table('role_permissions')
    op.drop_table('user_activity_logs')
    op.drop_index('idx_support_details_file_id', table_name='support_details')
    op.drop_index('idx_support_details_material_id', table_name='support_details')
    op.drop_table('support_details')
    op.drop_table('material_purchase_tracking')
    op.drop_index('idx_purchase_confirmations_job_revision', table_name='purchase_confirmations')
    op.drop_table('purchase_confirmations')
    op.drop_table('valve_details')
    op.drop_table('purchase_items')
    op.drop_index('idx_revision_sessions_job_no', table_name='revision_sessions')
    op.drop_index('idx_revision_sessions_status', table_name='revision_sessions')
    op.drop_table('revision_sessions')
    op.drop_table('instrument_details')
    op.drop_table('fitting_details')
    op.drop_table('flange_details')
    op.drop_index('idx_special_material_details_file_id', table_name='special_material_details')
    op.drop_index('idx_special_material_details_material_id', table_name='special_material_details')
    op.drop_table('special_material_details')
    op.drop_table('pipe_end_preparations')
    op.drop_table('user_sessions')
    op.drop_table('login_logs')
    op.drop_table('material_revisions_comparison')
    op.drop_index('idx_purchase_request_items_category', table_name='purchase_request_items')
    op.drop_index('idx_purchase_request_items_material_id', table_name='purchase_request_items')
    op.drop_index('idx_purchase_request_items_request_id', table_name='purchase_request_items')
    op.drop_table('purchase_request_items')
    op.drop_table('material_comparison_details')
    op.drop_index('idx_confirmed_purchase_items_category', table_name='confirmed_purchase_items')
    op.drop_index('idx_confirmed_purchase_items_confirmation', table_name='confirmed_purchase_items')
    op.drop_table('confirmed_purchase_items')
    op.drop_table('material_purchase_mapping')
    op.drop_table('bolt_details')
    op.drop_table('permissions')
    op.drop_index('idx_purchase_requests_job_no', table_name='purchase_requests')
    op.drop_index('idx_purchase_requests_requested_by', table_name='purchase_requests')
    op.drop_index('idx_purchase_requests_status', table_name='purchase_requests')
    op.drop_table('purchase_requests')
    op.drop_table('gasket_details')
    op.drop_index('idx_revision_changes_action', table_name='revision_material_changes')
    op.drop_index('idx_revision_changes_session', table_name='revision_material_changes')
    op.drop_index('idx_revision_changes_status', table_name='revision_material_changes')
    op.drop_table('revision_material_changes')
    op.drop_index('idx_revision_logs_date', table_name='revision_action_logs')
    op.drop_index('idx_revision_logs_session', table_name='revision_action_logs')
    op.drop_index('idx_revision_logs_type', table_name='revision_action_logs')
    op.drop_table('revision_action_logs')
    op.drop_index('idx_inventory_transfers_date', table_name='inventory_transfers')
    op.drop_index('idx_inventory_transfers_material', table_name='inventory_transfers')
    op.drop_table('inventory_transfers')
    op.drop_table('users')
    op.drop_table('jobs')
    # --- files: replace hand-made indexes/FK with model-driven ones,
    #     and drop columns no longer present in the model ---
    op.drop_index('idx_files_active', table_name='files')
    op.drop_index('idx_files_project', table_name='files')
    op.drop_index('idx_files_purchase_confirmed', table_name='files')
    op.drop_index('idx_files_uploaded_by', table_name='files')
    op.create_index(op.f('ix_files_id'), 'files', ['id'], unique=False)
    op.drop_constraint('files_project_id_fkey', 'files', type_='foreignkey')
    # NOTE(review): recreated FK loses the original ON DELETE CASCADE (see downgrade).
    op.create_foreign_key(None, 'files', 'projects', ['project_id'], ['id'])
    op.drop_column('files', 'description')
    op.drop_column('files', 'classification_completed')
    op.drop_column('files', 'purchase_confirmed')
    op.drop_column('files', 'bom_name')
    op.drop_column('files', 'confirmed_at')
    op.drop_column('files', 'confirmed_by')
    op.drop_column('files', 'job_no')
    op.drop_column('files', 'parsed_count')
    # --- material reference tables: add model-driven id/code indexes ---
    op.create_index(op.f('ix_material_categories_id'), 'material_categories', ['id'], unique=False)
    op.create_index(op.f('ix_material_grades_id'), 'material_grades', ['id'], unique=False)
    op.create_index(op.f('ix_material_patterns_id'), 'material_patterns', ['id'], unique=False)
    op.create_index(op.f('ix_material_specifications_id'), 'material_specifications', ['id'], unique=False)
    op.drop_constraint('material_standards_standard_code_key', 'material_standards', type_='unique')
    op.create_index(op.f('ix_material_standards_id'), 'material_standards', ['id'], unique=False)
    op.create_index(op.f('ix_material_standards_standard_code'), 'material_standards', ['standard_code'], unique=True)
    op.create_index(op.f('ix_material_tubing_mapping_id'), 'material_tubing_mapping', ['id'], unique=False)
    # --- materials: column type adjustments (note verified_by SHRINKS
    #     100 -> 50 and full_material_grade TEXT -> 100; may truncate/fail
    #     on existing data) ---
    op.alter_column('materials', 'verified_by',
        existing_type=sa.VARCHAR(length=100),
        type_=sa.String(length=50),
        existing_nullable=True)
    op.alter_column('materials', 'material_hash',
        existing_type=sa.VARCHAR(length=64),
        type_=sa.String(length=100),
        existing_nullable=True)
    op.alter_column('materials', 'full_material_grade',
        existing_type=sa.TEXT(),
        type_=sa.String(length=100),
        existing_nullable=True)
    op.drop_index('idx_materials_category', table_name='materials')
    op.drop_index('idx_materials_classification_details', table_name='materials', postgresql_using='gin')
    op.drop_index('idx_materials_file', table_name='materials')
    op.drop_index('idx_materials_material_size', table_name='materials')
    op.create_index(op.f('ix_materials_id'), 'materials', ['id'], unique=False)
    op.drop_constraint('materials_file_id_fkey', 'materials', type_='foreignkey')
    op.create_foreign_key(None, 'materials', 'files', ['file_id'], ['id'])
    op.drop_column('materials', 'classification_details')
    # --- pipe_details: new material/confidence columns, tighter file_id ---
    op.add_column('pipe_details', sa.Column('material_standard', sa.String(length=50), nullable=True))
    op.add_column('pipe_details', sa.Column('material_grade', sa.String(length=50), nullable=True))
    op.add_column('pipe_details', sa.Column('material_type', sa.String(length=50), nullable=True))
    op.add_column('pipe_details', sa.Column('wall_thickness', sa.String(length=50), nullable=True))
    op.add_column('pipe_details', sa.Column('nominal_size', sa.String(length=50), nullable=True))
    op.add_column('pipe_details', sa.Column('material_confidence', sa.Numeric(precision=3, scale=2), nullable=True))
    op.add_column('pipe_details', sa.Column('manufacturing_confidence', sa.Numeric(precision=3, scale=2), nullable=True))
    op.add_column('pipe_details', sa.Column('end_prep_confidence', sa.Numeric(precision=3, scale=2), nullable=True))
    op.add_column('pipe_details', sa.Column('schedule_confidence', sa.Numeric(precision=3, scale=2), nullable=True))
    # NOTE(review): making file_id NOT NULL fails if existing rows hold NULLs.
    op.alter_column('pipe_details', 'file_id',
        existing_type=sa.INTEGER(),
        nullable=False)
    op.create_index(op.f('ix_pipe_details_id'), 'pipe_details', ['id'], unique=False)
    op.drop_constraint('pipe_details_material_id_fkey', 'pipe_details', type_='foreignkey')
    op.drop_constraint('pipe_details_file_id_fkey', 'pipe_details', type_='foreignkey')
    op.create_foreign_key(None, 'pipe_details', 'files', ['file_id'], ['id'])
    op.drop_column('pipe_details', 'outer_diameter')
    op.drop_column('pipe_details', 'additional_info')
    op.drop_column('pipe_details', 'classification_confidence')
    op.drop_column('pipe_details', 'material_id')
    op.drop_column('pipe_details', 'material_spec')
    # --- projects: swap hand-made indexes/unique for model-driven ones ---
    op.drop_index('idx_projects_design_code', table_name='projects')
    op.drop_index('idx_projects_official_code', table_name='projects')
    op.drop_constraint('projects_official_project_code_key', 'projects', type_='unique')
    op.create_index(op.f('ix_projects_id'), 'projects', ['id'], unique=False)
    op.create_index(op.f('ix_projects_official_project_code'), 'projects', ['official_project_code'], unique=True)
    op.create_index(op.f('ix_requirement_types_id'), 'requirement_types', ['id'], unique=False)
    op.create_index(op.f('ix_special_material_grades_id'), 'special_material_grades', ['id'], unique=False)
    op.create_index(op.f('ix_special_material_patterns_id'), 'special_material_patterns', ['id'], unique=False)
    op.create_index(op.f('ix_special_materials_id'), 'special_materials', ['id'], unique=False)
    op.create_index(op.f('ix_tubing_categories_id'), 'tubing_categories', ['id'], unique=False)
    # --- tubing: JSONB -> generic JSON, DATE -> DateTime, new indexes ---
    op.alter_column('tubing_manufacturers', 'contact_info',
        existing_type=postgresql.JSONB(astext_type=sa.Text()),
        type_=sa.JSON(),
        existing_nullable=True)
    op.alter_column('tubing_manufacturers', 'quality_certs',
        existing_type=postgresql.JSONB(astext_type=sa.Text()),
        type_=sa.JSON(),
        existing_nullable=True)
    op.create_index(op.f('ix_tubing_manufacturers_id'), 'tubing_manufacturers', ['id'], unique=False)
    op.alter_column('tubing_products', 'last_price_update',
        existing_type=sa.DATE(),
        type_=sa.DateTime(),
        existing_nullable=True)
    op.drop_constraint('tubing_products_specification_id_manufacturer_id_manufactur_key', 'tubing_products', type_='unique')
    op.create_index(op.f('ix_tubing_products_id'), 'tubing_products', ['id'], unique=False)
    op.create_index(op.f('ix_tubing_specifications_id'), 'tubing_specifications', ['id'], unique=False)
    # --- user_requirements: DATE -> DateTime, model-driven FKs/indexes ---
    op.alter_column('user_requirements', 'due_date',
        existing_type=sa.DATE(),
        type_=sa.DateTime(),
        existing_nullable=True)
    op.create_index(op.f('ix_user_requirements_id'), 'user_requirements', ['id'], unique=False)
    op.drop_constraint('user_requirements_material_id_fkey', 'user_requirements', type_='foreignkey')
    op.drop_constraint('user_requirements_file_id_fkey', 'user_requirements', type_='foreignkey')
    op.create_foreign_key(None, 'user_requirements', 'materials', ['material_id'], ['id'])
    op.create_foreign_key(None, 'user_requirements', 'files', ['file_id'], ['id'])
    # ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.drop_constraint(None, 'user_requirements', type_='foreignkey')
op.drop_constraint(None, 'user_requirements', type_='foreignkey')
op.create_foreign_key('user_requirements_file_id_fkey', 'user_requirements', 'files', ['file_id'], ['id'], ondelete='CASCADE')
op.create_foreign_key('user_requirements_material_id_fkey', 'user_requirements', 'materials', ['material_id'], ['id'], ondelete='CASCADE')
op.drop_index(op.f('ix_user_requirements_id'), table_name='user_requirements')
op.alter_column('user_requirements', 'due_date',
existing_type=sa.DateTime(),
type_=sa.DATE(),
existing_nullable=True)
op.drop_index(op.f('ix_tubing_specifications_id'), table_name='tubing_specifications')
op.drop_index(op.f('ix_tubing_products_id'), table_name='tubing_products')
op.create_unique_constraint('tubing_products_specification_id_manufacturer_id_manufactur_key', 'tubing_products', ['specification_id', 'manufacturer_id', 'manufacturer_part_number'])
op.alter_column('tubing_products', 'last_price_update',
existing_type=sa.DateTime(),
type_=sa.DATE(),
existing_nullable=True)
op.drop_index(op.f('ix_tubing_manufacturers_id'), table_name='tubing_manufacturers')
op.alter_column('tubing_manufacturers', 'quality_certs',
existing_type=sa.JSON(),
type_=postgresql.JSONB(astext_type=sa.Text()),
existing_nullable=True)
op.alter_column('tubing_manufacturers', 'contact_info',
existing_type=sa.JSON(),
type_=postgresql.JSONB(astext_type=sa.Text()),
existing_nullable=True)
op.drop_index(op.f('ix_tubing_categories_id'), table_name='tubing_categories')
op.drop_index(op.f('ix_special_materials_id'), table_name='special_materials')
op.drop_index(op.f('ix_special_material_patterns_id'), table_name='special_material_patterns')
op.drop_index(op.f('ix_special_material_grades_id'), table_name='special_material_grades')
op.drop_index(op.f('ix_requirement_types_id'), table_name='requirement_types')
op.drop_index(op.f('ix_projects_official_project_code'), table_name='projects')
op.drop_index(op.f('ix_projects_id'), table_name='projects')
op.create_unique_constraint('projects_official_project_code_key', 'projects', ['official_project_code'])
op.create_index('idx_projects_official_code', 'projects', ['official_project_code'], unique=False)
op.create_index('idx_projects_design_code', 'projects', ['design_project_code'], unique=False)
op.add_column('pipe_details', sa.Column('material_spec', sa.VARCHAR(length=100), autoincrement=False, nullable=True))
op.add_column('pipe_details', sa.Column('material_id', sa.INTEGER(), autoincrement=False, nullable=True))
op.add_column('pipe_details', sa.Column('classification_confidence', sa.DOUBLE_PRECISION(precision=53), autoincrement=False, nullable=True))
op.add_column('pipe_details', sa.Column('additional_info', postgresql.JSONB(astext_type=sa.Text()), autoincrement=False, nullable=True))
op.add_column('pipe_details', sa.Column('outer_diameter', sa.VARCHAR(length=50), autoincrement=False, nullable=True))
op.drop_constraint(None, 'pipe_details', type_='foreignkey')
op.create_foreign_key('pipe_details_file_id_fkey', 'pipe_details', 'files', ['file_id'], ['id'], ondelete='CASCADE')
op.create_foreign_key('pipe_details_material_id_fkey', 'pipe_details', 'materials', ['material_id'], ['id'], ondelete='CASCADE')
op.drop_index(op.f('ix_pipe_details_id'), table_name='pipe_details')
op.alter_column('pipe_details', 'file_id',
existing_type=sa.INTEGER(),
nullable=True)
op.drop_column('pipe_details', 'schedule_confidence')
op.drop_column('pipe_details', 'end_prep_confidence')
op.drop_column('pipe_details', 'manufacturing_confidence')
op.drop_column('pipe_details', 'material_confidence')
op.drop_column('pipe_details', 'nominal_size')
op.drop_column('pipe_details', 'wall_thickness')
op.drop_column('pipe_details', 'material_type')
op.drop_column('pipe_details', 'material_grade')
op.drop_column('pipe_details', 'material_standard')
op.add_column('materials', sa.Column('classification_details', postgresql.JSONB(astext_type=sa.Text()), autoincrement=False, nullable=True))
op.drop_constraint(None, 'materials', type_='foreignkey')
op.create_foreign_key('materials_file_id_fkey', 'materials', 'files', ['file_id'], ['id'], ondelete='CASCADE')
op.drop_index(op.f('ix_materials_id'), table_name='materials')
op.create_index('idx_materials_material_size', 'materials', ['material_grade', 'size_spec'], unique=False)
op.create_index('idx_materials_file', 'materials', ['file_id'], unique=False)
op.create_index('idx_materials_classification_details', 'materials', ['classification_details'], unique=False, postgresql_using='gin')
op.create_index('idx_materials_category', 'materials', ['classified_category', 'classified_subcategory'], unique=False)
op.alter_column('materials', 'full_material_grade',
existing_type=sa.String(length=100),
type_=sa.TEXT(),
existing_nullable=True)
op.alter_column('materials', 'material_hash',
existing_type=sa.String(length=100),
type_=sa.VARCHAR(length=64),
existing_nullable=True)
op.alter_column('materials', 'verified_by',
existing_type=sa.String(length=50),
type_=sa.VARCHAR(length=100),
existing_nullable=True)
op.drop_index(op.f('ix_material_tubing_mapping_id'), table_name='material_tubing_mapping')
op.drop_index(op.f('ix_material_standards_standard_code'), table_name='material_standards')
op.drop_index(op.f('ix_material_standards_id'), table_name='material_standards')
op.create_unique_constraint('material_standards_standard_code_key', 'material_standards', ['standard_code'])
op.drop_index(op.f('ix_material_specifications_id'), table_name='material_specifications')
op.drop_index(op.f('ix_material_patterns_id'), table_name='material_patterns')
op.drop_index(op.f('ix_material_grades_id'), table_name='material_grades')
op.drop_index(op.f('ix_material_categories_id'), table_name='material_categories')
op.add_column('files', sa.Column('parsed_count', sa.INTEGER(), server_default=sa.text('0'), autoincrement=False, nullable=True))
op.add_column('files', sa.Column('job_no', sa.VARCHAR(length=50), autoincrement=False, nullable=True))
op.add_column('files', sa.Column('confirmed_by', sa.VARCHAR(length=100), autoincrement=False, nullable=True, comment='구매 수량 확정자'))
op.add_column('files', sa.Column('confirmed_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True, comment='구매 수량 확정 시간'))
op.add_column('files', sa.Column('bom_name', sa.VARCHAR(length=255), autoincrement=False, nullable=True))
op.add_column('files', sa.Column('purchase_confirmed', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=True, comment='구매 수량 확정 여부'))
op.add_column('files', sa.Column('classification_completed', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=True))
op.add_column('files', sa.Column('description', sa.TEXT(), autoincrement=False, nullable=True))
op.drop_constraint(None, 'files', type_='foreignkey')
op.create_foreign_key('files_project_id_fkey', 'files', 'projects', ['project_id'], ['id'], ondelete='CASCADE')
op.drop_index(op.f('ix_files_id'), table_name='files')
op.create_index('idx_files_uploaded_by', 'files', ['uploaded_by'], unique=False)
op.create_index('idx_files_purchase_confirmed', 'files', ['purchase_confirmed'], unique=False)
op.create_index('idx_files_project', 'files', ['project_id'], unique=False)
op.create_index('idx_files_active', 'files', ['is_active'], unique=False)
op.create_table('jobs',
sa.Column('job_no', sa.VARCHAR(length=50), autoincrement=False, nullable=False),
sa.Column('job_name', sa.VARCHAR(length=200), autoincrement=False, nullable=False),
sa.Column('client_name', sa.VARCHAR(length=100), autoincrement=False, nullable=False),
sa.Column('end_user', sa.VARCHAR(length=100), autoincrement=False, nullable=True),
sa.Column('epc_company', sa.VARCHAR(length=100), autoincrement=False, nullable=True),
sa.Column('project_site', sa.VARCHAR(length=200), autoincrement=False, nullable=True),
sa.Column('contract_date', sa.DATE(), autoincrement=False, nullable=True),
sa.Column('delivery_date', sa.DATE(), autoincrement=False, nullable=True),
sa.Column('delivery_terms', sa.VARCHAR(length=100), autoincrement=False, nullable=True),
sa.Column('status', sa.VARCHAR(length=20), server_default=sa.text("'진행중'::character varying"), autoincrement=False, nullable=True),
sa.Column('delivery_completed_date', sa.DATE(), autoincrement=False, nullable=True),
sa.Column('project_closed_date', sa.DATE(), autoincrement=False, nullable=True),
sa.Column('description', sa.TEXT(), autoincrement=False, nullable=True),
sa.Column('created_by', sa.VARCHAR(length=50), autoincrement=False, nullable=True),
sa.Column('created_at', postgresql.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
sa.Column('updated_at', postgresql.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
sa.Column('is_active', sa.BOOLEAN(), server_default=sa.text('true'), autoincrement=False, nullable=True),
sa.Column('updated_by', sa.VARCHAR(length=100), autoincrement=False, nullable=True),
sa.Column('assigned_to', sa.VARCHAR(length=100), autoincrement=False, nullable=True),
sa.Column('project_type', sa.VARCHAR(length=50), server_default=sa.text("'냉동기'::character varying"), autoincrement=False, nullable=False),
sa.PrimaryKeyConstraint('job_no', name='jobs_pkey')
)
op.create_table('users',
sa.Column('user_id', sa.INTEGER(), server_default=sa.text("nextval('users_user_id_seq'::regclass)"), autoincrement=True, nullable=False),
sa.Column('username', sa.VARCHAR(length=50), autoincrement=False, nullable=False),
sa.Column('password', sa.VARCHAR(length=255), autoincrement=False, nullable=False),
sa.Column('name', sa.VARCHAR(length=100), autoincrement=False, nullable=False),
sa.Column('email', sa.VARCHAR(length=100), autoincrement=False, nullable=True),
sa.Column('role', sa.VARCHAR(length=20), server_default=sa.text("'user'::character varying"), autoincrement=False, nullable=True),
sa.Column('access_level', sa.VARCHAR(length=20), server_default=sa.text("'worker'::character varying"), autoincrement=False, nullable=True),
sa.Column('is_active', sa.BOOLEAN(), server_default=sa.text('true'), autoincrement=False, nullable=True),
sa.Column('failed_login_attempts', sa.INTEGER(), server_default=sa.text('0'), autoincrement=False, nullable=True),
sa.Column('locked_until', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
sa.Column('department', sa.VARCHAR(length=50), autoincrement=False, nullable=True),
sa.Column('position', sa.VARCHAR(length=50), autoincrement=False, nullable=True),
sa.Column('phone', sa.VARCHAR(length=20), autoincrement=False, nullable=True),
sa.Column('created_at', postgresql.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
sa.Column('updated_at', postgresql.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
sa.Column('last_login_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
sa.Column('status', sa.VARCHAR(length=20), server_default=sa.text("'active'::character varying"), autoincrement=False, nullable=True),
sa.CheckConstraint("access_level::text = ANY (ARRAY['admin'::character varying, 'system'::character varying, 'group_leader'::character varying, 'support_team'::character varying, 'worker'::character varying]::text[])", name='users_access_level_check'),
sa.CheckConstraint("role::text = ANY (ARRAY['admin'::character varying, 'system'::character varying, 'leader'::character varying, 'support'::character varying, 'user'::character varying]::text[])", name='users_role_check'),
sa.PrimaryKeyConstraint('user_id', name='users_pkey'),
sa.UniqueConstraint('username', name='users_username_key'),
postgresql_ignore_search_path=False
)
op.create_table('inventory_transfers',
sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
sa.Column('revision_change_id', sa.INTEGER(), autoincrement=False, nullable=True),
sa.Column('material_description', sa.TEXT(), autoincrement=False, nullable=False),
sa.Column('category', sa.VARCHAR(length=50), autoincrement=False, nullable=False),
sa.Column('quantity', sa.NUMERIC(precision=10, scale=3), autoincrement=False, nullable=False),
sa.Column('unit', sa.VARCHAR(length=10), autoincrement=False, nullable=False),
sa.Column('inventory_location', sa.VARCHAR(length=100), autoincrement=False, nullable=True),
sa.Column('storage_notes', sa.TEXT(), autoincrement=False, nullable=True),
sa.Column('transferred_by', sa.VARCHAR(length=100), autoincrement=False, nullable=False),
sa.Column('transferred_at', postgresql.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
sa.Column('status', sa.VARCHAR(length=20), server_default=sa.text("'transferred'::character varying"), autoincrement=False, nullable=True),
sa.ForeignKeyConstraint(['revision_change_id'], ['revision_material_changes.id'], name='inventory_transfers_revision_change_id_fkey'),
sa.PrimaryKeyConstraint('id', name='inventory_transfers_pkey')
)
# Lookup indexes for inventory_transfers: by material description and by transfer date.
op.create_index('idx_inventory_transfers_material', 'inventory_transfers', ['material_description'], unique=False)
op.create_index('idx_inventory_transfers_date', 'inventory_transfers', ['transferred_at'], unique=False)
# Audit log of actions executed during a revision session (who/when/result payload).
# NOTE(review): FKs target revision_material_changes and revision_sessions, both created
# *later* in this function — confirm this ordering is valid here (tables pre-exist, FK
# checks deferred, or Alembic emits constraints separately); otherwise reorder the calls.
op.create_table('revision_action_logs',
sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
sa.Column('session_id', sa.INTEGER(), autoincrement=False, nullable=True),
sa.Column('revision_change_id', sa.INTEGER(), autoincrement=False, nullable=True),
sa.Column('action_type', sa.VARCHAR(length=30), autoincrement=False, nullable=False),
sa.Column('action_description', sa.TEXT(), autoincrement=False, nullable=True),
sa.Column('executed_by', sa.VARCHAR(length=100), autoincrement=False, nullable=False),
# executed_at defaults to the DB clock at insert time.
sa.Column('executed_at', postgresql.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
sa.Column('result', sa.VARCHAR(length=20), autoincrement=False, nullable=False),
sa.Column('result_message', sa.TEXT(), autoincrement=False, nullable=True),
# Free-form structured result payload (JSONB).
sa.Column('result_data', postgresql.JSONB(astext_type=sa.Text()), autoincrement=False, nullable=True),
sa.ForeignKeyConstraint(['revision_change_id'], ['revision_material_changes.id'], name='revision_action_logs_revision_change_id_fkey'),
sa.ForeignKeyConstraint(['session_id'], ['revision_sessions.id'], name='revision_action_logs_session_id_fkey'),
sa.PrimaryKeyConstraint('id', name='revision_action_logs_pkey')
)
op.create_index('idx_revision_logs_type', 'revision_action_logs', ['action_type'], unique=False)
op.create_index('idx_revision_logs_session', 'revision_action_logs', ['session_id'], unique=False)
op.create_index('idx_revision_logs_date', 'revision_action_logs', ['executed_at'], unique=False)
# Per-material change record within a revision session: previous/current quantities,
# the computed difference, purchase status, and processing workflow fields.
# NOTE(review): FK to revision_sessions (created later in this function) — see note above.
op.create_table('revision_material_changes',
sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
sa.Column('session_id', sa.INTEGER(), autoincrement=False, nullable=True),
sa.Column('material_id', sa.INTEGER(), autoincrement=False, nullable=True),
sa.Column('previous_material_id', sa.INTEGER(), autoincrement=False, nullable=True),
sa.Column('material_description', sa.TEXT(), autoincrement=False, nullable=False),
sa.Column('category', sa.VARCHAR(length=50), autoincrement=False, nullable=False),
sa.Column('change_type', sa.VARCHAR(length=20), autoincrement=False, nullable=False),
sa.Column('previous_quantity', sa.NUMERIC(precision=10, scale=3), autoincrement=False, nullable=True),
sa.Column('current_quantity', sa.NUMERIC(precision=10, scale=3), autoincrement=False, nullable=True),
sa.Column('quantity_difference', sa.NUMERIC(precision=10, scale=3), autoincrement=False, nullable=True),
sa.Column('purchase_status', sa.VARCHAR(length=20), autoincrement=False, nullable=False),
sa.Column('purchase_confirmed_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
sa.Column('revision_action', sa.VARCHAR(length=30), autoincrement=False, nullable=True),
# Workflow state; new rows start as 'pending'.
sa.Column('action_status', sa.VARCHAR(length=20), server_default=sa.text("'pending'::character varying"), autoincrement=False, nullable=True),
sa.Column('processed_by', sa.VARCHAR(length=100), autoincrement=False, nullable=True),
sa.Column('processed_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
sa.Column('processing_notes', sa.TEXT(), autoincrement=False, nullable=True),
sa.Column('created_at', postgresql.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
sa.ForeignKeyConstraint(['material_id'], ['materials.id'], name='revision_material_changes_material_id_fkey'),
# Deleting a session cascades to its change rows.
sa.ForeignKeyConstraint(['session_id'], ['revision_sessions.id'], name='revision_material_changes_session_id_fkey', ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id', name='revision_material_changes_pkey')
)
op.create_index('idx_revision_changes_status', 'revision_material_changes', ['action_status'], unique=False)
op.create_index('idx_revision_changes_session', 'revision_material_changes', ['session_id'], unique=False)
op.create_index('idx_revision_changes_action', 'revision_material_changes', ['revision_action'], unique=False)
# Classification detail table for gasket materials (type/subtype, size, rating,
# and the classifier's confidence score); rows cascade-delete with their file/material.
op.create_table('gasket_details',
sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
sa.Column('material_id', sa.INTEGER(), autoincrement=False, nullable=True),
sa.Column('file_id', sa.INTEGER(), autoincrement=False, nullable=True),
sa.Column('gasket_type', sa.VARCHAR(length=50), autoincrement=False, nullable=True),
sa.Column('gasket_subtype', sa.VARCHAR(length=50), autoincrement=False, nullable=True),
sa.Column('material_type', sa.VARCHAR(length=100), autoincrement=False, nullable=True),
sa.Column('filler_material', sa.VARCHAR(length=100), autoincrement=False, nullable=True),
sa.Column('size_inches', sa.VARCHAR(length=50), autoincrement=False, nullable=True),
sa.Column('pressure_rating', sa.VARCHAR(length=50), autoincrement=False, nullable=True),
sa.Column('thickness', sa.VARCHAR(length=50), autoincrement=False, nullable=True),
sa.Column('temperature_range', sa.VARCHAR(length=100), autoincrement=False, nullable=True),
sa.Column('fire_safe', sa.BOOLEAN(), autoincrement=False, nullable=True),
sa.Column('classification_confidence', sa.DOUBLE_PRECISION(precision=53), autoincrement=False, nullable=True),
sa.Column('additional_info', postgresql.JSONB(astext_type=sa.Text()), autoincrement=False, nullable=True),
sa.Column('created_at', postgresql.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
sa.Column('updated_at', postgresql.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
sa.ForeignKeyConstraint(['file_id'], ['files.id'], name='gasket_details_file_id_fkey', ondelete='CASCADE'),
sa.ForeignKeyConstraint(['material_id'], ['materials.id'], name='gasket_details_material_id_fkey', ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id', name='gasket_details_pkey')
)
# Purchase-request header table. The PK default is an explicit nextval() on a reflected
# sequence (autogenerate preserved it from the live schema); request_no is unique.
op.create_table('purchase_requests',
sa.Column('request_id', sa.INTEGER(), server_default=sa.text("nextval('purchase_requests_request_id_seq'::regclass)"), autoincrement=True, nullable=False),
sa.Column('request_no', sa.VARCHAR(length=50), autoincrement=False, nullable=False),
sa.Column('file_id', sa.INTEGER(), autoincrement=False, nullable=True),
sa.Column('job_no', sa.VARCHAR(length=50), autoincrement=False, nullable=False),
sa.Column('category', sa.VARCHAR(length=50), autoincrement=False, nullable=True),
sa.Column('material_count', sa.INTEGER(), server_default=sa.text('0'), autoincrement=False, nullable=True),
sa.Column('excel_file_path', sa.VARCHAR(length=500), autoincrement=False, nullable=True),
sa.Column('project_name', sa.VARCHAR(length=200), autoincrement=False, nullable=True),
sa.Column('requested_by', sa.INTEGER(), autoincrement=False, nullable=True),
sa.Column('requested_by_username', sa.VARCHAR(length=100), autoincrement=False, nullable=True),
sa.Column('request_date', postgresql.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
# Approval workflow state; new requests start as 'pending'.
sa.Column('status', sa.VARCHAR(length=20), server_default=sa.text("'pending'::character varying"), autoincrement=False, nullable=True),
sa.Column('total_items', sa.INTEGER(), server_default=sa.text('0'), autoincrement=False, nullable=True),
sa.Column('notes', sa.TEXT(), autoincrement=False, nullable=True),
sa.Column('approved_by', sa.INTEGER(), autoincrement=False, nullable=True),
sa.Column('approved_by_username', sa.VARCHAR(length=100), autoincrement=False, nullable=True),
sa.Column('approved_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
sa.Column('created_at', postgresql.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
sa.Column('updated_at', postgresql.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
sa.ForeignKeyConstraint(['approved_by'], ['users.user_id'], name='purchase_requests_approved_by_fkey'),
sa.ForeignKeyConstraint(['file_id'], ['files.id'], name='purchase_requests_file_id_fkey'),
sa.ForeignKeyConstraint(['requested_by'], ['users.user_id'], name='purchase_requests_requested_by_fkey'),
sa.PrimaryKeyConstraint('request_id', name='purchase_requests_pkey'),
sa.UniqueConstraint('request_no', name='purchase_requests_request_no_key'),
# Emit the sequence default verbatim instead of schema-qualifying it.
postgresql_ignore_search_path=False
)
op.create_index('idx_purchase_requests_status', 'purchase_requests', ['status'], unique=False)
op.create_index('idx_purchase_requests_requested_by', 'purchase_requests', ['requested_by'], unique=False)
op.create_index('idx_purchase_requests_job_no', 'purchase_requests', ['job_no'], unique=False)
# Permission catalog for the auth/RBAC module; permission_name is unique.
op.create_table('permissions',
sa.Column('permission_id', sa.INTEGER(), server_default=sa.text("nextval('permissions_permission_id_seq'::regclass)"), autoincrement=True, nullable=False),
sa.Column('permission_name', sa.VARCHAR(length=50), autoincrement=False, nullable=False),
sa.Column('description', sa.TEXT(), autoincrement=False, nullable=True),
sa.Column('module', sa.VARCHAR(length=30), autoincrement=False, nullable=True),
sa.Column('created_at', postgresql.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
sa.PrimaryKeyConstraint('permission_id', name='permissions_pkey'),
sa.UniqueConstraint('permission_name', name='permissions_permission_name_key'),
postgresql_ignore_search_path=False
)
# Classification detail table for bolt materials (thread, diameter, grade, coating,
# plus nut/washer inclusion flags); rows cascade-delete with their file/material.
op.create_table('bolt_details',
sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
sa.Column('material_id', sa.INTEGER(), autoincrement=False, nullable=True),
sa.Column('file_id', sa.INTEGER(), autoincrement=False, nullable=True),
sa.Column('bolt_type', sa.VARCHAR(length=50), autoincrement=False, nullable=True),
sa.Column('thread_type', sa.VARCHAR(length=50), autoincrement=False, nullable=True),
sa.Column('diameter', sa.VARCHAR(length=50), autoincrement=False, nullable=True),
sa.Column('length', sa.VARCHAR(length=50), autoincrement=False, nullable=True),
sa.Column('material_standard', sa.VARCHAR(length=100), autoincrement=False, nullable=True),
sa.Column('material_grade', sa.VARCHAR(length=100), autoincrement=False, nullable=True),
sa.Column('coating_type', sa.VARCHAR(length=100), autoincrement=False, nullable=True),
sa.Column('pressure_rating', sa.VARCHAR(length=50), autoincrement=False, nullable=True),
sa.Column('includes_nut', sa.BOOLEAN(), autoincrement=False, nullable=True),
sa.Column('includes_washer', sa.BOOLEAN(), autoincrement=False, nullable=True),
sa.Column('nut_type', sa.VARCHAR(length=50), autoincrement=False, nullable=True),
sa.Column('washer_type', sa.VARCHAR(length=50), autoincrement=False, nullable=True),
sa.Column('classification_confidence', sa.DOUBLE_PRECISION(precision=53), autoincrement=False, nullable=True),
sa.Column('additional_info', postgresql.JSONB(astext_type=sa.Text()), autoincrement=False, nullable=True),
sa.Column('created_at', postgresql.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
sa.Column('updated_at', postgresql.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
sa.ForeignKeyConstraint(['file_id'], ['files.id'], name='bolt_details_file_id_fkey', ondelete='CASCADE'),
sa.ForeignKeyConstraint(['material_id'], ['materials.id'], name='bolt_details_material_id_fkey', ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id', name='bolt_details_pkey')
)
# Many-to-many link between materials and purchase_items with a quantity ratio;
# the (material_id, purchase_item_id) pair is unique.
# NOTE(review): FK targets purchase_items, created later in this function —
# confirm the creation order is valid in this migration context.
op.create_table('material_purchase_mapping',
sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
sa.Column('material_id', sa.INTEGER(), autoincrement=False, nullable=False),
sa.Column('purchase_item_id', sa.INTEGER(), autoincrement=False, nullable=False),
sa.Column('quantity_ratio', sa.NUMERIC(precision=5, scale=2), server_default=sa.text('1.0'), autoincrement=False, nullable=True),
sa.Column('created_at', postgresql.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
sa.ForeignKeyConstraint(['material_id'], ['materials.id'], name='material_purchase_mapping_material_id_fkey', ondelete='CASCADE'),
sa.ForeignKeyConstraint(['purchase_item_id'], ['purchase_items.id'], name='material_purchase_mapping_purchase_item_id_fkey', ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id', name='material_purchase_mapping_pkey'),
sa.UniqueConstraint('material_id', 'purchase_item_id', name='material_purchase_mapping_material_id_purchase_item_id_key')
)
# Line items of a confirmed purchase (BOM vs. calculated quantity with a safety factor);
# rows cascade-delete with their purchase_confirmations parent. The table-level
# comment string is stored in the DB and intentionally kept verbatim (Korean).
op.create_table('confirmed_purchase_items',
sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
sa.Column('confirmation_id', sa.INTEGER(), autoincrement=False, nullable=True),
sa.Column('item_code', sa.VARCHAR(length=100), autoincrement=False, nullable=False),
sa.Column('category', sa.VARCHAR(length=50), autoincrement=False, nullable=False),
sa.Column('specification', sa.TEXT(), autoincrement=False, nullable=True),
sa.Column('size', sa.VARCHAR(length=100), autoincrement=False, nullable=True),
sa.Column('material', sa.VARCHAR(length=100), autoincrement=False, nullable=True),
sa.Column('bom_quantity', sa.NUMERIC(precision=15, scale=3), server_default=sa.text('0'), autoincrement=False, nullable=False),
sa.Column('calculated_qty', sa.NUMERIC(precision=15, scale=3), server_default=sa.text('0'), autoincrement=False, nullable=False),
sa.Column('unit', sa.VARCHAR(length=20), server_default=sa.text("'EA'::character varying"), autoincrement=False, nullable=False),
sa.Column('safety_factor', sa.NUMERIC(precision=5, scale=3), server_default=sa.text('1.0'), autoincrement=False, nullable=False),
sa.Column('created_at', postgresql.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
sa.ForeignKeyConstraint(['confirmation_id'], ['purchase_confirmations.id'], name='confirmed_purchase_items_confirmation_id_fkey', ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id', name='confirmed_purchase_items_pkey'),
comment='확정된 구매 품목 상세 테이블'
)
op.create_index('idx_confirmed_purchase_items_confirmation', 'confirmed_purchase_items', ['confirmation_id'], unique=False)
op.create_index('idx_confirmed_purchase_items_category', 'confirmed_purchase_items', ['category'], unique=False)
# Per-material rows of a revision comparison (change type, quantity delta, and the
# classifier's category/confidence), keyed by a 32-char material hash.
# NOTE(review): FK targets material_revisions_comparison, created later in this
# function — confirm the creation order is valid in this migration context.
op.create_table('material_comparison_details',
sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
sa.Column('comparison_id', sa.INTEGER(), autoincrement=False, nullable=False),
sa.Column('material_hash', sa.VARCHAR(length=32), autoincrement=False, nullable=False),
sa.Column('change_type', sa.VARCHAR(length=20), autoincrement=False, nullable=False),
sa.Column('description', sa.TEXT(), autoincrement=False, nullable=False),
sa.Column('size_spec', sa.VARCHAR(length=100), autoincrement=False, nullable=True),
sa.Column('material_grade', sa.VARCHAR(length=100), autoincrement=False, nullable=True),
sa.Column('previous_quantity', sa.NUMERIC(precision=10, scale=3), server_default=sa.text('0'), autoincrement=False, nullable=True),
sa.Column('current_quantity', sa.NUMERIC(precision=10, scale=3), server_default=sa.text('0'), autoincrement=False, nullable=True),
sa.Column('quantity_diff', sa.NUMERIC(precision=10, scale=3), server_default=sa.text('0'), autoincrement=False, nullable=True),
sa.Column('additional_purchase_needed', sa.NUMERIC(precision=10, scale=3), server_default=sa.text('0'), autoincrement=False, nullable=True),
sa.Column('classified_category', sa.VARCHAR(length=50), autoincrement=False, nullable=True),
sa.Column('classification_confidence', sa.NUMERIC(precision=3, scale=2), autoincrement=False, nullable=True),
sa.Column('created_at', postgresql.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
sa.ForeignKeyConstraint(['comparison_id'], ['material_revisions_comparison.id'], name='material_comparison_details_comparison_id_fkey', ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id', name='material_comparison_details_pkey')
)
# Line items of a purchase request (description, classification, quantity/unit,
# order/receive flags); rows cascade-delete with their request and material.
op.create_table('purchase_request_items',
sa.Column('item_id', sa.INTEGER(), autoincrement=True, nullable=False),
sa.Column('request_id', sa.INTEGER(), autoincrement=False, nullable=True),
sa.Column('material_id', sa.INTEGER(), autoincrement=False, nullable=True),
sa.Column('description', sa.TEXT(), autoincrement=False, nullable=False),
sa.Column('category', sa.VARCHAR(length=50), autoincrement=False, nullable=True),
sa.Column('subcategory', sa.VARCHAR(length=100), autoincrement=False, nullable=True),
sa.Column('material_grade', sa.VARCHAR(length=50), autoincrement=False, nullable=True),
sa.Column('size_spec', sa.VARCHAR(length=50), autoincrement=False, nullable=True),
sa.Column('quantity', sa.NUMERIC(precision=10, scale=3), autoincrement=False, nullable=False),
sa.Column('unit', sa.VARCHAR(length=10), autoincrement=False, nullable=False),
sa.Column('drawing_name', sa.VARCHAR(length=100), autoincrement=False, nullable=True),
sa.Column('notes', sa.TEXT(), autoincrement=False, nullable=True),
sa.Column('created_at', postgresql.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
sa.Column('user_requirement', sa.TEXT(), autoincrement=False, nullable=True),
# Procurement progress flags; both default to false for new items.
sa.Column('is_ordered', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=True),
sa.Column('is_received', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=True),
sa.ForeignKeyConstraint(['material_id'], ['materials.id'], name='purchase_request_items_material_id_fkey', ondelete='CASCADE'),
sa.ForeignKeyConstraint(['request_id'], ['purchase_requests.request_id'], name='purchase_request_items_request_id_fkey', ondelete='CASCADE'),
sa.PrimaryKeyConstraint('item_id', name='purchase_request_items_pkey')
)
op.create_index('idx_purchase_request_items_request_id', 'purchase_request_items', ['request_id'], unique=False)
op.create_index('idx_purchase_request_items_material_id', 'purchase_request_items', ['material_id'], unique=False)
op.create_index('idx_purchase_request_items_category', 'purchase_request_items', ['category'], unique=False)
# Summary of a BOM comparison between two revisions of the same job: item counts per
# change class plus a JSONB detail blob. One row per (job_no, current, previous) triple.
op.create_table('material_revisions_comparison',
sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
sa.Column('job_no', sa.VARCHAR(length=50), autoincrement=False, nullable=False),
sa.Column('current_revision', sa.VARCHAR(length=20), autoincrement=False, nullable=False),
sa.Column('previous_revision', sa.VARCHAR(length=20), autoincrement=False, nullable=False),
sa.Column('current_file_id', sa.INTEGER(), autoincrement=False, nullable=False),
sa.Column('previous_file_id', sa.INTEGER(), autoincrement=False, nullable=False),
sa.Column('total_current_items', sa.INTEGER(), server_default=sa.text('0'), autoincrement=False, nullable=True),
sa.Column('total_previous_items', sa.INTEGER(), server_default=sa.text('0'), autoincrement=False, nullable=True),
sa.Column('new_items_count', sa.INTEGER(), server_default=sa.text('0'), autoincrement=False, nullable=True),
sa.Column('modified_items_count', sa.INTEGER(), server_default=sa.text('0'), autoincrement=False, nullable=True),
sa.Column('removed_items_count', sa.INTEGER(), server_default=sa.text('0'), autoincrement=False, nullable=True),
sa.Column('unchanged_items_count', sa.INTEGER(), server_default=sa.text('0'), autoincrement=False, nullable=True),
sa.Column('comparison_details', postgresql.JSONB(astext_type=sa.Text()), autoincrement=False, nullable=True),
sa.Column('created_at', postgresql.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
sa.Column('created_by', sa.VARCHAR(length=100), autoincrement=False, nullable=True),
sa.ForeignKeyConstraint(['current_file_id'], ['files.id'], name='material_revisions_comparison_current_file_id_fkey'),
sa.ForeignKeyConstraint(['previous_file_id'], ['files.id'], name='material_revisions_comparison_previous_file_id_fkey'),
sa.PrimaryKeyConstraint('id', name='material_revisions_comparison_pkey'),
sa.UniqueConstraint('job_no', 'current_revision', 'previous_revision', name='material_revisions_comparison_job_no_current_revision_previ_key')
)
# Login audit log: one row per attempt; login_status is CHECK-constrained to
# 'success' | 'failed'. Rows cascade-delete with the user.
op.create_table('login_logs',
sa.Column('log_id', sa.INTEGER(), autoincrement=True, nullable=False),
sa.Column('user_id', sa.INTEGER(), autoincrement=False, nullable=True),
sa.Column('login_time', postgresql.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
# VARCHAR(45) fits a full IPv6 textual address.
sa.Column('ip_address', sa.VARCHAR(length=45), autoincrement=False, nullable=True),
sa.Column('user_agent', sa.TEXT(), autoincrement=False, nullable=True),
sa.Column('login_status', sa.VARCHAR(length=20), autoincrement=False, nullable=True),
sa.Column('failure_reason', sa.VARCHAR(length=100), autoincrement=False, nullable=True),
sa.Column('session_duration', sa.INTEGER(), autoincrement=False, nullable=True),
sa.Column('created_at', postgresql.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
sa.CheckConstraint("login_status::text = ANY (ARRAY['success'::character varying, 'failed'::character varying]::text[])", name='login_logs_login_status_check'),
sa.ForeignKeyConstraint(['user_id'], ['users.user_id'], name='login_logs_user_id_fkey', ondelete='CASCADE'),
sa.PrimaryKeyConstraint('log_id', name='login_logs_pkey')
)
# Refresh-token session store; is_active defaults to true, rows cascade-delete with the user.
op.create_table('user_sessions',
sa.Column('session_id', sa.INTEGER(), autoincrement=True, nullable=False),
sa.Column('user_id', sa.INTEGER(), autoincrement=False, nullable=True),
sa.Column('refresh_token', sa.VARCHAR(length=500), autoincrement=False, nullable=False),
sa.Column('expires_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=False),
sa.Column('ip_address', sa.VARCHAR(length=45), autoincrement=False, nullable=True),
sa.Column('user_agent', sa.TEXT(), autoincrement=False, nullable=True),
sa.Column('is_active', sa.BOOLEAN(), server_default=sa.text('true'), autoincrement=False, nullable=True),
sa.Column('created_at', postgresql.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
sa.Column('updated_at', postgresql.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
sa.ForeignKeyConstraint(['user_id'], ['users.user_id'], name='user_sessions_user_id_fkey', ondelete='CASCADE'),
sa.PrimaryKeyConstraint('session_id', name='user_sessions_pkey')
)
# Pipe end-preparation extraction results per material: detected type (default 'PBE'),
# matched pattern, confidence, and the original vs. cleaned description text.
op.create_table('pipe_end_preparations',
sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
sa.Column('material_id', sa.INTEGER(), autoincrement=False, nullable=False),
sa.Column('file_id', sa.INTEGER(), autoincrement=False, nullable=False),
sa.Column('end_preparation_type', sa.VARCHAR(length=50), server_default=sa.text("'PBE'::character varying"), autoincrement=False, nullable=True),
sa.Column('end_preparation_code', sa.VARCHAR(length=20), autoincrement=False, nullable=True),
sa.Column('machining_required', sa.BOOLEAN(), server_default=sa.text('false'), autoincrement=False, nullable=True),
sa.Column('cutting_note', sa.TEXT(), autoincrement=False, nullable=True),
sa.Column('original_description', sa.TEXT(), autoincrement=False, nullable=False),
sa.Column('clean_description', sa.TEXT(), autoincrement=False, nullable=False),
sa.Column('confidence', sa.DOUBLE_PRECISION(precision=53), server_default=sa.text('0.0'), autoincrement=False, nullable=True),
sa.Column('matched_pattern', sa.VARCHAR(length=100), autoincrement=False, nullable=True),
sa.Column('created_at', postgresql.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
sa.Column('updated_at', postgresql.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
sa.ForeignKeyConstraint(['file_id'], ['files.id'], name='pipe_end_preparations_file_id_fkey', ondelete='CASCADE'),
sa.ForeignKeyConstraint(['material_id'], ['materials.id'], name='pipe_end_preparations_material_id_fkey', ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id', name='pipe_end_preparations_pkey')
)
# Classification detail table for "special" (uncategorized) materials.
op.create_table('special_material_details',
sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
sa.Column('material_id', sa.INTEGER(), autoincrement=False, nullable=True),
sa.Column('file_id', sa.INTEGER(), autoincrement=False, nullable=True),
sa.Column('special_type', sa.VARCHAR(length=50), autoincrement=False, nullable=True),
sa.Column('special_subtype', sa.VARCHAR(length=100), autoincrement=False, nullable=True),
sa.Column('material_standard', sa.VARCHAR(length=100), autoincrement=False, nullable=True),
sa.Column('material_grade', sa.VARCHAR(length=50), autoincrement=False, nullable=True),
sa.Column('specifications', sa.TEXT(), autoincrement=False, nullable=True),
sa.Column('dimensions', sa.VARCHAR(length=100), autoincrement=False, nullable=True),
sa.Column('weight_kg', sa.NUMERIC(precision=10, scale=3), autoincrement=False, nullable=True),
sa.Column('classification_confidence', sa.NUMERIC(precision=3, scale=2), autoincrement=False, nullable=True),
sa.Column('created_at', postgresql.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
sa.Column('updated_at', postgresql.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
sa.ForeignKeyConstraint(['file_id'], ['files.id'], name='special_material_details_file_id_fkey', ondelete='CASCADE'),
sa.ForeignKeyConstraint(['material_id'], ['materials.id'], name='special_material_details_material_id_fkey', ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id', name='special_material_details_pkey')
)
op.create_index('idx_special_material_details_material_id', 'special_material_details', ['material_id'], unique=False)
op.create_index('idx_special_material_details_file_id', 'special_material_details', ['file_id'], unique=False)
# Classification detail table for flange materials (type, facing, rating, bolt holes).
op.create_table('flange_details',
sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
sa.Column('material_id', sa.INTEGER(), autoincrement=False, nullable=True),
sa.Column('file_id', sa.INTEGER(), autoincrement=False, nullable=True),
sa.Column('flange_type', sa.VARCHAR(length=50), autoincrement=False, nullable=True),
sa.Column('facing_type', sa.VARCHAR(length=50), autoincrement=False, nullable=True),
sa.Column('pressure_rating', sa.VARCHAR(length=50), autoincrement=False, nullable=True),
sa.Column('material_standard', sa.VARCHAR(length=100), autoincrement=False, nullable=True),
sa.Column('material_grade', sa.VARCHAR(length=100), autoincrement=False, nullable=True),
sa.Column('size_inches', sa.VARCHAR(length=50), autoincrement=False, nullable=True),
sa.Column('bolt_hole_count', sa.INTEGER(), autoincrement=False, nullable=True),
sa.Column('bolt_hole_size', sa.VARCHAR(length=50), autoincrement=False, nullable=True),
sa.Column('classification_confidence', sa.DOUBLE_PRECISION(precision=53), autoincrement=False, nullable=True),
sa.Column('additional_info', postgresql.JSONB(astext_type=sa.Text()), autoincrement=False, nullable=True),
sa.Column('created_at', postgresql.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
sa.Column('updated_at', postgresql.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
sa.ForeignKeyConstraint(['file_id'], ['files.id'], name='flange_details_file_id_fkey', ondelete='CASCADE'),
sa.ForeignKeyConstraint(['material_id'], ['materials.id'], name='flange_details_material_id_fkey', ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id', name='flange_details_pkey')
)
# Classification detail table for fitting materials (type, connection, sizes, schedule).
op.create_table('fitting_details',
sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
sa.Column('material_id', sa.INTEGER(), autoincrement=False, nullable=True),
sa.Column('file_id', sa.INTEGER(), autoincrement=False, nullable=True),
sa.Column('fitting_type', sa.VARCHAR(length=50), autoincrement=False, nullable=True),
sa.Column('fitting_subtype', sa.VARCHAR(length=50), autoincrement=False, nullable=True),
sa.Column('connection_method', sa.VARCHAR(length=50), autoincrement=False, nullable=True),
sa.Column('connection_code', sa.VARCHAR(length=50), autoincrement=False, nullable=True),
sa.Column('pressure_rating', sa.VARCHAR(length=50), autoincrement=False, nullable=True),
sa.Column('max_pressure', sa.VARCHAR(length=50), autoincrement=False, nullable=True),
sa.Column('manufacturing_method', sa.VARCHAR(length=50), autoincrement=False, nullable=True),
sa.Column('material_standard', sa.VARCHAR(length=100), autoincrement=False, nullable=True),
sa.Column('material_grade', sa.VARCHAR(length=100), autoincrement=False, nullable=True),
sa.Column('material_type', sa.VARCHAR(length=50), autoincrement=False, nullable=True),
# main/reduced sizes support reducing fittings (e.g. reducers, reducing tees).
sa.Column('main_size', sa.VARCHAR(length=50), autoincrement=False, nullable=True),
sa.Column('reduced_size', sa.VARCHAR(length=50), autoincrement=False, nullable=True),
sa.Column('length_mm', sa.NUMERIC(precision=10, scale=3), autoincrement=False, nullable=True),
sa.Column('schedule', sa.VARCHAR(length=50), autoincrement=False, nullable=True),
sa.Column('classification_confidence', sa.DOUBLE_PRECISION(precision=53), autoincrement=False, nullable=True),
sa.Column('additional_info', postgresql.JSONB(astext_type=sa.Text()), autoincrement=False, nullable=True),
sa.Column('created_at', postgresql.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
sa.Column('updated_at', postgresql.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
sa.ForeignKeyConstraint(['file_id'], ['files.id'], name='fitting_details_file_id_fkey', ondelete='CASCADE'),
sa.ForeignKeyConstraint(['material_id'], ['materials.id'], name='fitting_details_material_id_fkey', ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id', name='fitting_details_pkey')
)
# Classification detail table for instrumentation items (measurement type/range,
# connection, materials, electrical rating, output signal).
op.create_table('instrument_details',
sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
sa.Column('material_id', sa.INTEGER(), autoincrement=False, nullable=True),
sa.Column('file_id', sa.INTEGER(), autoincrement=False, nullable=True),
sa.Column('instrument_type', sa.VARCHAR(length=50), autoincrement=False, nullable=True),
sa.Column('instrument_subtype', sa.VARCHAR(length=50), autoincrement=False, nullable=True),
sa.Column('measurement_type', sa.VARCHAR(length=50), autoincrement=False, nullable=True),
sa.Column('measurement_range', sa.VARCHAR(length=100), autoincrement=False, nullable=True),
sa.Column('accuracy', sa.VARCHAR(length=50), autoincrement=False, nullable=True),
sa.Column('connection_type', sa.VARCHAR(length=50), autoincrement=False, nullable=True),
sa.Column('connection_size', sa.VARCHAR(length=50), autoincrement=False, nullable=True),
sa.Column('body_material', sa.VARCHAR(length=100), autoincrement=False, nullable=True),
sa.Column('wetted_parts_material', sa.VARCHAR(length=100), autoincrement=False, nullable=True),
sa.Column('electrical_rating', sa.VARCHAR(length=100), autoincrement=False, nullable=True),
sa.Column('output_signal', sa.VARCHAR(length=50), autoincrement=False, nullable=True),
sa.Column('classification_confidence', sa.DOUBLE_PRECISION(precision=53), autoincrement=False, nullable=True),
sa.Column('additional_info', postgresql.JSONB(astext_type=sa.Text()), autoincrement=False, nullable=True),
sa.Column('created_at', postgresql.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
sa.Column('updated_at', postgresql.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
sa.ForeignKeyConstraint(['file_id'], ['files.id'], name='instrument_details_file_id_fkey', ondelete='CASCADE'),
sa.ForeignKeyConstraint(['material_id'], ['materials.id'], name='instrument_details_material_id_fkey', ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id', name='instrument_details_pkey')
)
# Header/progress row for a revision-comparison run: status (default 'processing'),
# per-change-class counters, and action counters (cancel/transfer/additional purchase).
# NOTE(review): revision_material_changes and revision_action_logs (created earlier in
# this function) declare FKs to this table — verify the overall statement order works.
op.create_table('revision_sessions',
sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
sa.Column('job_no', sa.VARCHAR(length=50), autoincrement=False, nullable=False),
sa.Column('current_file_id', sa.INTEGER(), autoincrement=False, nullable=True),
sa.Column('previous_file_id', sa.INTEGER(), autoincrement=False, nullable=True),
sa.Column('current_revision', sa.VARCHAR(length=20), autoincrement=False, nullable=False),
sa.Column('previous_revision', sa.VARCHAR(length=20), autoincrement=False, nullable=False),
sa.Column('status', sa.VARCHAR(length=20), server_default=sa.text("'processing'::character varying"), autoincrement=False, nullable=True),
sa.Column('total_materials', sa.INTEGER(), server_default=sa.text('0'), autoincrement=False, nullable=True),
sa.Column('processed_materials', sa.INTEGER(), server_default=sa.text('0'), autoincrement=False, nullable=True),
sa.Column('added_count', sa.INTEGER(), server_default=sa.text('0'), autoincrement=False, nullable=True),
sa.Column('removed_count', sa.INTEGER(), server_default=sa.text('0'), autoincrement=False, nullable=True),
sa.Column('changed_count', sa.INTEGER(), server_default=sa.text('0'), autoincrement=False, nullable=True),
sa.Column('unchanged_count', sa.INTEGER(), server_default=sa.text('0'), autoincrement=False, nullable=True),
sa.Column('purchase_cancel_count', sa.INTEGER(), server_default=sa.text('0'), autoincrement=False, nullable=True),
sa.Column('inventory_transfer_count', sa.INTEGER(), server_default=sa.text('0'), autoincrement=False, nullable=True),
sa.Column('additional_purchase_count', sa.INTEGER(), server_default=sa.text('0'), autoincrement=False, nullable=True),
sa.Column('created_by', sa.VARCHAR(length=100), autoincrement=False, nullable=True),
sa.Column('created_at', postgresql.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
sa.Column('completed_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
sa.ForeignKeyConstraint(['current_file_id'], ['files.id'], name='revision_sessions_current_file_id_fkey'),
sa.ForeignKeyConstraint(['previous_file_id'], ['files.id'], name='revision_sessions_previous_file_id_fkey'),
sa.PrimaryKeyConstraint('id', name='revision_sessions_pkey')
)
op.create_index('idx_revision_sessions_status', 'revision_sessions', ['status'], unique=False)
op.create_index('idx_revision_sessions_job_no', 'revision_sessions', ['job_no'], unique=False)
op.create_table('purchase_items',
sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
sa.Column('item_code', sa.VARCHAR(length=100), autoincrement=False, nullable=False),
sa.Column('category', sa.VARCHAR(length=50), autoincrement=False, nullable=False),
sa.Column('specification', sa.TEXT(), autoincrement=False, nullable=False),
sa.Column('material_spec', sa.VARCHAR(length=200), autoincrement=False, nullable=True),
sa.Column('size_spec', sa.VARCHAR(length=100), autoincrement=False, nullable=True),
sa.Column('unit', sa.VARCHAR(length=10), autoincrement=False, nullable=False),
sa.Column('bom_quantity', sa.NUMERIC(precision=10, scale=3), autoincrement=False, nullable=False),
sa.Column('safety_factor', sa.NUMERIC(precision=3, scale=2), server_default=sa.text('1.10'), autoincrement=False, nullable=True),
sa.Column('minimum_order_qty', sa.NUMERIC(precision=10, scale=3), server_default=sa.text('0'), autoincrement=False, nullable=True),
sa.Column('order_unit_qty', sa.NUMERIC(precision=10, scale=3), server_default=sa.text('1'), autoincrement=False, nullable=True),
sa.Column('calculated_qty', sa.NUMERIC(precision=10, scale=3), autoincrement=False, nullable=True),
sa.Column('cutting_loss', sa.NUMERIC(precision=10, scale=3), server_default=sa.text('0'), autoincrement=False, nullable=True),
sa.Column('standard_length', sa.NUMERIC(precision=10, scale=3), autoincrement=False, nullable=True),
sa.Column('pipes_count', sa.INTEGER(), autoincrement=False, nullable=True),
sa.Column('waste_length', sa.NUMERIC(precision=10, scale=3), autoincrement=False, nullable=True),
sa.Column('detailed_spec', postgresql.JSONB(astext_type=sa.Text()), autoincrement=False, nullable=True),
sa.Column('preferred_supplier', sa.VARCHAR(length=200), autoincrement=False, nullable=True),
sa.Column('last_unit_price', sa.NUMERIC(precision=10, scale=2), autoincrement=False, nullable=True),
sa.Column('currency', sa.VARCHAR(length=10), server_default=sa.text("'KRW'::character varying"), autoincrement=False, nullable=True),
sa.Column('lead_time_days', sa.INTEGER(), server_default=sa.text('30'), autoincrement=False, nullable=True),
sa.Column('job_no', sa.VARCHAR(length=50), autoincrement=False, nullable=False),
sa.Column('revision', sa.VARCHAR(length=20), server_default=sa.text("'Rev.0'::character varying"), autoincrement=False, nullable=True),
sa.Column('file_id', sa.INTEGER(), autoincrement=False, nullable=True),
sa.Column('is_active', sa.BOOLEAN(), server_default=sa.text('true'), autoincrement=False, nullable=True),
sa.Column('created_at', postgresql.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
sa.Column('updated_at', postgresql.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
sa.Column('created_by', sa.VARCHAR(length=100), autoincrement=False, nullable=True),
sa.Column('updated_by', sa.VARCHAR(length=100), autoincrement=False, nullable=True),
sa.Column('approved_by', sa.VARCHAR(length=100), autoincrement=False, nullable=True),
sa.Column('approved_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
sa.ForeignKeyConstraint(['file_id'], ['files.id'], name='purchase_items_file_id_fkey', ondelete='SET NULL'),
sa.PrimaryKeyConstraint('id', name='purchase_items_pkey'),
sa.UniqueConstraint('item_code', name='purchase_items_item_code_key')
)
op.create_table('valve_details',
sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
sa.Column('material_id', sa.INTEGER(), autoincrement=False, nullable=True),
sa.Column('file_id', sa.INTEGER(), autoincrement=False, nullable=True),
sa.Column('valve_type', sa.VARCHAR(length=50), autoincrement=False, nullable=True),
sa.Column('valve_subtype', sa.VARCHAR(length=50), autoincrement=False, nullable=True),
sa.Column('actuator_type', sa.VARCHAR(length=50), autoincrement=False, nullable=True),
sa.Column('connection_method', sa.VARCHAR(length=50), autoincrement=False, nullable=True),
sa.Column('pressure_rating', sa.VARCHAR(length=50), autoincrement=False, nullable=True),
sa.Column('pressure_class', sa.VARCHAR(length=50), autoincrement=False, nullable=True),
sa.Column('body_material', sa.VARCHAR(length=100), autoincrement=False, nullable=True),
sa.Column('trim_material', sa.VARCHAR(length=100), autoincrement=False, nullable=True),
sa.Column('size_inches', sa.VARCHAR(length=50), autoincrement=False, nullable=True),
sa.Column('fire_safe', sa.BOOLEAN(), autoincrement=False, nullable=True),
sa.Column('low_temp_service', sa.BOOLEAN(), autoincrement=False, nullable=True),
sa.Column('special_features', postgresql.JSONB(astext_type=sa.Text()), autoincrement=False, nullable=True),
sa.Column('classification_confidence', sa.DOUBLE_PRECISION(precision=53), autoincrement=False, nullable=True),
sa.Column('additional_info', postgresql.JSONB(astext_type=sa.Text()), autoincrement=False, nullable=True),
sa.Column('created_at', postgresql.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
sa.Column('updated_at', postgresql.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
sa.ForeignKeyConstraint(['file_id'], ['files.id'], name='valve_details_file_id_fkey', ondelete='CASCADE'),
sa.ForeignKeyConstraint(['material_id'], ['materials.id'], name='valve_details_material_id_fkey', ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id', name='valve_details_pkey')
)
op.create_table('purchase_confirmations',
sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
sa.Column('job_no', sa.VARCHAR(length=50), autoincrement=False, nullable=False),
sa.Column('file_id', sa.INTEGER(), autoincrement=False, nullable=True),
sa.Column('bom_name', sa.VARCHAR(length=255), autoincrement=False, nullable=False),
sa.Column('revision', sa.VARCHAR(length=50), server_default=sa.text("'Rev.0'::character varying"), autoincrement=False, nullable=False),
sa.Column('confirmed_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=False),
sa.Column('confirmed_by', sa.VARCHAR(length=100), autoincrement=False, nullable=False),
sa.Column('is_active', sa.BOOLEAN(), server_default=sa.text('true'), autoincrement=False, nullable=False),
sa.Column('created_at', postgresql.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
sa.Column('updated_at', postgresql.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
sa.ForeignKeyConstraint(['file_id'], ['files.id'], name='purchase_confirmations_file_id_fkey'),
sa.PrimaryKeyConstraint('id', name='purchase_confirmations_pkey'),
comment='구매 수량 확정 마스터 테이블'
)
op.create_index('idx_purchase_confirmations_job_revision', 'purchase_confirmations', ['job_no', 'revision', 'is_active'], unique=False)
op.create_table('material_purchase_tracking',
sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
sa.Column('material_hash', sa.VARCHAR(length=64), autoincrement=False, nullable=False),
sa.Column('original_description', sa.TEXT(), autoincrement=False, nullable=False),
sa.Column('size_spec', sa.VARCHAR(length=50), autoincrement=False, nullable=True),
sa.Column('material_grade', sa.VARCHAR(length=50), autoincrement=False, nullable=True),
sa.Column('bom_quantity', sa.NUMERIC(precision=10, scale=3), autoincrement=False, nullable=False),
sa.Column('confirmed_quantity', sa.NUMERIC(precision=10, scale=3), autoincrement=False, nullable=True),
sa.Column('purchase_quantity', sa.NUMERIC(precision=10, scale=3), autoincrement=False, nullable=True),
sa.Column('status', sa.VARCHAR(length=20), server_default=sa.text("'pending'::character varying"), autoincrement=False, nullable=True),
sa.Column('confirmed_by', sa.VARCHAR(length=100), autoincrement=False, nullable=True),
sa.Column('confirmed_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
sa.Column('ordered_by', sa.VARCHAR(length=100), autoincrement=False, nullable=True),
sa.Column('ordered_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
sa.Column('approved_by', sa.VARCHAR(length=100), autoincrement=False, nullable=True),
sa.Column('approved_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
sa.Column('job_no', sa.VARCHAR(length=50), autoincrement=False, nullable=True),
sa.Column('revision', sa.VARCHAR(length=20), autoincrement=False, nullable=True),
sa.Column('file_id', sa.INTEGER(), autoincrement=False, nullable=True),
sa.Column('created_at', postgresql.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
sa.Column('updated_at', postgresql.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
sa.Column('description', sa.TEXT(), autoincrement=False, nullable=True),
sa.Column('purchase_status', sa.VARCHAR(length=20), server_default=sa.text("'pending'::character varying"), autoincrement=False, nullable=True),
sa.ForeignKeyConstraint(['file_id'], ['files.id'], name='material_purchase_tracking_file_id_fkey', ondelete='SET NULL'),
sa.PrimaryKeyConstraint('id', name='material_purchase_tracking_pkey')
)
op.create_table('support_details',
sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
sa.Column('material_id', sa.INTEGER(), autoincrement=False, nullable=True),
sa.Column('file_id', sa.INTEGER(), autoincrement=False, nullable=True),
sa.Column('support_type', sa.VARCHAR(length=50), autoincrement=False, nullable=True),
sa.Column('support_subtype', sa.VARCHAR(length=100), autoincrement=False, nullable=True),
sa.Column('load_rating', sa.VARCHAR(length=50), autoincrement=False, nullable=True),
sa.Column('load_capacity', sa.VARCHAR(length=50), autoincrement=False, nullable=True),
sa.Column('material_standard', sa.VARCHAR(length=100), autoincrement=False, nullable=True),
sa.Column('material_grade', sa.VARCHAR(length=50), autoincrement=False, nullable=True),
sa.Column('pipe_size', sa.VARCHAR(length=20), autoincrement=False, nullable=True),
sa.Column('length_mm', sa.NUMERIC(precision=10, scale=2), autoincrement=False, nullable=True),
sa.Column('width_mm', sa.NUMERIC(precision=10, scale=2), autoincrement=False, nullable=True),
sa.Column('height_mm', sa.NUMERIC(precision=10, scale=2), autoincrement=False, nullable=True),
sa.Column('classification_confidence', sa.NUMERIC(precision=3, scale=2), autoincrement=False, nullable=True),
sa.Column('created_at', postgresql.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
sa.Column('updated_at', postgresql.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
sa.ForeignKeyConstraint(['file_id'], ['files.id'], name='support_details_file_id_fkey', ondelete='CASCADE'),
sa.ForeignKeyConstraint(['material_id'], ['materials.id'], name='support_details_material_id_fkey', ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id', name='support_details_pkey')
)
op.create_index('idx_support_details_material_id', 'support_details', ['material_id'], unique=False)
op.create_index('idx_support_details_file_id', 'support_details', ['file_id'], unique=False)
op.create_table('user_activity_logs',
sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
sa.Column('user_id', sa.INTEGER(), autoincrement=False, nullable=True),
sa.Column('username', sa.VARCHAR(length=100), autoincrement=False, nullable=False),
sa.Column('activity_type', sa.VARCHAR(length=50), autoincrement=False, nullable=False),
sa.Column('activity_description', sa.TEXT(), autoincrement=False, nullable=True),
sa.Column('target_id', sa.INTEGER(), autoincrement=False, nullable=True),
sa.Column('target_type', sa.VARCHAR(length=50), autoincrement=False, nullable=True),
sa.Column('ip_address', sa.VARCHAR(length=45), autoincrement=False, nullable=True),
sa.Column('user_agent', sa.TEXT(), autoincrement=False, nullable=True),
sa.Column('metadata', postgresql.JSONB(astext_type=sa.Text()), autoincrement=False, nullable=True),
sa.Column('created_at', postgresql.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
sa.PrimaryKeyConstraint('id', name='user_activity_logs_pkey')
)
op.create_table('role_permissions',
sa.Column('role_permission_id', sa.INTEGER(), autoincrement=True, nullable=False),
sa.Column('role', sa.VARCHAR(length=20), autoincrement=False, nullable=False),
sa.Column('permission_id', sa.INTEGER(), autoincrement=False, nullable=True),
sa.Column('created_at', postgresql.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP'), autoincrement=False, nullable=True),
sa.ForeignKeyConstraint(['permission_id'], ['permissions.permission_id'], name='role_permissions_permission_id_fkey', ondelete='CASCADE'),
sa.PrimaryKeyConstraint('role_permission_id', name='role_permissions_pkey'),
sa.UniqueConstraint('role', 'permission_id', name='role_permissions_role_permission_id_key')
)
# ### end Alembic commands ###

View File

@@ -147,6 +147,7 @@ class Settings(BaseSettings):
env_file = ".env"
env_file_encoding = "utf-8"
case_sensitive = False
extra = "ignore"
def __init__(self, **kwargs):
super().__init__(**kwargs)

View File

@@ -253,17 +253,28 @@ def classify_fitting(dat_file: str, description: str, main_nom: str,
is_instrument = any(kw in desc_upper for kw in instrument_keywords)
if is_instrument:
fitting_type["category"] = "INSTRUMENT_FITTING"
if "SWAGELOK" in desc_upper: fitting_type["brand"] = "SWAGELOK"
fitting_type_result["category"] = "INSTRUMENT_FITTING"
if "SWAGELOK" in desc_upper: fitting_type_result["brand"] = "SWAGELOK"
# Tube OD 추출 (예: 1/4", 6MM, 12MM)
tube_match = re.search(r'(\d+(?:/\d+)?)\s*(?:\"|INCH|MM)\s*(?:OD|TUBE)', desc_upper)
if tube_match:
fitting_type["tube_od"] = tube_match.group(0)
fitting_type_result["tube_od"] = tube_match.group(0)
return {
"category": "FITTING",
"fitting_type": fitting_type,
"fitting_type": fitting_type_result,
"connection_method": connection_result,
"pressure_rating": pressure_result,
"schedule": schedule_result,
"manufacturing": manufacturing_result,
"overall_confidence": calculate_fitting_confidence({
"material": material_result.get("confidence", 0),
"fitting_type": fitting_type_result.get("confidence", 0),
"connection": connection_result.get("confidence", 0),
"pressure": pressure_result.get("confidence", 0)
})
}
def analyze_size_pattern_for_fitting_type(description: str, main_nom: str, red_nom: str = None) -> Dict:

View File

@@ -93,16 +93,11 @@ class RevisionComparator:
def compare_materials(self, previous_confirmed: Dict, new_materials: List[Dict]) -> Dict:
"""
기존 확정 자재와 신규 자재 비교
Args:
previous_confirmed: 이전 확정 자재 정보
new_materials: 신규 업로드된 자재 목록
Returns:
비교 결과 딕셔너리
"""
try:
# 이전 확정 자재를 해시맵으로 변환 (빠른 검색을 위해)
from rapidfuzz import fuzz
# 이전 확정 자재 해시맵 생성
confirmed_materials = {}
for item in previous_confirmed["items"]:
material_hash = self._generate_material_hash(
@@ -112,13 +107,19 @@ class RevisionComparator:
)
confirmed_materials[material_hash] = item
# 해시 역참조 맵 (유사도 비교용)
# 해시 -> 정규화된 설명 문자열 (비교 대상)
# 여기서는 specification 자체를 비교 대상으로 사용 (가장 정보량이 많음)
confirmed_specs = {
h: item["specification"] for h, item in confirmed_materials.items()
}
# 신규 자재 분석
unchanged_materials = [] # 변경 없음 (분류 불필요)
changed_materials = [] # 변경됨 (재분류 필요)
new_materials_list = [] # 신규 추가 (분류 필요)
unchanged_materials = []
changed_materials = []
new_materials_list = []
for new_material in new_materials:
# 자재 해시 생성 (description 기반)
description = new_material.get("description", "")
size = self._extract_size_from_description(description)
material = self._extract_material_from_description(description)
@@ -126,13 +127,13 @@ class RevisionComparator:
material_hash = self._generate_material_hash(description, size, material)
if material_hash in confirmed_materials:
# 정확히 일치하는 자재 발견 (해시 일치)
confirmed_item = confirmed_materials[material_hash]
# 수량 비교
new_qty = float(new_material.get("quantity", 0))
confirmed_qty = float(confirmed_item["bom_quantity"])
if abs(new_qty - confirmed_qty) > 0.001: # 수량 변경
if abs(new_qty - confirmed_qty) > 0.001:
changed_materials.append({
**new_material,
"change_type": "QUANTITY_CHANGED",
@@ -140,27 +141,49 @@ class RevisionComparator:
"previous_item": confirmed_item
})
else:
# 수량 동일 - 기존 분류 결과 재사용
unchanged_materials.append({
**new_material,
"reuse_classification": True,
"previous_item": confirmed_item
})
else:
# 신규 자재
new_materials_list.append({
**new_material,
"change_type": "NEW_MATERIAL"
})
# 해시 불일치 - 유사도 검사 (Fuzzy Matching)
# 신규 자재 설명과 기존 확정 자재들의 스펙 비교
best_match_hash = None
best_match_score = 0
# 성능을 위해 간단한 필터링 후 정밀 비교 권장되나,
# 현재는 전체 비교 (데이터량이 많지 않다고 가정)
for h, spec in confirmed_specs.items():
score = fuzz.ratio(description.lower(), spec.lower())
if score > 85: # 85점 이상이면 매우 유사
if score > best_match_score:
best_match_score = score
best_match_hash = h
if best_match_hash:
# 유사한 자재 발견 (오타 또는 미세 변경 가능성)
similar_item = confirmed_materials[best_match_hash]
new_materials_list.append({
**new_material,
"change_type": "NEW_BUT_SIMILAR",
"similarity_score": best_match_score,
"similar_to": similar_item
})
else:
# 완전히 새로운 자재
new_materials_list.append({
**new_material,
"change_type": "NEW_MATERIAL"
})
# 삭제된 자재 찾기 (이전에는 있었지만 현재는 없는 것)
# 삭제된 자재 찾기
new_material_hashes = set()
for material in new_materials:
description = material.get("description", "")
size = self._extract_size_from_description(description)
material_grade = self._extract_material_from_description(description)
hash_key = self._generate_material_hash(description, size, material_grade)
new_material_hashes.add(hash_key)
d = material.get("description", "")
s = self._extract_size_from_description(d)
m = self._extract_material_from_description(d)
new_material_hashes.add(self._generate_material_hash(d, s, m))
removed_materials = []
for hash_key, confirmed_item in confirmed_materials.items():
@@ -186,7 +209,7 @@ class RevisionComparator:
"removed_materials": removed_materials
}
logger.info(f"리비전 비교 완료: 변경없음 {len(unchanged_materials)}, "
logger.info(f"리비전 비교 완료 (Fuzzy 적용): 변경없음 {len(unchanged_materials)}, "
f"변경됨 {len(changed_materials)}, 신규 {len(new_materials_list)}, "
f"삭제됨 {len(removed_materials)}")
@@ -195,7 +218,7 @@ class RevisionComparator:
except Exception as e:
logger.error(f"자재 비교 실패: {str(e)}")
raise
def _extract_revision_number(self, revision: str) -> int:
"""리비전 문자열에서 숫자 추출 (Rev.1 → 1)"""
try:
@@ -206,37 +229,136 @@ class RevisionComparator:
return 0
def _generate_material_hash(self, description: str, size: str, material: str) -> str:
"""자재 고유성 판단을 위한 해시 생성"""
# RULES.md의 코딩 컨벤션 준수
hash_input = f"{description}|{size}|{material}".lower().strip()
"""
자재 고유성 판단을 위한 해시 생성
Args:
description: 자재 설명
size: 자재 규격/크기
material: 자재 재질
Returns:
MD5 해시 문자열
"""
import re
def normalize(s: Optional[str]) -> str:
if s is None:
return ""
# 다중 공백을 단일 공백으로 치환하고 앞뒤 공백 제거
s = re.sub(r'\s+', ' ', str(s))
return s.strip().lower()
# 각 컴포넌트 정규화
d_norm = normalize(description)
s_norm = normalize(size)
m_norm = normalize(material)
# RULES.md의 코딩 컨벤션 준수 (pipe separator 사용)
# 값이 없는 경우에도 구분자를 포함하여 구조 유지 (예: "desc||mat")
hash_input = f"{d_norm}|{s_norm}|{m_norm}"
return hashlib.md5(hash_input.encode()).hexdigest()
def _extract_size_from_description(self, description: str) -> str:
"""자재 설명에서 사이즈 정보 추출"""
# 간단한 사이즈 패턴 추출 (실제로는 더 정교한 로직 필요)
"""
자재 설명에서 사이즈 정보 추출
지원하는 패턴 (단어 경계 \b 추가하여 정확도 향상):
- 1/2" (인치)
- 100A (A단위)
- 50mm (밀리미터)
- 10x20 (가로x세로)
- DN100 (DN단위)
"""
if not description:
return ""
import re
size_patterns = [
r'(\d+(?:\.\d+)?)\s*(?:mm|MM|인치|inch|")',
r'(\d+(?:\.\d+)?)\s*x\s*(\d+(?:\.\d+)?)',
r'DN\s*(\d+)',
r'(\d+)\s*A'
# 인치 패턴 (분수 포함): 1/2", 1.5", 1-1/2"
r'\b(\d+(?:[-/.]\d+)?)\s*(?:inch|인치|")',
# 밀리미터 패턴: 100mm, 100.5 MM
r'\b(\d+(?:\.\d+)?)\s*(?:mm|MM)\b',
# A단위 패턴: 100A, 100 A
r'\b(\d+)\s*A\b',
# DN단위 패턴: DN100, DN 100
r'DN\s*(\d+)\b',
# 치수 패턴: 10x20, 10*20
r'\b(\d+(?:\.\d+)?)\s*[xX*]\s*(\d+(?:\.\d+)?)\b'
]
for pattern in size_patterns:
match = re.search(pattern, description, re.IGNORECASE)
if match:
return match.group(0)
return match.group(0).strip()
return ""
def _load_materials_from_db(self) -> List[str]:
    """
    Load the list of known material codes, combining DB rows with a
    built-in default list.

    The query unions active codes from material_specifications,
    material_grades and special_materials.  On any DB error the default
    list alone is returned (a warning is logged), so extraction keeps
    working without a database.

    Returns:
        Material codes sorted longest-first so the most specific code
        (e.g. "SUS316L") is tried before its prefix ("SUS316"); ties are
        broken alphabetically for deterministic ordering.
    """
    # Single source of truth for the fallback list (previously duplicated
    # in both the success path and the except handler).
    default_materials = [
        "SUS316L", "SUS316", "SUS304L", "SUS304",
        "SS316L", "SS316", "SS304L", "SS304",
        "A105N", "A105",
        "A234 WPB", "A234",
        "A106 Gr.B", "A106",
        "WCB", "CF8M", "CF8",
        "CS", "STS", "PVC", "PP", "PE"
    ]
    try:
        query = text("""
            SELECT spec_code FROM material_specifications
            WHERE is_active = TRUE
            UNION
            SELECT grade_code FROM material_grades
            WHERE is_active = TRUE
            UNION
            SELECT material_name FROM special_materials
            WHERE is_active = TRUE
        """)
        result = self.db.execute(query).fetchall()
        db_materials = [row[0] for row in result]
    except Exception as e:
        logger.warning(f"DB 자재 로딩 실패 (기본값 사용): {str(e)}")
        return default_materials
    # Merge, dedupe, then sort longest-first (alphabetical tiebreak keeps
    # the result deterministic across runs, unlike raw set() ordering).
    return sorted(set(db_materials + default_materials),
                  key=lambda name: (-len(name), name))
def _extract_material_from_description(self, description: str) -> str:
    """
    Extract the material grade from a material description.

    Candidate codes come from _load_materials_from_db(), which returns
    them longest-first so the most specific grade (e.g. "SUS316L") is
    matched before its prefix ("SUS316").

    Returns:
        The matched material code, or "" when none is found or the
        description is empty/None.
    """
    if not description:
        return ""
    # NOTE: loaded on every call for simplicity; hoist into __init__ or
    # an lru_cache if this ever shows up in profiling.
    materials = self._load_materials_from_db()
    description_upper = description.upper()
    for material in materials:
        # Case-insensitive substring match against the description.
        if material.upper() in description_upper:
            return material
    return ""

13
backend/entrypoint.sh Executable file
View File

@@ -0,0 +1,13 @@
#!/bin/bash
# Container entrypoint: apply pending schema migrations, then hand off to
# the command supplied by the Dockerfile/compose (e.g. uvicorn).
# set -e: abort immediately if the migration (or any command) fails, so the
# app never starts against an out-of-date schema.
set -e
# Wait for DB to be ready (optional, but good practice if not handled by docker-compose)
# /wait-for-it.sh db:5432 --
# Run migrations
echo "Running database migrations..."
alembic upgrade head
# Start application
# exec replaces this shell so the app becomes PID 1 and receives signals directly.
echo "Starting application..."
exec "$@"

View File

@@ -1,41 +1,63 @@
# FastAPI 웹 프레임워크
fastapi==0.104.1
uvicorn[standard]==0.24.0
# 데이터베이스
sqlalchemy==2.0.23
psycopg2-binary==2.9.9
alembic==1.13.1
# 파일 처리
pandas==2.1.4
annotated-types==0.7.0
anyio==3.7.1
async-timeout==5.0.1
bcrypt==4.1.2
black==23.11.0
certifi==2026.1.4
click==8.1.8
coverage==7.10.7
dnspython==2.7.0
email-validator==2.3.0
et_xmlfile==2.0.0
exceptiongroup==1.3.1
fastapi==0.104.1
flake8==6.1.0
h11==0.16.0
httpcore==1.0.9
httptools==0.7.1
httpx==0.25.2
idna==3.11
iniconfig==2.1.0
Mako==1.3.10
MarkupSafe==3.0.3
mccabe==0.7.0
mypy_extensions==1.1.0
numpy==1.26.4
openpyxl==3.1.2
xlrd>=2.0.1
python-multipart==0.0.6
# 데이터 검증
packaging==25.0
pandas==2.1.4
pathspec==1.0.1
platformdirs==4.4.0
pluggy==1.6.0
psycopg2-binary==2.9.9
pycodestyle==2.11.1
pydantic==2.5.2
pydantic-settings==2.1.0
# 기타 유틸리티
python-dotenv==1.0.0
httpx==0.25.2
redis==5.0.1
python-magic==0.4.27
# 인증 시스템
pydantic_core==2.14.5
pyflakes==3.1.0
PyJWT==2.8.0
bcrypt==4.1.2
python-multipart==0.0.6
email-validator==2.3.0
# 개발 도구
pytest==7.4.3
pytest-asyncio==0.21.1
pytest-cov==4.1.0
pytest-mock==3.12.0
black==23.11.0
flake8==6.1.0
python-dateutil==2.9.0.post0
python-dotenv==1.0.0
python-magic==0.4.27
python-multipart==0.0.6
openpyxl==3.1.2
pytz==2025.2
PyYAML==6.0.3
RapidFuzz==3.13.0
redis==5.0.1
six==1.17.0
sniffio==1.3.1
SQLAlchemy==2.0.23
starlette==0.27.0
tomli==2.3.0
typing_extensions==4.15.0
tzdata==2025.3
uvicorn==0.24.0
uvloop==0.22.1
watchfiles==1.1.1
websockets==15.0.1
xlrd==2.0.1

View File

@@ -0,0 +1,199 @@
import pytest
from unittest.mock import MagicMock
from app.services.revision_comparator import RevisionComparator
@pytest.fixture
def mock_db():
    """Stand-in DB session; individual tests configure its behaviour."""
    return MagicMock()
@pytest.fixture
def comparator(mock_db):
    """RevisionComparator wired to the mocked DB session."""
    return RevisionComparator(mock_db)
def test_generate_material_hash(comparator):
    """Hash generation: normalization, None handling, empty strings."""
    # Baseline hash for a typical description/size/material triple.
    base = comparator._generate_material_hash("PIPE 100A SCH40", "100A", "A105")

    # Case and surrounding whitespace must not change the hash.
    normalized = comparator._generate_material_hash(" pipe 100a sch40 ", "100A", "a105")
    assert base == normalized, "공백과 대소문자는 무시되어야 합니다."

    # None inputs must still yield a valid 32-char MD5 digest.
    none_hash = comparator._generate_material_hash(None, None, None)
    assert isinstance(none_hash, str)
    assert len(none_hash) == 32  # MD5 checking

    # None and "" are interchangeable.
    empty_hash = comparator._generate_material_hash("", "", "")
    assert none_hash == empty_hash, "None과 빈 문자열은 동일하게 처리되어야 합니다."
def test_extract_size_from_description(comparator):
    """Size extraction across every supported pattern family."""
    cases = [
        # inch patterns
        ('PIPE 1/2" SCH40', '1/2"'),
        ('ELBOW 1.5inch 90D', '1.5inch'),
        # mm patterns
        ('Plate 100mm', '100mm'),
        ('Bar 50.5 MM', '50.5 MM'),
        # A patterns
        ('PIPE 100A', '100A'),
        ('FLANGE 50 A', '50 A'),
        # DN patterns
        ('VALVE DN100', 'DN100'),
        # dimension patterns
        ('GASKET 10x20', '10x20'),
        ('SHEET 10*20', '10*20'),
        # no match / None input
        ('Just Text', ""),
        (None, ""),
    ]
    for description, expected in cases:
        assert comparator._extract_size_from_description(description) == expected
def test_extract_material_from_description(comparator):
    """Material extraction: longest-first matching and case handling."""
    extract = comparator._extract_material_from_description

    # Carbon-steel grades; the longer spec must win over its prefix.
    assert extract('PIPE A106 Gr.B') == 'A106 Gr.B'
    assert extract('FLANGE A105') == 'A105'

    # Stainless grades.
    assert extract('PIPE SUS304L') == 'SUS304L'
    assert extract('PIPE SS316') == 'SS316'

    # Matching is case-insensitive; the canonical code is returned.
    assert extract('pipe sus316l') == 'SUS316L'

    # "A106 Gr.B" is listed before "A106", so the specific grade matches
    # even when the plain "A106" would also be a substring hit.
    assert extract('Material A106 Gr.B Spec') == 'A106 Gr.B'
def test_compare_materials_logic_flow(comparator):
    """End-to-end comparison: unchanged, quantity-changed and new items."""
    baseline = {
        "revision": "Rev.0",
        "confirmed_at": "2024-01-01",
        "items": [
            {
                "specification": "PIPE 100A A106",
                "size": "100A",
                "material": "A106",  # extraction finds 'A106' in the spec text
                "bom_quantity": 10.0
            }
        ]
    }

    # Hash matching re-extracts size/material from the new description, so
    # the description must carry both for an exact (hash-level) match.
    identical = [{"description": "PIPE 100A A106", "quantity": 10.0}]
    outcome = comparator.compare_materials(baseline, identical)
    assert outcome["unchanged_count"] == 1

    # Same item, different quantity -> flagged as QUANTITY_CHANGED.
    requantified = [{"description": "PIPE 100A A106", "quantity": 20.0}]
    outcome = comparator.compare_materials(baseline, requantified)
    assert outcome["changed_count"] == 1
    assert outcome["changed_materials"][0]["change_type"] == "QUANTITY_CHANGED"

    # Unknown description -> counted as new.
    unseen = [{"description": "NEW ITEM SUS304", "quantity": 1.0}]
    outcome = comparator.compare_materials(baseline, unseen)
    assert outcome["new_count"] == 1
def test_compare_materials_fuzzy_match(comparator):
    """A typo'd description should be flagged as NEW_BUT_SIMILAR."""
    baseline = {
        "revision": "Rev.0",
        "confirmed_at": "2024-01-01",
        "items": [
            {
                "specification": "PIPE 100A SCH40 A106",
                "size": "100A",
                "material": "A106",
                "bom_quantity": 10.0
            }
        ]
    }

    # "PIPEE" is a typo: the hash differs but text similarity stays high.
    typo_materials = [{
        "description": "PIPEE 100A SCH40 A106",
        "quantity": 10.0
    }]

    # Fuzzy matching only runs when RapidFuzz is importable.
    try:
        import rapidfuzz  # noqa: F401
    except ImportError:
        pytest.skip("rapidfuzz not installed")

    outcome = comparator.compare_materials(baseline, typo_materials)
    # Hash mismatch keeps it in the "new" bucket, but it must be tagged
    # as similar to the previously confirmed item.
    assert outcome["new_count"] == 1
    similar_item = outcome["new_materials"][0]
    assert similar_item["change_type"] == "NEW_BUT_SIMILAR"
    assert similar_item["similarity_score"] > 85
def test_extract_size_word_boundary(comparator):
    """Word boundaries in the size regexes must not over-match."""
    extract = comparator._extract_size_from_description

    # mm boundary: "100mm" matches cleanly.
    assert extract("100mm") == "100mm"
    # assert extract("100mmm") == ""  # depends on implementation strictness

    # inch with a space before the unit.
    assert extract("1/2 inch") == "1/2 inch"

    # DN sizes.
    assert extract("DN100") == "DN100"
    # "DN100A" behaviour is implementation-defined (DN100 vs 100A) and is
    # intentionally left unasserted here.
def test_dynamic_material_loading(comparator, mock_db):
    """Materials come from the DB when available, defaults otherwise."""
    # fetchall() yields row tuples, exactly as SQLAlchemy would.
    mock_db.execute.return_value.fetchall.return_value = [
        ("TITANIUM_GR2",),
        ("INCONEL625",),
    ]

    # A grade present ONLY in the mocked DB rows (not the default list)
    # must still be found, proving the DB rows are consulted.
    found = comparator._extract_material_from_description(
        "PIPE 100A TITANIUM_GR2"
    )
    assert found == "TITANIUM_GR2"
    assert mock_db.execute.called

    # Simulate a DB outage: every execute() now raises.
    mock_db.execute.side_effect = Exception("DB Fail")

    # SUS316L is in the hard-coded fallback list, so extraction still works.
    fallback = comparator._extract_material_from_description(
        "PIPE 100A SUS316L"
    )
    assert fallback == "SUS316L"