Files
tk-factory-services/ai-service/services/classification_service.py
Hyungi Ahn b3012b8320 feat: AI 서비스 및 AI 어시스턴트 전용 페이지 추가
- ai-service: Ollama 기반 AI 서비스 (분류, 시맨틱 검색, RAG Q&A, 패턴 분석)
- AI 어시스턴트 페이지: 채팅형 Q&A, 시맨틱 검색, 패턴 분석, 분류 테스트
- 권한 시스템에 ai_assistant 페이지 등록 (기본 비활성)
- 기존 페이지에 AI 기능 통합 (대시보드, 수신함, 관리함)
- docker-compose, gateway, nginx 설정 업데이트

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-06 09:38:30 +09:00

61 lines
1.8 KiB
Python

import asyncio
import json

from config import settings
from services.ollama_client import ollama_client
CLASSIFY_PROMPT_PATH = "prompts/classify_issue.txt"
SUMMARIZE_PROMPT_PATH = "prompts/summarize_issue.txt"
def _load_prompt(path: str) -> str:
with open(path, "r", encoding="utf-8") as f:
return f.read()
async def classify_issue(description: str, detail_notes: str = "") -> dict:
    """Classify an issue with the LLM and return the parsed JSON verdict.

    Fills the classification prompt template with the issue fields, sends
    it to the model, and extracts the first JSON object from the reply.
    When no parseable JSON object is present, returns
    ``{"raw_response": <model output>, "parse_error": True}`` instead.
    """
    prompt = _load_prompt(CLASSIFY_PROMPT_PATH).format(
        description=description or "",
        detail_notes=detail_notes or "",
    )
    reply = await ollama_client.generate_text(prompt)

    # The model may wrap its JSON in extra prose; slice between the
    # outermost braces before parsing.
    open_idx = reply.find("{")
    close_idx = reply.rfind("}") + 1
    if open_idx >= 0 and close_idx > open_idx:
        try:
            return json.loads(reply[open_idx:close_idx])
        except json.JSONDecodeError:
            pass
    return {"raw_response": reply, "parse_error": True}
async def summarize_issue(
    description: str, detail_notes: str = "", solution: str = ""
) -> dict:
    """Summarize an issue with the LLM.

    Fills the summarization prompt template and asks the model for a
    summary. If the reply contains a JSON object it is parsed and
    returned as-is; otherwise the stripped raw reply is returned under
    the ``"summary"`` key.
    """
    filled = _load_prompt(SUMMARIZE_PROMPT_PATH).format(
        description=description or "",
        detail_notes=detail_notes or "",
        solution=solution or "",
    )
    reply = await ollama_client.generate_text(filled)

    # Prefer structured output: parse the outermost JSON object if any.
    first, last = reply.find("{"), reply.rfind("}") + 1
    if first >= 0 and last > first:
        try:
            return json.loads(reply[first:last])
        except json.JSONDecodeError:
            pass
    # Fall back to treating the whole reply as a plain-text summary.
    return {"summary": reply.strip()}
async def classify_and_summarize(
    description: str, detail_notes: str = ""
) -> dict:
    """Classify and summarize an issue in one call.

    Returns a dict with:
      - ``"classification"``: the dict produced by :func:`classify_issue`
        (parsed JSON, or a raw-response/parse-error payload).
      - ``"summary"``: the summary string from :func:`summarize_issue`,
        or ``""`` when the summarizer returned structured JSON without a
        ``"summary"`` key.
    """
    # The two LLM calls are independent, so run them concurrently
    # instead of sequentially to cut end-to-end latency roughly in half.
    classification, summary_result = await asyncio.gather(
        classify_issue(description, detail_notes),
        summarize_issue(description, detail_notes),
    )
    return {
        "classification": classification,
        "summary": summary_result.get("summary", ""),
    }