import asyncio
import json
from typing import Optional

from services.ollama_client import ollama_client
from config import settings

# Prompt template locations, resolved relative to the process working directory.
CLASSIFY_PROMPT_PATH = "prompts/classify_issue.txt"
SUMMARIZE_PROMPT_PATH = "prompts/summarize_issue.txt"


def _load_prompt(path: str) -> str:
    """Read a prompt template file and return its full contents as text."""
    with open(path, "r", encoding="utf-8") as f:
        return f.read()


def _extract_json(raw: str) -> Optional[dict]:
    """Best-effort extraction of a JSON object embedded in LLM output.

    Model responses often wrap the JSON payload in prose or code fences, so
    we slice from the first '{' to the last '}' and try to parse that span.

    Returns the parsed dict, or None when no parseable object is present.
    """
    start = raw.find("{")
    end = raw.rfind("}") + 1
    if start >= 0 and end > start:
        try:
            return json.loads(raw[start:end])
        except json.JSONDecodeError:
            return None
    return None


async def classify_issue(description: str, detail_notes: str = "") -> dict:
    """Classify an issue with the LLM and return the parsed JSON result.

    Falls back to {"raw_response": <raw>, "parse_error": True} when the
    model response does not contain parseable JSON.
    """
    template = _load_prompt(CLASSIFY_PROMPT_PATH)
    prompt = template.format(
        description=description or "",
        detail_notes=detail_notes or "",
    )
    raw = await ollama_client.generate_text(prompt)
    parsed = _extract_json(raw)
    if parsed is not None:
        return parsed
    return {"raw_response": raw, "parse_error": True}


async def summarize_issue(
    description: str, detail_notes: str = "", solution: str = ""
) -> dict:
    """Summarize an issue with the LLM and return the parsed JSON result.

    Falls back to {"summary": <raw, stripped>} when the model response does
    not contain parseable JSON.
    """
    template = _load_prompt(SUMMARIZE_PROMPT_PATH)
    prompt = template.format(
        description=description or "",
        detail_notes=detail_notes or "",
        solution=solution or "",
    )
    raw = await ollama_client.generate_text(prompt)
    parsed = _extract_json(raw)
    if parsed is not None:
        return parsed
    return {"summary": raw.strip()}


async def classify_and_summarize(
    description: str, detail_notes: str = ""
) -> dict:
    """Run classification and summarization for one issue.

    The two LLM calls are independent of each other, so they are issued
    concurrently via asyncio.gather to halve end-to-end latency; results
    are returned in the same shape as before.
    """
    classification, summary_result = await asyncio.gather(
        classify_issue(description, detail_notes),
        summarize_issue(description, detail_notes),
    )
    return {
        "classification": classification,
        # summarize_issue may return arbitrary parsed JSON; only the
        # "summary" key is surfaced here, defaulting to "".
        "summary": summary_result.get("summary", ""),
    }