"""
JIRA AI Fixer - Universal Issue Tracker Integration
Supports: JIRA, ServiceNow, Zendesk, Azure DevOps, TicketHub, GitHub Issues, GitLab Issues
"""
import os
import json
import httpx
import asyncio
import base64
import hashlib
import hmac
from datetime import datetime
from contextlib import asynccontextmanager
from fastapi import FastAPI, HTTPException, BackgroundTasks, Request, Header
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import HTMLResponse
from pydantic import BaseModel
from typing import Optional, List, Dict, Any
import asyncpg

# ==========================
# CONFIG
# ==========================
# All configuration comes from environment variables with development defaults.
# NOTE(review): the DATABASE_URL default embeds credentials — confirm this
# default never reaches a production deployment.
DATABASE_URL = os.getenv("DATABASE_URL", "postgresql://jira:jira_secret_2026@postgres:5432/jira_fixer")
OPENROUTER_API_KEY = os.getenv("OPENROUTER_API_KEY", "")
GITEA_URL = os.getenv("GITEA_URL", "https://gitea.startdata.com.br")
GITEA_TOKEN = os.getenv("GITEA_TOKEN", "")
COBOL_REPO = os.getenv("COBOL_REPO", "startdata/cobol-sample-app")

# Per-source shared secrets for webhook signature validation (optional;
# empty string disables validation for that source).
WEBHOOK_SECRETS = {
    "tickethub": os.getenv("TICKETHUB_WEBHOOK_SECRET", ""),
    "jira": os.getenv("JIRA_WEBHOOK_SECRET", ""),
    "servicenow": os.getenv("SERVICENOW_WEBHOOK_SECRET", ""),
    "zendesk": os.getenv("ZENDESK_WEBHOOK_SECRET", ""),
    "azure_devops": os.getenv("AZURE_DEVOPS_WEBHOOK_SECRET", ""),
    "github": os.getenv("GITHUB_WEBHOOK_SECRET", ""),
    "gitlab": os.getenv("GITLAB_WEBHOOK_SECRET", ""),
}

# Base URLs used when posting analysis results back to the source system.
CALLBACK_URLS = {
    "tickethub": os.getenv("TICKETHUB_API_URL", "https://tickethub.startdata.com.br"),
    "jira": os.getenv("JIRA_API_URL", ""),
    "servicenow": os.getenv("SERVICENOW_API_URL", ""),
    "zendesk": os.getenv("ZENDESK_API_URL", ""),
    "azure_devops": os.getenv("AZURE_DEVOPS_API_URL", ""),
}
# ==========================
# DATABASE
# ==========================
db_pool = None

async def init_db():
    """Create the asyncpg pool, ensure the schema exists and seed default integrations.

    Idempotent: tables/indexes use IF NOT EXISTS and the seed insert uses
    ON CONFLICT DO NOTHING, so it is safe to run on every startup.
    """
    global db_pool
    db_pool = await asyncpg.create_pool(DATABASE_URL, min_size=2, max_size=10)

    async with db_pool.acquire() as conn:
        await conn.execute("""
            CREATE TABLE IF NOT EXISTS issues (
                id SERIAL PRIMARY KEY,
                external_id TEXT,
                external_key TEXT,
                source TEXT,
                source_url TEXT,
                title TEXT,
                description TEXT,
                status TEXT DEFAULT 'pending',
                priority TEXT,
                labels TEXT,
                analysis TEXT,
                confidence FLOAT,
                affected_files TEXT,
                suggested_fix TEXT,
                pr_url TEXT,
                pr_branch TEXT,
                callback_url TEXT,
                metadata JSONB,
                created_at TIMESTAMP DEFAULT NOW(),
                analyzed_at TIMESTAMP
            );
            CREATE TABLE IF NOT EXISTS integrations (
                id SERIAL PRIMARY KEY,
                name TEXT UNIQUE,
                type TEXT,
                config JSONB,
                enabled BOOLEAN DEFAULT true,
                last_event_at TIMESTAMP,
                event_count INT DEFAULT 0
            );
            CREATE TABLE IF NOT EXISTS repositories (
                id SERIAL PRIMARY KEY,
                name TEXT UNIQUE,
                url TEXT,
                branch TEXT DEFAULT 'main',
                file_patterns TEXT DEFAULT '*.CBL,*.cbl,*.COB,*.cob',
                indexed_at TIMESTAMP,
                file_count INT DEFAULT 0
            );
            CREATE INDEX IF NOT EXISTS idx_issues_status ON issues(status);
            CREATE INDEX IF NOT EXISTS idx_issues_source ON issues(source);
            CREATE INDEX IF NOT EXISTS idx_issues_external ON issues(external_id, source);
        """)

        # Insert default integrations if not exist
        await conn.execute("""
            INSERT INTO integrations (name, type, config)
            VALUES
                ('tickethub', 'issue_tracker', '{"webhook_path": "/api/webhook/tickethub"}'),
                ('jira', 'issue_tracker', '{"webhook_path": "/api/webhook/jira"}'),
                ('servicenow', 'issue_tracker', '{"webhook_path": "/api/webhook/servicenow"}'),
                ('zendesk', 'issue_tracker', '{"webhook_path": "/api/webhook/zendesk"}'),
                ('azure_devops', 'issue_tracker', '{"webhook_path": "/api/webhook/azure-devops"}'),
                ('github_issues', 'issue_tracker', '{"webhook_path": "/api/webhook/github"}'),
                ('gitlab_issues', 'issue_tracker', '{"webhook_path": "/api/webhook/gitlab"}')
            ON CONFLICT (name) DO NOTHING
        """)

@asynccontextmanager
async def lifespan(app: FastAPI):
    """App lifespan: open the DB pool on startup, close it on shutdown."""
    await init_db()
    yield
    if db_pool:
        await db_pool.close()

# ==========================
# APP SETUP
# ==========================
app = FastAPI(
    title="JIRA AI Fixer",
    description="Universal AI-powered issue analysis and auto-fix system",
    version="2.0.0",
    lifespan=lifespan
)

# NOTE(review): wildcard origins combined with allow_credentials=True is
# rejected by browsers per the CORS spec — confirm whether credentials are
# actually needed here.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"]
)

# ==========================
# MODELS
# ==========================
class NormalizedIssue(BaseModel):
    """Normalized issue format from any source."""
    external_id: str
    external_key: str
    source: str
    source_url: Optional[str] = None
    title: str
    description: str
    priority: Optional[str] = None
    labels: Optional[List[str]] = None
    callback_url: Optional[str] = None
    metadata: Optional[Dict[str, Any]] = None

class WebhookResponse(BaseModel):
    """Uniform response returned by every webhook endpoint."""
    status: str
    issue_id: Optional[int] = None
    message: str

# ==========================
# WEBHOOK ADAPTERS
# ==========================

def normalize_tickethub(payload: dict) -> Optional[NormalizedIssue]:
    """Normalize TicketHub webhook payload; returns None for unhandled events."""
    if payload.get("event") != "ticket.created":
        return None

    ticket = payload.get("data", {})
    return NormalizedIssue(
        external_id=str(ticket.get("id")),
        external_key=ticket.get("key", ""),
        source="tickethub",
        source_url=f"{CALLBACK_URLS['tickethub']}/tickets/{ticket.get('id')}",
        title=ticket.get("title", ""),
        # `or ""` guards against an explicit null description in the payload,
        # which would fail NormalizedIssue's required-str validation.
        description=ticket.get("description", "") or "",
        priority=ticket.get("priority"),
        labels=ticket.get("labels", []),
        callback_url=f"{CALLBACK_URLS['tickethub']}/api/tickets/{ticket.get('id')}/comments",
        metadata={"project_id": ticket.get("project_id"), "reporter": ticket.get("reporter")}
    )

def normalize_jira(payload: dict) -> Optional[NormalizedIssue]:
    """Normalize JIRA webhook payload; returns None for unhandled events."""
    event = payload.get("webhookEvent", "")
    if "issue_created" not in event and "issue_updated" not in event:
        return None

    issue = payload.get("issue", {})
    fields = issue.get("fields", {})

    # Get JIRA instance URL from self link
    self_url = issue.get("self", "")
    base_url = "/".join(self_url.split("/")[:3]) if self_url else CALLBACK_URLS.get("jira", "")

    # BUG FIX: JIRA sends explicit nulls for unset fields. The previous code
    # guarded assignee/priority with isinstance but called
    # fields.get("reporter", {}).get("displayName") unguarded, which raised
    # AttributeError when "reporter" was present but null (e.g. system-created
    # issues). All dict-valued fields are now guarded uniformly.
    priority = fields.get("priority")
    project = fields.get("project")
    issuetype = fields.get("issuetype")
    reporter = fields.get("reporter")
    assignee = fields.get("assignee")

    return NormalizedIssue(
        external_id=str(issue.get("id")),
        external_key=issue.get("key", ""),
        source="jira",
        source_url=f"{base_url}/browse/{issue.get('key')}",
        title=fields.get("summary", ""),
        # BUG FIX: description is frequently an explicit null in JIRA payloads;
        # .get("description", "") returns None in that case and broke validation.
        description=fields.get("description") or "",
        priority=priority.get("name") if isinstance(priority, dict) else None,
        labels=fields.get("labels", []),
        callback_url=f"{base_url}/rest/api/2/issue/{issue.get('key')}/comment",
        metadata={
            "project": project.get("key") if isinstance(project, dict) else None,
            "issuetype": issuetype.get("name") if isinstance(issuetype, dict) else None,
            "reporter": reporter.get("displayName") if isinstance(reporter, dict) else None,
            "assignee": assignee.get("displayName") if isinstance(assignee, dict) else None
        }
    )

def normalize_servicenow(payload: dict) -> Optional[NormalizedIssue]:
    """Normalize ServiceNow webhook payload (Incident/Change Request)."""
    # ServiceNow sends different formats - handle both business rules and REST
    record = payload.get("current") or payload.get("record") or payload

    if not record.get("number") and not record.get("sys_id"):
        return None

    instance_url = CALLBACK_URLS.get("servicenow", "")
    sys_id = record.get("sys_id", "")
    number = record.get("number", sys_id)

    return NormalizedIssue(
        external_id=sys_id,
        external_key=number,
        source="servicenow",
        source_url=f"{instance_url}/nav_to.do?uri=incident.do?sys_id={sys_id}" if instance_url else None,
        title=record.get("short_description", ""),
        # `or ""` guards against an explicit null description.
        description=record.get("description", "") or "",
        priority=record.get("priority") or record.get("urgency"),
        labels=[record.get("category", "")] if record.get("category") else [],
        callback_url=f"{instance_url}/api/now/table/incident/{sys_id}" if instance_url else None,
        metadata={
            "category": record.get("category"),
            "subcategory": record.get("subcategory"),
            "caller": record.get("caller_id", {}).get("display_value") if isinstance(record.get("caller_id"), dict) else record.get("caller_id"),
            "assignment_group": record.get("assignment_group", {}).get("display_value") if isinstance(record.get("assignment_group"), dict) else None,
            "state": record.get("state"),
            "impact": record.get("impact")
        }
    )

def normalize_zendesk(payload: dict) -> Optional[NormalizedIssue]:
    """Normalize Zendesk webhook payload; returns None when no ticket id is present."""
    ticket = payload.get("ticket") or payload

    if not ticket.get("id"):
        return None

    instance_url = CALLBACK_URLS.get("zendesk", "")

    return NormalizedIssue(
        external_id=str(ticket.get("id")),
        external_key=f"ZD-{ticket.get('id')}",
        source="zendesk",
        source_url=f"{instance_url}/agent/tickets/{ticket.get('id')}" if instance_url else None,
        title=ticket.get("subject", ticket.get("title", "")),
        # `or ""` guards against an explicit null description.
        description=ticket.get("description", "") or "",
        priority=ticket.get("priority"),
        labels=ticket.get("tags", []),
        callback_url=f"{instance_url}/api/v2/tickets/{ticket.get('id')}.json" if instance_url else None,
        metadata={
            "status": ticket.get("status"),
            "requester": ticket.get("requester", {}).get("name") if isinstance(ticket.get("requester"), dict) else None,
            "assignee": ticket.get("assignee", {}).get("name") if isinstance(ticket.get("assignee"), dict) else None,
            "group": ticket.get("group", {}).get("name") if isinstance(ticket.get("group"), dict) else None,
            "type": ticket.get("type"),
            "channel": ticket.get("via", {}).get("channel") if isinstance(ticket.get("via"), dict) else None
        }
    )

def normalize_azure_devops(payload: dict) -> Optional[NormalizedIssue]:
    """Normalize Azure DevOps webhook payload (Work Item)."""
    event_type = payload.get("eventType", "")
    if "workitem.created" not in event_type and "workitem.updated" not in event_type:
        return None

    resource = payload.get("resource", {})
    fields = resource.get("fields", {})

    # Azure DevOps has different field paths
    work_item_id = resource.get("id") or resource.get("workItemId")

    # Extract URL from resource links
    html_url = resource.get("_links", {}).get("html", {}).get("href", "")
    api_url = resource.get("url", "")

    return NormalizedIssue(
        external_id=str(work_item_id),
        external_key=f"ADO-{work_item_id}",
        source="azure_devops",
        source_url=html_url,
        # BUG FIX: when both description fields were explicit nulls the
        # `x or y` chain evaluated to None; the final `or ""` guarantees a str.
        title=fields.get("System.Title", ""),
        description=fields.get("System.Description", "") or fields.get("Microsoft.VSTS.TCM.ReproSteps", "") or "",
        priority=str(fields.get("Microsoft.VSTS.Common.Priority", "")),
        labels=fields.get("System.Tags", "").split(";") if fields.get("System.Tags") else [],
        callback_url=api_url,
        metadata={
            "work_item_type": fields.get("System.WorkItemType"),
            "state": fields.get("System.State"),
            "reason": fields.get("System.Reason"),
            "area_path": fields.get("System.AreaPath"),
            "iteration_path": fields.get("System.IterationPath"),
            "assigned_to": fields.get("System.AssignedTo", {}).get("displayName") if isinstance(fields.get("System.AssignedTo"), dict) else None,
            "created_by": fields.get("System.CreatedBy", {}).get("displayName") if isinstance(fields.get("System.CreatedBy"), dict) else None
        }
    )

def normalize_github(payload: dict, event: str) -> Optional[NormalizedIssue]:
    """Normalize GitHub Issues webhook payload.

    `event` is the X-GitHub-Event header value; only issue opened/edited
    events are handled.
    """
    if event != "issues" or payload.get("action") not in ["opened", "edited"]:
        return None

    issue = payload.get("issue", {})
    repo = payload.get("repository", {})

    return NormalizedIssue(
        external_id=str(issue.get("id")),
        external_key=f"{repo.get('name')}#{issue.get('number')}",
        source="github",
        source_url=issue.get("html_url"),
        title=issue.get("title", ""),
        # BUG FIX: GitHub sends "body": null for empty issues; .get's default
        # does not apply then, so coerce None to "".
        description=issue.get("body") or "",
        priority=None,  # GitHub doesn't have native priority
        labels=[l.get("name") for l in issue.get("labels", [])],
        callback_url=issue.get("comments_url"),
        metadata={
            "repo": repo.get("full_name"),
            "user": issue.get("user", {}).get("login"),
            "state": issue.get("state"),
            "milestone": issue.get("milestone", {}).get("title") if issue.get("milestone") else None,
            "assignees": [a.get("login") for a in issue.get("assignees", [])]
        }
    )

def normalize_gitlab(payload: dict) -> Optional[NormalizedIssue]:
    """Normalize GitLab Issues webhook payload; only open/update actions handled."""
    event = payload.get("object_kind")
    if event != "issue":
        return None

    action = payload.get("object_attributes", {}).get("action")
    if action not in ["open", "update"]:
        return None

    attrs = payload.get("object_attributes", {})
    project = payload.get("project", {})

    return NormalizedIssue(
        external_id=str(attrs.get("id")),
        external_key=f"{project.get('path')}#{attrs.get('iid')}",
        source="gitlab",
        source_url=attrs.get("url"),
        title=attrs.get("title", ""),
        # `or ""` guards against an explicit null description.
        description=attrs.get("description", "") or "",
        priority=None,
        labels=payload.get("labels", []),
        callback_url=f"{project.get('web_url')}/-/issues/{attrs.get('iid')}/notes",
        metadata={
            "project": project.get("path_with_namespace"),
            "author": payload.get("user", {}).get("username"),
            "state": attrs.get("state"),
            "confidential": attrs.get("confidential"),
            "milestone": attrs.get("milestone", {}).get("title") if attrs.get("milestone") else None,
            "assignees": [a.get("username") for a in payload.get("assignees", [])]
        }
    )

# ==========================
# WEBHOOK ENDPOINTS
# ==========================

@app.get("/api/health")
async def health():
    """Liveness probe."""
    return {"status": "healthy", "service": "jira-ai-fixer", "version": "2.0.0"}

@app.post("/api/webhook/tickethub", response_model=WebhookResponse)
async def webhook_tickethub(payload: dict, background_tasks: BackgroundTasks):
    """Webhook endpoint for TicketHub"""
    issue = normalize_tickethub(payload)
    if not issue:
        return WebhookResponse(status="ignored", message="Event not handled")

    issue_id = await save_and_queue_issue(issue, background_tasks)
    await update_integration_stats("tickethub")
    return WebhookResponse(status="accepted", issue_id=issue_id, message="Analysis queued")

@app.post("/api/webhook/jira", response_model=WebhookResponse)
async def webhook_jira(payload: dict, background_tasks: BackgroundTasks):
    """Webhook endpoint for JIRA"""
    issue = normalize_jira(payload)
    if not issue:
        return WebhookResponse(status="ignored", message="Event not handled")

    issue_id = await save_and_queue_issue(issue, background_tasks)
    await update_integration_stats("jira")
    return WebhookResponse(status="accepted", issue_id=issue_id, message="Analysis queued")

@app.post("/api/webhook/servicenow", response_model=WebhookResponse)
async def webhook_servicenow(payload: dict, background_tasks: BackgroundTasks):
    """Webhook endpoint for ServiceNow Incidents"""
    issue = normalize_servicenow(payload)
    if not issue:
        return WebhookResponse(status="ignored", message="Event not handled")

    issue_id = await save_and_queue_issue(issue, background_tasks)
    await update_integration_stats("servicenow")
    return WebhookResponse(status="accepted", issue_id=issue_id, message="Analysis queued")

@app.post("/api/webhook/zendesk", response_model=WebhookResponse)
async def webhook_zendesk(payload: dict, background_tasks: BackgroundTasks):
    """Webhook endpoint for Zendesk Tickets"""
    issue = normalize_zendesk(payload)
    if not issue:
        return WebhookResponse(status="ignored", message="Event not handled")

    issue_id = await save_and_queue_issue(issue, background_tasks)
    await update_integration_stats("zendesk")
    return WebhookResponse(status="accepted", issue_id=issue_id, message="Analysis queued")
@app.post("/api/webhook/azure-devops", response_model=WebhookResponse)
async def webhook_azure_devops(payload: dict, background_tasks: BackgroundTasks):
    """Webhook endpoint for Azure DevOps Work Items"""
    issue = normalize_azure_devops(payload)
    if not issue:
        return WebhookResponse(status="ignored", message="Event not handled")

    issue_id = await save_and_queue_issue(issue, background_tasks)
    await update_integration_stats("azure_devops")
    return WebhookResponse(status="accepted", issue_id=issue_id, message="Analysis queued")

@app.post("/api/webhook/github", response_model=WebhookResponse)
async def webhook_github(
    payload: dict,
    background_tasks: BackgroundTasks,
    x_github_event: str = Header(default="")
):
    """Webhook endpoint for GitHub Issues"""
    issue = normalize_github(payload, x_github_event)
    if not issue:
        return WebhookResponse(status="ignored", message="Event not handled")

    issue_id = await save_and_queue_issue(issue, background_tasks)
    await update_integration_stats("github_issues")
    return WebhookResponse(status="accepted", issue_id=issue_id, message="Analysis queued")

@app.post("/api/webhook/gitlab", response_model=WebhookResponse)
async def webhook_gitlab(payload: dict, background_tasks: BackgroundTasks):
    """Webhook endpoint for GitLab Issues"""
    issue = normalize_gitlab(payload)
    if not issue:
        return WebhookResponse(status="ignored", message="Event not handled")

    issue_id = await save_and_queue_issue(issue, background_tasks)
    await update_integration_stats("gitlab_issues")
    return WebhookResponse(status="accepted", issue_id=issue_id, message="Analysis queued")

# Generic webhook for custom integrations
@app.post("/api/webhook/generic", response_model=WebhookResponse)
async def webhook_generic(payload: dict, background_tasks: BackgroundTasks):
    """Generic webhook endpoint for custom integrations.

    Expected payload format:
    {
        "id": "string",
        "key": "string",
        "title": "string",
        "description": "string",
        "source": "custom-system-name",
        "priority": "high|medium|low",
        "labels": ["label1", "label2"],
        "callback_url": "https://your-system/api/comment"
    }
    """
    if not payload.get("id") or not payload.get("title"):
        raise HTTPException(400, "Missing required fields: id, title")

    issue = NormalizedIssue(
        external_id=str(payload.get("id")),
        external_key=payload.get("key", str(payload.get("id"))),
        source=payload.get("source", "generic"),
        source_url=payload.get("url"),
        title=payload.get("title"),
        description=payload.get("description", ""),
        priority=payload.get("priority"),
        labels=payload.get("labels", []),
        callback_url=payload.get("callback_url"),
        metadata=payload.get("metadata", {})
    )

    issue_id = await save_and_queue_issue(issue, background_tasks)
    return WebhookResponse(status="accepted", issue_id=issue_id, message="Analysis queued")

# ==========================
# CORE LOGIC
# ==========================

async def save_and_queue_issue(issue: NormalizedIssue, background_tasks: BackgroundTasks) -> int:
    """Save issue to database and queue the AI analysis as a background task.

    Returns the newly created issues.id.
    """
    async with db_pool.acquire() as conn:
        issue_id = await conn.fetchval("""
            INSERT INTO issues (
                external_id, external_key, source, source_url,
                title, description, priority, labels,
                callback_url, metadata, status
            )
            VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, 'pending')
            RETURNING id
        """,
            issue.external_id, issue.external_key, issue.source, issue.source_url,
            issue.title, issue.description, issue.priority,
            json.dumps(issue.labels) if issue.labels else None,
            issue.callback_url, json.dumps(issue.metadata) if issue.metadata else None
        )

    background_tasks.add_task(analyze_issue, issue_id, issue)
    return issue_id

async def update_integration_stats(integration_name: str):
    """Record that an event was received for the named integration."""
    async with db_pool.acquire() as conn:
        await conn.execute("""
            UPDATE integrations
            SET last_event_at = NOW(), event_count = event_count + 1
            WHERE name = $1
        """, integration_name)

async def analyze_issue(issue_id: int, issue: NormalizedIssue):
    """Background task: run AI analysis, open a fix PR, and report back to the source.

    On any failure the issue row is marked status='error' with the exception text.
    """
    try:
        # Fetch code from repositories
        cobol_files = await fetch_cobol_files()

        # Build and call LLM
        prompt = build_analysis_prompt(issue, cobol_files)
        analysis = await call_llm(prompt)
        result = parse_analysis(analysis)

        # Update database
        async with db_pool.acquire() as conn:
            await conn.execute("""
                UPDATE issues
                SET status = 'analyzed',
                    analysis = $1,
                    confidence = $2,
                    affected_files = $3,
                    suggested_fix = $4,
                    analyzed_at = NOW()
                WHERE id = $5
            """, result.get("analysis"), result.get("confidence"),
                json.dumps(result.get("affected_files", [])),
                result.get("suggested_fix"), issue_id)

        # Create PR with fix
        pr_info = await create_fix_branch_and_pr(issue, result)

        # Update PR info in DB
        if pr_info and pr_info.get("success"):
            async with db_pool.acquire() as conn:
                await conn.execute("""
                    UPDATE issues SET pr_url = $1, pr_branch = $2 WHERE id = $3
                """, pr_info.get("pr_url"), pr_info.get("branch"), issue_id)

        # Post result back to source system
        await post_analysis_to_source(issue, result, pr_info)

    except Exception as e:
        async with db_pool.acquire() as conn:
            await conn.execute("""
                UPDATE issues SET status = 'error', analysis = $1 WHERE id = $2
            """, f"Error: {str(e)}", issue_id)

async def fetch_cobol_files() -> Dict[str, str]:
    """Fetch COBOL source files from Gitea.

    Best-effort: returns whatever files could be fetched (possibly {}).
    """
    files = {}
    async with httpx.AsyncClient(timeout=30.0) as client:
        url = f"{GITEA_URL}/api/v1/repos/{COBOL_REPO}/contents/src/cobol"
        headers = {"Authorization": f"token {GITEA_TOKEN}"} if GITEA_TOKEN else {}
        try:
            resp = await client.get(url, headers=headers)
            if resp.status_code == 200:
                for item in resp.json():
                    if item["name"].endswith((".CBL", ".cbl", ".COB", ".cob")):
                        file_url = f"{GITEA_URL}/api/v1/repos/{COBOL_REPO}/raw/src/cobol/{item['name']}"
                        file_resp = await client.get(file_url, headers=headers)
                        if file_resp.status_code == 200:
                            files[item["name"]] = file_resp.text
        except Exception:
            # BUG FIX: was a bare `except:` — in async code that also swallows
            # asyncio.CancelledError (a BaseException since 3.8) and breaks
            # task cancellation. Fetch remains best-effort for real errors.
            pass
    return files

def build_analysis_prompt(issue: NormalizedIssue, cobol_files: Dict[str, str]) -> str:
    """Build the LLM prompt from the normalized issue plus fetched source files."""
    files_content = "\n\n".join([
        f"=== {name} ===\n{content}"
        for name, content in cobol_files.items()
    ])

    metadata_str = json.dumps(issue.metadata, indent=2) if issue.metadata else "N/A"

    return f"""You are a COBOL expert analyzing a support case.

## Support Case
**Source:** {issue.source.upper()}
**Key:** {issue.external_key}
**Title:** {issue.title}
**Priority:** {issue.priority or 'Not specified'}
**Labels:** {', '.join(issue.labels) if issue.labels else 'None'}

**Description:**
{issue.description}

**Additional Metadata:**
{metadata_str}

## Source Code Files
{files_content}

## Task
1. Identify the root cause of the issue described
2. Find the specific file(s) and line(s) affected
3. Propose a fix with the exact code change needed
4. Estimate your confidence (0-100%)

## Response Format (JSON)
{{
    "root_cause": "Brief explanation of what's causing the issue",
    "affected_files": ["filename.CBL"],
    "affected_lines": "line numbers or section names",
    "suggested_fix": "The exact code change needed (before/after)",
    "confidence": 85,
    "explanation": "Detailed technical explanation"
}}

Respond ONLY with valid JSON."""

async def call_llm(prompt: str) -> str:
    """Call OpenRouter LLM API.

    Without an API key a canned mock analysis is returned so the pipeline
    can be exercised end-to-end in testing.
    """
    if not OPENROUTER_API_KEY:
        # Mock response for testing
        return json.dumps({
            "root_cause": "WS-AVAILABLE-BALANCE field is declared as PIC 9(9)V99 which can only hold values up to 9,999,999.99. The HOST system returns balances in PIC 9(11)V99 format, causing truncation on amounts over $10 million.",
            "affected_files": ["AUTH.CBL"],
            "affected_lines": "Line 15 (WS-AVAILABLE-BALANCE declaration) and SECTION 3000-CHECK-BALANCE",
            "suggested_fix": "Change line 15 from:\n    05 WS-AVAILABLE-BALANCE PIC 9(9)V99.\nTo:\n    05 WS-AVAILABLE-BALANCE PIC 9(11)V99.",
            "confidence": 92,
            "explanation": "The AUTH.CBL program declares WS-AVAILABLE-BALANCE with PIC 9(9)V99, limiting it to 9,999,999.99. When receiving balance data from HOST (which uses PIC 9(11)V99), values above this limit get truncated. For example, a balance of 150,000,000.00 would be truncated to 0,000,000.00, causing false 'insufficient funds' responses. The fix is to align the field size with the HOST response format."
        })

    async with httpx.AsyncClient(timeout=60.0) as client:
        resp = await client.post(
            "https://openrouter.ai/api/v1/chat/completions",
            headers={
                "Authorization": f"Bearer {OPENROUTER_API_KEY}",
                "Content-Type": "application/json"
            },
            json={
                "model": "meta-llama/llama-3.3-70b-instruct:free",
                "messages": [{"role": "user", "content": prompt}],
                "temperature": 0.1
            }
        )
        if resp.status_code == 200:
            return resp.json()["choices"][0]["message"]["content"]
        return "{}"

def parse_analysis(analysis: str) -> dict:
    """Parse the LLM response into the internal result dict.

    Strips markdown code fences if present; on any parse failure falls back
    to treating the raw text as the analysis with 50% confidence.
    """
    try:
        if "```json" in analysis:
            analysis = analysis.split("```json")[1].split("```")[0]
        elif "```" in analysis:
            analysis = analysis.split("```")[1].split("```")[0]

        data = json.loads(analysis.strip())
        return {
            "analysis": data.get("root_cause", "") + "\n\n" + data.get("explanation", ""),
            "confidence": data.get("confidence", 0) / 100.0,
            "affected_files": data.get("affected_files", []),
            "suggested_fix": data.get("suggested_fix", "")
        }
    except Exception:
        # BUG FIX: was a bare `except:` which also caught BaseExceptions
        # such as KeyboardInterrupt; narrowed while keeping the fallback.
        return {"analysis": analysis, "confidence": 0.5, "affected_files": [], "suggested_fix": ""}

async def create_fix_branch_and_pr(issue: NormalizedIssue, result: dict):
    """Create a branch with the fix and open a Pull Request.

    Returns {"success": True, ...} on success, {"error": ...} on failure,
    or None when there is nothing actionable in `result`.
    """
    if not result.get("affected_files") or not result.get("suggested_fix"):
        return None

    affected_files = result.get("affected_files", [])
    if isinstance(affected_files, str):
        try:
            affected_files = json.loads(affected_files)
        except Exception:
            # BUG FIX: bare `except:` narrowed; non-JSON string becomes a
            # single-element list as before.
            affected_files = [affected_files]

    if not affected_files:
        return None

    main_file = affected_files[0]
    branch_name = f"fix/{issue.external_key.lower().replace('#', '-').replace(' ', '-')}-auto-fix"

    async with httpx.AsyncClient(timeout=30.0) as client:
        headers = {"Authorization": f"token {GITEA_TOKEN}"} if GITEA_TOKEN else {}

        try:
            # Get file content
            file_path = f"src/cobol/{main_file}"
            file_url = f"{GITEA_URL}/api/v1/repos/{COBOL_REPO}/contents/{file_path}"

            resp = await client.get(file_url, headers=headers)
            if resp.status_code != 200:
                return {"error": f"File not found: {file_path}"}

            file_data = resp.json()
            current_content = file_data.get("content", "")
            file_sha = file_data.get("sha", "")

            try:
                original_code = base64.b64decode(current_content).decode('utf-8')
            except Exception:
                # BUG FIX: bare `except:` narrowed.
                return {"error": "Failed to decode file content"}

            # Apply fix
            # NOTE(review): the applied fix is hard-coded to the demo scenario
            # (PIC field widening) rather than derived from result["suggested_fix"].
            fixed_code = original_code.replace("PIC 9(9)V99", "PIC 9(11)V99")
            if fixed_code == original_code:
                return {"error": "Could not apply fix automatically"}

            # Get default branch
            repo_url = f"{GITEA_URL}/api/v1/repos/{COBOL_REPO}"
            repo_resp = await client.get(repo_url, headers=headers)
            default_branch = repo_resp.json().get("default_branch", "main")

            # Create branch
            create_branch_url = f"{GITEA_URL}/api/v1/repos/{COBOL_REPO}/branches"
            await client.post(
                create_branch_url,
                headers={**headers, "Content-Type": "application/json"},
                json={"new_branch_name": branch_name, "old_ref_name": default_branch}
            )

            # Update file
            update_url = f"{GITEA_URL}/api/v1/repos/{COBOL_REPO}/contents/{file_path}"
            update_data = {
                "message": f"fix({issue.external_key}): {issue.title}\n\nAuto-fix by JIRA AI Fixer\nSource: {issue.source}\nConfidence: {int(result.get('confidence', 0) * 100)}%",
                "content": base64.b64encode(fixed_code.encode()).decode(),
                "sha": file_sha,
                "branch": branch_name
            }

            update_resp = await client.put(
                update_url,
                headers={**headers, "Content-Type": "application/json"},
                json=update_data
            )

            if update_resp.status_code not in [200, 201]:
                return {"error": f"Failed to update file: {update_resp.text}"}

            # Create PR
            pr_url = f"{GITEA_URL}/api/v1/repos/{COBOL_REPO}/pulls"
            pr_data = {
                "title": f"[{issue.external_key}] {issue.title}",
                "body": f"""## 🤖 Automated Fix

**Source:** {issue.source.upper()}
**Ticket:** [{issue.external_key}]({issue.source_url})
**Issue:** {issue.title}

### Root Cause Analysis
{result.get('analysis', 'N/A')}

### Changes Made
- **File:** `{file_path}`
- **Fix:** {result.get('suggested_fix', 'N/A')}

### Confidence
{int(result.get('confidence', 0) * 100)}%

---
_This PR was automatically generated by JIRA AI Fixer_
""",
                "head": branch_name,
                "base": default_branch
            }

            pr_resp = await client.post(
                pr_url,
                headers={**headers, "Content-Type": "application/json"},
                json=pr_data
            )

            if pr_resp.status_code in [200, 201]:
                pr_info = pr_resp.json()
                return {
                    "success": True,
                    "branch": branch_name,
                    "pr_number": pr_info.get("number"),
                    "pr_url": pr_info.get("html_url", f"{GITEA_URL}/{COBOL_REPO}/pulls/{pr_info.get('number')}"),
                    "file_changed": file_path
                }
            else:
                return {"error": f"Failed to create PR: {pr_resp.text}"}

        except Exception as e:
            return {"error": str(e)}

async def post_analysis_to_source(issue: NormalizedIssue, result: dict, pr_info: dict = None):
    """Post the analysis result back to the source system's comment API.

    Each tracker has its own comment payload shape; failures are logged and
    swallowed (best-effort notification).
    """
    if not issue.callback_url:
        return

    confidence_pct = int(result.get("confidence", 0) * 100)
    files = ", ".join(result.get("affected_files", ["Unknown"]))

    pr_section = ""
    if pr_info and pr_info.get("success"):
        pr_section = f"""
🔀 PULL REQUEST CREATED:
────────────────────────────────────────
Branch: {pr_info.get('branch')}
PR: #{pr_info.get('pr_number')}
URL: {pr_info.get('pr_url')}
────────────────────────────────────────
"""

    comment = f"""🤖 AI ANALYSIS COMPLETE
━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━

📋 ROOT CAUSE:
{result.get('analysis', 'Unable to determine')}

📁 AFFECTED FILES: {files}

🔧 SUGGESTED FIX:
────────────────────────────────────────
{result.get('suggested_fix', 'No fix suggested')}
────────────────────────────────────────
{pr_section}
📊 CONFIDENCE: {confidence_pct}%

━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
Analyzed by JIRA AI Fixer"""

    async with httpx.AsyncClient(timeout=10.0) as client:
        try:
            # Different formats for different systems
            if issue.source == "tickethub":
                await client.post(issue.callback_url, json={"author": "AI Fixer", "content": comment})
            elif issue.source == "jira":
                # JIRA comment format
                await client.post(
                    issue.callback_url,
                    json={"body": comment},
                    headers={"Content-Type": "application/json"}
                )
            elif issue.source == "servicenow":
                # ServiceNow work notes format
                await client.patch(
                    issue.callback_url,
                    json={"work_notes": comment},
                    headers={"Content-Type": "application/json"}
                )
            elif issue.source == "zendesk":
                # Zendesk comment format
                await client.put(
                    issue.callback_url,
                    json={"ticket": {"comment": {"body": comment, "public": False}}}
                )
            elif issue.source == "azure_devops":
                # Azure DevOps comment format
                await client.post(
                    f"{issue.callback_url}/comments",
                    json={"text": comment}
                )
            elif issue.source in ["github", "gitlab"]:
                # GitHub/GitLab issues comment
                await client.post(issue.callback_url, json={"body": comment})
            else:
                # Generic POST
                await client.post(issue.callback_url, json={"comment": comment})
        except Exception as e:
            print(f"Failed to post comment to {issue.source}: {e}")
ENDPOINTS +# ========================== + +@app.get("/api/issues") +async def list_issues( + status: Optional[str] = None, + source: Optional[str] = None, + limit: int = 50 +): + """List issues with optional filters""" + async with db_pool.acquire() as conn: + query = "SELECT * FROM issues WHERE 1=1" + params = [] + + if status: + params.append(status) + query += f" AND status = ${len(params)}" + if source: + params.append(source) + query += f" AND source = ${len(params)}" + + params.append(limit) + query += f" ORDER BY created_at DESC LIMIT ${len(params)}" + + rows = await conn.fetch(query, *params) + return [dict(r) for r in rows] + +@app.get("/api/issues/{issue_id}") +async def get_issue(issue_id: int): + """Get a single issue by ID""" + async with db_pool.acquire() as conn: + row = await conn.fetchrow("SELECT * FROM issues WHERE id = $1", issue_id) + if not row: + raise HTTPException(404, "Issue not found") + return dict(row) + +@app.get("/api/integrations") +async def list_integrations(): + """List all available integrations""" + async with db_pool.acquire() as conn: + rows = await conn.fetch("SELECT * FROM integrations ORDER BY name") + return [dict(r) for r in rows] + +@app.patch("/api/integrations/{name}") +async def update_integration(name: str, data: dict): + """Update integration configuration""" + async with db_pool.acquire() as conn: + await conn.execute(""" + UPDATE integrations + SET config = config || $1::jsonb, enabled = COALESCE($2, enabled) + WHERE name = $3 + """, json.dumps(data.get("config", {})), data.get("enabled"), name) + return {"status": "updated"} + +@app.get("/api/stats") +async def get_stats(): + """Get dashboard statistics""" + async with db_pool.acquire() as conn: + total = await conn.fetchval("SELECT COUNT(*) FROM issues") + analyzed = await conn.fetchval("SELECT COUNT(*) FROM issues WHERE status = 'analyzed'") + prs = await conn.fetchval("SELECT COUNT(*) FROM issues WHERE pr_url IS NOT NULL") + avg_conf = await 
conn.fetchval("SELECT AVG(confidence) FROM issues WHERE confidence IS NOT NULL") + + by_source = await conn.fetch(""" + SELECT source, COUNT(*) as count + FROM issues + GROUP BY source + ORDER BY count DESC + """) + + return { + "total": total, + "analyzed": analyzed, + "prs_created": prs, + "avg_confidence": round((avg_conf or 0) * 100), + "by_source": {r["source"]: r["count"] for r in by_source} + } + +# ========================== +# DASHBOARD HTML +# ========================== + +DASHBOARD_HTML = """ + + + + + JIRA AI Fixer + + + + + +
+
+
+
+ 🤖 +
+

JIRA AI Fixer

+

Universal Issue Analysis Engine

+
+
+ ● v2.0 Online +
+
+
+ +
+
+ +
+
+
+

Recent Issues

+
+ + +
+
+
+
Loading...
+
+
+ +
+
+

Supported Integrations

+
+
🎫TicketHub
Active
+
🔵JIRA
Ready
+
⚙️ServiceNow
Ready
+
💚Zendesk
Ready
+
🔷Azure DevOps
Ready
+
🐙GitHub Issues
Ready
+
🦊GitLab Issues
Ready
+
+
+ +
+

Webhook Endpoints

+
+
POST /api/webhook/tickethub
+
POST /api/webhook/jira
+
POST /api/webhook/servicenow
+
POST /api/webhook/zendesk
+
POST /api/webhook/azure-devops
+
POST /api/webhook/github
+
POST /api/webhook/gitlab
+
POST /api/webhook/generic
+
+
+
+
+
+ + + + + +""" + +@app.get("/", response_class=HTMLResponse) +async def dashboard(): + return DASHBOARD_HTML + +@app.get("/dashboard", response_class=HTMLResponse) +async def dashboard_alt(): + return DASHBOARD_HTML