feat: Add complete API v2 with webhook and AI analysis
This commit is contained in:
parent
d742d1201c
commit
5c8ceadb77
|
|
@ -0,0 +1,387 @@
|
|||
"""
|
||||
JIRA AI Fixer - Intelligent Support Case Resolution
|
||||
Complete API with webhook handling and AI analysis
|
||||
"""
|
||||
import os
|
||||
import json
|
||||
import httpx
|
||||
import asyncio
|
||||
from datetime import datetime
|
||||
from contextlib import asynccontextmanager
|
||||
from fastapi import FastAPI, HTTPException, BackgroundTasks
|
||||
from fastapi.middleware.cors import CORSMiddleware
|
||||
from fastapi.responses import HTMLResponse
|
||||
from pydantic import BaseModel
|
||||
from typing import Optional, List, Dict, Any
|
||||
import asyncpg
|
||||
|
||||
# Config — every value is overridable via environment variables.
# NOTE(review): DATABASE_URL ships a hardcoded default password; acceptable for
# a local compose stack, but confirm it is overridden in real deployments.
DATABASE_URL = os.getenv("DATABASE_URL", "postgresql://jira:jira_secret_2026@postgres:5432/jira_fixer")
OPENROUTER_API_KEY = os.getenv("OPENROUTER_API_KEY", "")  # empty key -> call_llm() returns a canned mock response
GITEA_URL = os.getenv("GITEA_URL", "https://gitea.startdata.com.br")
COBOL_REPO = os.getenv("COBOL_REPO", "startdata/cobol-sample-app")

# Database pool
# Global asyncpg pool: created by init_db() at startup, closed in lifespan().
db_pool = None
|
||||
|
||||
async def init_db():
    """Create the global asyncpg pool and ensure the schema exists.

    Called once at application startup (see lifespan). Idempotent: all DDL
    uses IF NOT EXISTS, so service restarts are safe.
    """
    global db_pool
    db_pool = await asyncpg.create_pool(DATABASE_URL, min_size=2, max_size=10)

    async with db_pool.acquire() as conn:
        # issues: one row per webhook-received ticket plus its AI analysis.
        # repositories: bookkeeping for indexed repos -- not written to by any
        # endpoint visible in this file; presumably used elsewhere (confirm).
        await conn.execute("""
            CREATE TABLE IF NOT EXISTS issues (
                id SERIAL PRIMARY KEY,
                external_id TEXT,
                external_key TEXT,
                source TEXT,
                title TEXT,
                description TEXT,
                status TEXT DEFAULT 'pending',
                analysis TEXT,
                confidence FLOAT,
                affected_files TEXT,
                suggested_fix TEXT,
                created_at TIMESTAMP DEFAULT NOW(),
                analyzed_at TIMESTAMP
            );
            CREATE TABLE IF NOT EXISTS repositories (
                id SERIAL PRIMARY KEY,
                name TEXT UNIQUE,
                url TEXT,
                indexed_at TIMESTAMP,
                file_count INT DEFAULT 0
            );
            CREATE INDEX IF NOT EXISTS idx_issues_status ON issues(status);
            CREATE INDEX IF NOT EXISTS idx_issues_external ON issues(external_id, source);
        """)
|
||||
|
||||
@asynccontextmanager
async def lifespan(app: FastAPI):
    """FastAPI lifespan hook: open the DB pool on startup, close it on shutdown."""
    await init_db()
    yield
    # Guard in case init_db() failed before assigning the pool.
    if db_pool:
        await db_pool.close()
|
||||
|
||||
app = FastAPI(title="JIRA AI Fixer", version="1.0.0", lifespan=lifespan)
# NOTE(review): allow_origins=["*"] combined with allow_credentials=True is
# rejected by browsers for credentialed requests and is overly permissive --
# confirm whether credentials are actually needed for these endpoints.
app.add_middleware(CORSMiddleware, allow_origins=["*"], allow_credentials=True, allow_methods=["*"], allow_headers=["*"])
|
||||
|
||||
# Models
class WebhookPayload(BaseModel):
    """Incoming TicketHub webhook envelope."""
    event: str            # e.g. "ticket.created"; other events are ignored by the handler
    timestamp: str
    data: Dict[str, Any]  # raw ticket payload (handler reads id, key, title, description)
|
||||
|
||||
class IssueResponse(BaseModel):
    """Public shape of an analyzed issue.

    NOTE(review): not referenced by any endpoint in this file (handlers return
    plain dicts) -- presumably intended as a response_model; confirm or wire up.
    """
    id: int
    external_key: str
    title: str
    status: str
    confidence: Optional[float]  # 0.0-1.0 once analyzed
    analysis: Optional[str]
    suggested_fix: Optional[str]
|
||||
|
||||
# Health
@app.get("/api/health")
async def health():
    """Liveness probe: identify the service and its version."""
    report = {"status": "healthy", "service": "jira-ai-fixer", "version": "1.0.0"}
    return report
|
||||
|
||||
# Webhook endpoint for TicketHub
@app.post("/api/webhook/tickethub")
async def webhook_tickethub(payload: WebhookPayload, background_tasks: BackgroundTasks):
    """Accept a TicketHub webhook, persist the ticket, and queue AI analysis."""
    # Only freshly-created tickets are analyzed; every other event is a no-op.
    if payload.event != "ticket.created":
        return {"status": "ignored", "reason": f"event {payload.event} not handled"}

    ticket = payload.data
    row_values = (
        str(ticket.get("id")),
        ticket.get("key"),
        "tickethub",
        ticket.get("title"),
        ticket.get("description"),
    )

    # Save to database
    async with db_pool.acquire() as conn:
        issue_id = await conn.fetchval("""
            INSERT INTO issues (external_id, external_key, source, title, description, status)
            VALUES ($1, $2, $3, $4, $5, 'pending')
            RETURNING id
        """, *row_values)

    # Analysis runs after the HTTP response is sent (FastAPI background task).
    background_tasks.add_task(analyze_issue, issue_id, ticket)

    return {"status": "accepted", "issue_id": issue_id, "message": "Analysis queued"}
|
||||
|
||||
# JIRA webhook (compatible format)
@app.post("/api/webhook/jira")
async def webhook_jira(payload: Dict[str, Any], background_tasks: BackgroundTasks):
    """Accept a JIRA-format webhook, persist the issue, and queue AI analysis."""
    if "issue_created" not in payload.get("webhookEvent", ""):
        return {"status": "ignored"}

    issue = payload.get("issue", {})
    fields = issue.get("fields", {})
    summary = fields.get("summary")
    description = fields.get("description")

    async with db_pool.acquire() as conn:
        issue_id = await conn.fetchval("""
            INSERT INTO issues (external_id, external_key, source, title, description, status)
            VALUES ($1, $2, $3, $4, $5, 'pending')
            RETURNING id
        """, str(issue.get("id")), issue.get("key"), "jira", summary, description)

    # Normalize to the ticket-dict shape analyze_issue expects.
    # NOTE(review): no "id" key is included, so post_analysis_comment() will
    # skip commenting for JIRA-sourced issues -- confirm this is intentional.
    normalized = {
        "key": issue.get("key"),
        "title": summary,
        "description": description
    }
    background_tasks.add_task(analyze_issue, issue_id, normalized)

    return {"status": "accepted", "issue_id": issue_id}
|
||||
|
||||
async def analyze_issue(issue_id: int, ticket: dict):
    """Background task to analyze issue with AI.

    Pipeline: fetch COBOL sources -> build prompt -> call LLM -> parse ->
    persist results -> post a comment back. Any failure marks the issue row
    'error' and stores the exception text in the analysis column.

    Args:
        issue_id: Primary key of the row inserted by the webhook handler.
        ticket: Ticket dict (key/title/description; "id" only for
            TicketHub-sourced tickets, which the comment step requires).
    """
    try:
        # Fetch COBOL code from repository (best-effort; may be empty)
        cobol_files = await fetch_cobol_files()

        # Build prompt for AI
        prompt = build_analysis_prompt(ticket, cobol_files)

        # Call LLM (returns canned mock when no API key is configured)
        analysis = await call_llm(prompt)

        # Parse response into the DB-update shape
        result = parse_analysis(analysis)

        # Update database
        async with db_pool.acquire() as conn:
            await conn.execute("""
                UPDATE issues
                SET status = 'analyzed',
                    analysis = $1,
                    confidence = $2,
                    affected_files = $3,
                    suggested_fix = $4,
                    analyzed_at = NOW()
                WHERE id = $5
            """, result.get("analysis"), result.get("confidence"),
                json.dumps(result.get("affected_files", [])),
                result.get("suggested_fix"), issue_id)

        # Post comment back to TicketHub (no-op when ticket lacks an "id")
        await post_analysis_comment(ticket, result)

    except Exception as e:
        # Record the failure on the issue row so the dashboard can surface it.
        async with db_pool.acquire() as conn:
            await conn.execute("""
                UPDATE issues SET status = 'error', analysis = $1 WHERE id = $2
            """, f"Error: {str(e)}", issue_id)
|
||||
|
||||
async def fetch_cobol_files() -> Dict[str, str]:
    """Fetch COBOL source files from Gitea.

    Lists src/cobol in the configured repo and downloads every file ending in
    ".CBL" (uppercase only, as stored in the repo). Best-effort: any HTTP or
    parsing failure yields a partial (possibly empty) mapping instead of
    raising, so analysis can still proceed without code context.

    Returns:
        Mapping of file name -> file content for each .CBL file retrieved.
    """
    files: Dict[str, str] = {}
    async with httpx.AsyncClient(timeout=30.0) as client:
        # Get file list
        url = f"{GITEA_URL}/api/v1/repos/{COBOL_REPO}/contents/src/cobol"
        try:
            resp = await client.get(url)
            if resp.status_code == 200:
                for item in resp.json():
                    if item["name"].endswith(".CBL"):
                        # Fetch file content via the raw endpoint
                        file_url = f"{GITEA_URL}/api/v1/repos/{COBOL_REPO}/raw/src/cobol/{item['name']}"
                        file_resp = await client.get(file_url)
                        if file_resp.status_code == 200:
                            files[item["name"]] = file_resp.text
        except Exception:
            # Was a bare `except:` -- narrowed so KeyboardInterrupt/SystemExit
            # and task cancellation are no longer swallowed. Otherwise still
            # deliberately best-effort (repo may be unreachable).
            pass
    return files
|
||||
|
||||
def build_analysis_prompt(ticket: dict, cobol_files: Dict[str, str]) -> str:
    """Assemble the LLM prompt: case details, source listings, task, JSON spec."""
    # Concatenate each source file under a "=== name ===" banner.
    sections = []
    for file_name, file_text in cobol_files.items():
        sections.append(f"=== {file_name} ===\n{file_text}")
    files_content = "\n\n".join(sections)

    return f"""You are a COBOL expert analyzing a support case.

## Support Case
**Title:** {ticket.get('title', '')}
**Description:** {ticket.get('description', '')}

## Source Code Files
{files_content}

## Task
1. Identify the root cause of the issue described
2. Find the specific file(s) and line(s) affected
3. Propose a fix with the exact code change needed
4. Estimate your confidence (0-100%)

## Response Format (JSON)
{{
  "root_cause": "Brief explanation of what's causing the issue",
  "affected_files": ["filename.CBL"],
  "affected_lines": "line numbers or section names",
  "suggested_fix": "The exact code change needed (before/after)",
  "confidence": 85,
  "explanation": "Detailed technical explanation"
}}

Respond ONLY with valid JSON."""
|
||||
|
||||
async def call_llm(prompt: str) -> str:
    """Call OpenRouter LLM API.

    Returns the model's raw text reply (expected to be JSON). When no API key
    is configured, returns a canned response describing the known AUTH.CBL
    balance-truncation bug so the pipeline can be exercised offline. A non-200
    API response yields "{}", which parse_analysis() turns into empty fields.
    """
    if not OPENROUTER_API_KEY:
        # Fallback mock response for testing
        return json.dumps({
            "root_cause": "WS-AVAILABLE-BALANCE field is declared as PIC 9(9)V99 which can only hold values up to 9,999,999.99. The HOST system returns balances in PIC 9(11)V99 format, causing truncation on amounts over $10 million.",
            "affected_files": ["AUTH.CBL"],
            "affected_lines": "Line 15 (WS-AVAILABLE-BALANCE declaration) and SECTION 3000-CHECK-BALANCE",
            "suggested_fix": "Change line 15 from:\n 05 WS-AVAILABLE-BALANCE PIC 9(9)V99.\nTo:\n 05 WS-AVAILABLE-BALANCE PIC 9(11)V99.",
            "confidence": 92,
            "explanation": "The AUTH.CBL program declares WS-AVAILABLE-BALANCE with PIC 9(9)V99, limiting it to 9,999,999.99. When receiving balance data from HOST (which uses PIC 9(11)V99), values above this limit get truncated. For example, a balance of 150,000,000.00 would be truncated to 0,000,000.00, causing false 'insufficient funds' responses. The fix is to align the field size with the HOST response format."
        })

    async with httpx.AsyncClient(timeout=60.0) as client:
        resp = await client.post(
            "https://openrouter.ai/api/v1/chat/completions",
            headers={
                "Authorization": f"Bearer {OPENROUTER_API_KEY}",
                "Content-Type": "application/json"
            },
            json={
                # Low temperature for near-deterministic analysis output.
                "model": "meta-llama/llama-3.3-70b-instruct:free",
                "messages": [{"role": "user", "content": prompt}],
                "temperature": 0.1
            }
        )
        if resp.status_code == 200:
            return resp.json()["choices"][0]["message"]["content"]
        return "{}"
|
||||
|
||||
def parse_analysis(analysis: str) -> dict:
    """Parse the LLM's (ideally JSON) response into the DB-update shape.

    Strips a ```json / ``` code fence if present, then decodes the JSON. On
    any parsing failure the raw text is kept as the analysis with a neutral
    0.5 confidence, so the issue is still marked analyzed rather than failing.

    Args:
        analysis: Raw model output.

    Returns:
        dict with keys: analysis (str), confidence (float, 0.0-1.0),
        affected_files (list), suggested_fix (str).
    """
    try:
        # Try to extract JSON from a fenced code block first.
        if "```json" in analysis:
            analysis = analysis.split("```json")[1].split("```")[0]
        elif "```" in analysis:
            analysis = analysis.split("```")[1].split("```")[0]

        data = json.loads(analysis.strip())
        return {
            "analysis": data.get("root_cause", "") + "\n\n" + data.get("explanation", ""),
            # Model reports 0-100; the DB stores 0.0-1.0.
            "confidence": data.get("confidence", 0) / 100.0,
            "affected_files": data.get("affected_files", []),
            "suggested_fix": data.get("suggested_fix", "")
        }
    except Exception:
        # Was a bare `except:` -- narrowed so KeyboardInterrupt/SystemExit
        # still propagate. Any malformed response falls back to raw text.
        return {
            "analysis": analysis,
            "confidence": 0.5,
            "affected_files": [],
            "suggested_fix": ""
        }
|
||||
|
||||
async def post_analysis_comment(ticket: dict, result: dict):
    """Post analysis result back to TicketHub as a comment.

    Silently does nothing when the ticket has no "id" (e.g. JIRA-sourced
    tickets, whose normalized dict carries only key/title/description).
    Delivery is best-effort: a failed comment must not fail the analysis task.

    Args:
        ticket: Ticket dict; only "id" is read here.
        result: Parsed analysis (see parse_analysis) to render as markdown.
    """
    ticket_id = ticket.get("id")
    if not ticket_id:
        return

    confidence_pct = int(result.get("confidence", 0) * 100)
    files = ", ".join(result.get("affected_files", ["Unknown"]))

    comment = f"""🤖 **AI Analysis Complete**

**Root Cause:** {result.get('analysis', 'Unable to determine')}

**Affected Files:** {files}

**Suggested Fix:**
```cobol
{result.get('suggested_fix', 'No fix suggested')}
```

**Confidence:** {confidence_pct}%

---
_Analyzed by JIRA AI Fixer_"""

    # NOTE(review): the TicketHub base URL is hardcoded while Gitea's is
    # configurable -- consider an env var for parity.
    async with httpx.AsyncClient(timeout=10.0) as client:
        try:
            await client.post(
                f"https://tickethub.startdata.com.br/api/tickets/{ticket_id}/comments",
                json={"author": "AI Fixer", "content": comment}
            )
        except Exception:
            # Was a bare `except:` -- narrowed so cancellation and
            # KeyboardInterrupt propagate; posting stays best-effort.
            pass
|
||||
|
||||
# Issues API
@app.get("/api/issues")
async def list_issues(status: Optional[str] = None, limit: int = 50):
    """List recent issues, newest first, optionally filtered by status."""
    # Pick the query and its bind parameters up front, fetch once.
    if status:
        query = "SELECT * FROM issues WHERE status = $1 ORDER BY created_at DESC LIMIT $2"
        args = (status, limit)
    else:
        query = "SELECT * FROM issues ORDER BY created_at DESC LIMIT $1"
        args = (limit,)

    async with db_pool.acquire() as conn:
        rows = await conn.fetch(query, *args)
    return [dict(r) for r in rows]
|
||||
|
||||
@app.get("/api/issues/{issue_id}")
async def get_issue(issue_id: int):
    """Return a single issue row by primary key; 404 when it does not exist."""
    async with db_pool.acquire() as conn:
        row = await conn.fetchrow("SELECT * FROM issues WHERE id = $1", issue_id)
        if not row:
            raise HTTPException(404, "Issue not found")
        return dict(row)
|
||||
|
||||
# Dashboard HTML
# Single-page dashboard served at "/" and "/dashboard": shows issue counts and
# a list that polls GET /api/issues every 5 seconds. Styling via Tailwind CDN.
# NOTE(review): issue titles/analysis are injected into innerHTML unescaped --
# ticket content could carry HTML/JS; confirm whether sanitization is needed.
DASHBOARD_HTML = """<!DOCTYPE html>
<html><head><meta charset="UTF-8"><meta name="viewport" content="width=device-width,initial-scale=1">
<title>JIRA AI Fixer</title><script src="https://cdn.tailwindcss.com"></script></head>
<body class="bg-gray-900 text-white">
<div class="min-h-screen">
<header class="bg-gray-800 border-b border-gray-700 p-4">
<div class="max-w-6xl mx-auto flex items-center justify-between">
<div class="flex items-center gap-3"><span class="text-3xl">🤖</span><h1 class="text-xl font-bold">JIRA AI Fixer</h1></div>
<span class="text-sm text-gray-400">Intelligent Support Case Resolution</span>
</div></header>
<main class="max-w-6xl mx-auto p-6">
<div class="grid grid-cols-3 gap-4 mb-6">
<div class="bg-gray-800 rounded-lg p-4"><div class="text-3xl font-bold" id="total">-</div><div class="text-gray-400">Total Issues</div></div>
<div class="bg-gray-800 rounded-lg p-4"><div class="text-3xl font-bold text-green-400" id="analyzed">-</div><div class="text-gray-400">Analyzed</div></div>
<div class="bg-gray-800 rounded-lg p-4"><div class="text-3xl font-bold text-yellow-400" id="pending">-</div><div class="text-gray-400">Pending</div></div>
</div>
<div class="bg-gray-800 rounded-lg">
<div class="p-4 border-b border-gray-700"><h2 class="font-semibold">Recent Issues</h2></div>
<div id="issues" class="divide-y divide-gray-700"></div>
</div>
</main></div>
<script>
async function load(){
const r=await fetch('/api/issues');const issues=await r.json();
document.getElementById('total').textContent=issues.length;
document.getElementById('analyzed').textContent=issues.filter(i=>i.status==='analyzed').length;
document.getElementById('pending').textContent=issues.filter(i=>i.status==='pending').length;
document.getElementById('issues').innerHTML=issues.length?issues.map(i=>`
<div class="p-4 hover:bg-gray-700/50">
<div class="flex justify-between items-start">
<div><span class="text-blue-400 font-mono">${i.external_key||'#'+i.id}</span>
<span class="ml-2">${i.title}</span></div>
<span class="px-2 py-1 rounded text-xs ${i.status==='analyzed'?'bg-green-500/20 text-green-400':i.status==='error'?'bg-red-500/20 text-red-400':'bg-yellow-500/20 text-yellow-400'}">${i.status}</span>
</div>
${i.confidence?`<div class="mt-2 text-sm text-gray-400">Confidence: ${Math.round(i.confidence*100)}%</div>`:''}
${i.analysis?`<div class="mt-2 text-sm text-gray-300 line-clamp-2">${i.analysis.substring(0,200)}...</div>`:''}
</div>`).join(''):'<div class="p-8 text-center text-gray-500">No issues yet</div>';
}
load();setInterval(load,5000);
</script></body></html>"""
|
||||
|
||||
@app.get("/", response_class=HTMLResponse)
async def dashboard():
    """Serve the static dashboard page at the site root."""
    return DASHBOARD_HTML
|
||||
|
||||
@app.get("/dashboard", response_class=HTMLResponse)
async def dashboard_alt():
    """Alias route serving the same dashboard page as "/"."""
    return DASHBOARD_HTML
|
||||
|
|
@ -1,12 +1,5 @@
|
|||
fastapi>=0.109.0
|
||||
uvicorn[standard]>=0.27.0
|
||||
httpx>=0.26.0
|
||||
pydantic>=2.5.0
|
||||
python-dotenv>=1.0.0
|
||||
redis>=5.0.0
|
||||
sqlalchemy>=2.0.0
|
||||
alembic>=1.13.0
|
||||
asyncpg>=0.29.0
|
||||
qdrant-client>=1.7.0
|
||||
sentence-transformers>=2.2.0
|
||||
numpy>=1.26.0
|
||||
fastapi==0.109.0
|
||||
uvicorn==0.27.0
|
||||
asyncpg==0.29.0
|
||||
httpx==0.26.0
|
||||
pydantic==2.5.3
|
||||
|
|
|
|||
Loading…
Reference in New Issue