# jira-ai-fixer/app/api/issues.py

"""Issue management endpoints."""
from typing import List, Optional
from datetime import datetime
from fastapi import APIRouter, Depends, HTTPException, status, BackgroundTasks
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select, func
from app.core.database import get_db
from app.models.issue import Issue, IssueStatus, IssueComment
from app.models.organization import OrganizationMember
from app.models.integration import Integration
from app.schemas.issue import IssueCreate, IssueRead, IssueUpdate, IssueStats, IssueComment as IssueCommentSchema
from app.api.deps import get_current_user, require_role
from app.services.analysis import AnalysisService
from app.services.email import EmailService
router = APIRouter()
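
# Every endpoint below is scoped to a single organization via org_id and gated by
# require_role(): "viewer" is enough to read issues and stats, while "analyst" is
# required to create issues, trigger re-analysis, or add comments.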


async def run_analysis(issue_id: int, db_url: str):
    """Background task to analyze an issue.

    Runs after the HTTP response has been sent, so it builds its own engine and
    session from db_url instead of reusing the request-scoped session.
    """
    from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession
    from sqlalchemy.orm import sessionmaker

    engine = create_async_engine(db_url)
    async_session = sessionmaker(engine, class_=AsyncSession, expire_on_commit=False)
    try:
        async with async_session() as db:
            result = await db.execute(select(Issue).where(Issue.id == issue_id))
            issue = result.scalar_one_or_none()
            if not issue:
                return
            issue.status = IssueStatus.ANALYZING
            issue.analysis_started_at = datetime.utcnow()
            await db.commit()
            try:
                # Get the integration's linked repository, if one is configured
                repo = None
                if issue.integration_id:
                    int_result = await db.execute(
                        select(Integration).where(Integration.id == issue.integration_id)
                    )
                    integration = int_result.scalar_one_or_none()
                    if integration and integration.repositories:
                        repo = integration.repositories[0].get("gitea_repo")
                # Run analysis
                analysis = await AnalysisService.analyze({
                    "title": issue.title,
                    "description": issue.description,
                    "priority": issue.priority.value if issue.priority else "medium"
                }, repo)
                issue.root_cause = analysis.get("root_cause")
                issue.affected_files = analysis.get("affected_files", [])
                issue.suggested_fix = analysis.get("suggested_fix")
                issue.confidence = analysis.get("confidence", 0)
                issue.analysis_raw = analysis
                issue.status = IssueStatus.ANALYZED
                issue.analysis_completed_at = datetime.utcnow()
                # Create a PR when a repository is linked and confidence is at least 70%
                if repo and issue.confidence and issue.confidence >= 0.7:
                    branch = f"fix/{issue.external_key or issue.id}-auto-fix"
                    pr_url = await AnalysisService.create_pull_request(
                        repo=repo,
                        branch=branch,
                        title=f"Fix: {issue.title}",
                        description=f"## Root Cause\n{issue.root_cause}\n\n## Suggested Fix\n{issue.suggested_fix}",
                        file_changes=[]
                    )
                    if pr_url:
                        issue.pr_url = pr_url
                        issue.pr_branch = branch
                        issue.status = IssueStatus.PR_CREATED
            except Exception as e:
                issue.status = IssueStatus.ERROR
                issue.root_cause = f"Analysis failed: {str(e)}"
            await db.commit()
    finally:
        # Dispose of the per-task engine so its connection pool is not leaked
        await engine.dispose()


@router.get("/", response_model=List[IssueRead])
async def list_issues(
    org_id: int,
    status: Optional[IssueStatus] = None,
    source: Optional[str] = None,
    limit: int = 50,
    offset: int = 0,
    member: OrganizationMember = Depends(require_role("viewer")),
    db: AsyncSession = Depends(get_db)
):
    """List issues for organization."""
    query = select(Issue).where(Issue.organization_id == org_id)
    if status:
        query = query.where(Issue.status == status)
    if source:
        query = query.where(Issue.source == source)
    query = query.order_by(Issue.created_at.desc()).offset(offset).limit(limit)
    result = await db.execute(query)
    return result.scalars().all()


@router.get("/stats", response_model=IssueStats)
async def get_stats(
    org_id: int,
    member: OrganizationMember = Depends(require_role("viewer")),
    db: AsyncSession = Depends(get_db)
):
    """Get issue statistics."""
    # Total count
    total_result = await db.execute(
        select(func.count(Issue.id)).where(Issue.organization_id == org_id)
    )
    total = total_result.scalar() or 0
    # Counts by status
    status_counts = {}
    for s in IssueStatus:
        result = await db.execute(
            select(func.count(Issue.id))
            .where(Issue.organization_id == org_id)
            .where(Issue.status == s)
        )
        status_counts[s.value] = result.scalar() or 0
    # By source
    source_result = await db.execute(
        select(Issue.source, func.count(Issue.id))
        .where(Issue.organization_id == org_id)
        .group_by(Issue.source)
    )
    by_source = {row[0] or "unknown": row[1] for row in source_result.all()}
    # By priority
    priority_result = await db.execute(
        select(Issue.priority, func.count(Issue.id))
        .where(Issue.organization_id == org_id)
        .group_by(Issue.priority)
    )
    by_priority = {str(row[0].value) if row[0] else "unknown": row[1] for row in priority_result.all()}
    # Avg confidence (only over issues that have been analyzed)
    avg_result = await db.execute(
        select(func.avg(Issue.confidence))
        .where(Issue.organization_id == org_id)
        .where(Issue.confidence.isnot(None))
    )
    avg_confidence = avg_result.scalar() or 0
    # SLA breached
    sla_result = await db.execute(
        select(func.count(Issue.id))
        .where(Issue.organization_id == org_id)
        .where(Issue.sla_breached.is_(True))
    )
    sla_breached = sla_result.scalar() or 0
    return IssueStats(
        total=total,
        pending=status_counts.get("pending", 0),
        analyzing=status_counts.get("analyzing", 0),
        analyzed=status_counts.get("analyzed", 0),
        pr_created=status_counts.get("pr_created", 0),
        completed=status_counts.get("completed", 0),
        error=status_counts.get("error", 0),
        avg_confidence=avg_confidence,
        by_source=by_source,
        by_priority=by_priority,
        sla_breached=sla_breached
    )


@router.post("/", response_model=IssueRead, status_code=status.HTTP_201_CREATED)
async def create_issue(
    org_id: int,
    issue_in: IssueCreate,
    background_tasks: BackgroundTasks,
    member: OrganizationMember = Depends(require_role("analyst")),
    db: AsyncSession = Depends(get_db)
):
    """Create and analyze a new issue."""
    issue = Issue(
        organization_id=org_id,
        title=issue_in.title,
        description=issue_in.description,
        priority=issue_in.priority,
        external_id=issue_in.external_id,
        external_key=issue_in.external_key,
        external_url=issue_in.external_url,
        source=issue_in.source,
        labels=issue_in.labels,
        callback_url=issue_in.callback_url,
        raw_payload=issue_in.raw_payload
    )
    db.add(issue)
    # Flush so the database assigns issue.id before the background task is queued
    await db.flush()
    # Queue analysis; the task builds its own engine, so hand it a URL that
    # explicitly names the asyncpg driver
    from app.core.config import settings
    background_tasks.add_task(
        run_analysis,
        issue.id,
        settings.DATABASE_URL.replace("postgresql://", "postgresql+asyncpg://")
    )
    return issue


@router.get("/{issue_id}", response_model=IssueRead)
async def get_issue(
    org_id: int,
    issue_id: int,
    member: OrganizationMember = Depends(require_role("viewer")),
    db: AsyncSession = Depends(get_db)
):
    """Get issue details."""
    result = await db.execute(
        select(Issue)
        .where(Issue.id == issue_id)
        .where(Issue.organization_id == org_id)
    )
    issue = result.scalar_one_or_none()
    if not issue:
        raise HTTPException(status_code=404, detail="Issue not found")
    return issue


@router.post("/{issue_id}/reanalyze", response_model=IssueRead)
async def reanalyze_issue(
    org_id: int,
    issue_id: int,
    background_tasks: BackgroundTasks,
    member: OrganizationMember = Depends(require_role("analyst")),
    db: AsyncSession = Depends(get_db)
):
    """Rerun analysis on issue."""
    result = await db.execute(
        select(Issue)
        .where(Issue.id == issue_id)
        .where(Issue.organization_id == org_id)
    )
    issue = result.scalar_one_or_none()
    if not issue:
        raise HTTPException(status_code=404, detail="Issue not found")
    issue.status = IssueStatus.PENDING
    from app.core.config import settings
    background_tasks.add_task(
        run_analysis,
        issue.id,
        settings.DATABASE_URL.replace("postgresql://", "postgresql+asyncpg://")
    )
    return issue


@router.post("/{issue_id}/comments")
async def add_comment(
    org_id: int,
    issue_id: int,
    comment: IssueCommentSchema,
    member: OrganizationMember = Depends(require_role("analyst")),
    db: AsyncSession = Depends(get_db)
):
    """Add comment to issue."""
    result = await db.execute(
        select(Issue)
        .where(Issue.id == issue_id)
        .where(Issue.organization_id == org_id)
    )
    issue = result.scalar_one_or_none()
    if not issue:
        raise HTTPException(status_code=404, detail="Issue not found")
    new_comment = IssueComment(
        issue_id=issue_id,
        author=comment.author,
        content=comment.content,
        author_type=comment.author_type
    )
    db.add(new_comment)
    return {"status": "ok"}
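
# Illustrative only: this router is mounted wherever the FastAPI app is assembled
# (not in this file), so the prefix below is an assumption, e.g.:
#
#     from fastapi import FastAPI
#     from app.api import issues
#
#     app = FastAPI()
#     app.include_router(issues.router, prefix="/organizations/{org_id}/issues", tags=["issues"])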