# jira-ai-fixer/app/api/reports.py
"""Reports and analytics endpoints."""
import csv
import io
from datetime import datetime, timedelta
from typing import List, Optional

from fastapi import APIRouter, Depends
from fastapi.responses import StreamingResponse
from pydantic import BaseModel
from sqlalchemy import func, select
from sqlalchemy.ext.asyncio import AsyncSession

from app.api.deps import require_role
from app.core.database import get_db
from app.models.issue import Issue, IssueStatus
from app.models.organization import OrganizationMember

router = APIRouter()

# Statuses that count as "analyzed" for reporting purposes.
ANALYZED_STATUSES = [
    IssueStatus.ANALYZED,
    IssueStatus.PR_CREATED,
    IssueStatus.COMPLETED,
]


class DailyStats(BaseModel):
    """Per-day issue counts for the daily breakdown."""

    date: str
    total: int
    analyzed: int
    prs_created: int
    avg_confidence: float


class ReportSummary(BaseModel):
    """Aggregated report for an organization over a period."""

    period_start: datetime
    period_end: datetime
    total_issues: int
    analyzed_issues: int
    prs_created: int
    avg_confidence: float
    avg_analysis_time_hours: Optional[float]
    top_sources: List[dict]
    daily_breakdown: List[DailyStats]
@router.get("/summary", response_model=ReportSummary)
async def get_report_summary(
org_id: int,
days: int = 30,
member: OrganizationMember = Depends(require_role("viewer")),
db: AsyncSession = Depends(get_db)
):
"""Get summary report for organization."""
end_date = datetime.utcnow()
start_date = end_date - timedelta(days=days)

    # Total issues created in the period.
    total_result = await db.execute(
        select(func.count(Issue.id))
        .where(Issue.organization_id == org_id)
        .where(Issue.created_at >= start_date)
    )
    total = total_result.scalar() or 0

    # Issues that reached an analyzed-or-later status.
    analyzed_result = await db.execute(
        select(func.count(Issue.id))
        .where(Issue.organization_id == org_id)
        .where(Issue.created_at >= start_date)
        .where(Issue.status.in_(ANALYZED_STATUSES))
    )
    analyzed = analyzed_result.scalar() or 0

    # Issues with a pull request attached.
    prs_result = await db.execute(
        select(func.count(Issue.id))
        .where(Issue.organization_id == org_id)
        .where(Issue.created_at >= start_date)
        .where(Issue.pr_url.isnot(None))
    )
    prs = prs_result.scalar() or 0

    # Average confidence over issues that have a confidence score.
    avg_conf_result = await db.execute(
        select(func.avg(Issue.confidence))
        .where(Issue.organization_id == org_id)
        .where(Issue.created_at >= start_date)
        .where(Issue.confidence.isnot(None))
    )
    avg_conf_value = avg_conf_result.scalar()
    avg_confidence = float(avg_conf_value) if avg_conf_value is not None else 0.0

    # Top five issue sources by volume.
    sources_result = await db.execute(
        select(Issue.source, func.count(Issue.id).label("count"))
        .where(Issue.organization_id == org_id)
        .where(Issue.created_at >= start_date)
        .group_by(Issue.source)
        .order_by(func.count(Issue.id).desc())
        .limit(5)
    )
    top_sources = [
        {"source": source or "unknown", "count": count}
        for source, count in sources_result.all()
    ]

    # Daily breakdown, capped at 30 days. This runs four queries per day
    # (an N+1 pattern); see the grouped-query sketch at the end of this
    # module for a single-query alternative.
    daily_breakdown = []
    for i in range(min(days, 30)):
        day_start = start_date + timedelta(days=i)
        day_end = day_start + timedelta(days=1)
        day_total = await db.execute(
            select(func.count(Issue.id))
            .where(Issue.organization_id == org_id)
            .where(Issue.created_at >= day_start)
            .where(Issue.created_at < day_end)
        )
        day_analyzed = await db.execute(
            select(func.count(Issue.id))
            .where(Issue.organization_id == org_id)
            .where(Issue.created_at >= day_start)
            .where(Issue.created_at < day_end)
            .where(Issue.status.in_(ANALYZED_STATUSES))
        )
        day_prs = await db.execute(
            select(func.count(Issue.id))
            .where(Issue.organization_id == org_id)
            .where(Issue.created_at >= day_start)
            .where(Issue.created_at < day_end)
            .where(Issue.pr_url.isnot(None))
        )
        day_conf = await db.execute(
            select(func.avg(Issue.confidence))
            .where(Issue.organization_id == org_id)
            .where(Issue.created_at >= day_start)
            .where(Issue.created_at < day_end)
            .where(Issue.confidence.isnot(None))
        )
        daily_breakdown.append(DailyStats(
            date=day_start.strftime("%Y-%m-%d"),
            total=day_total.scalar() or 0,
            analyzed=day_analyzed.scalar() or 0,
            prs_created=day_prs.scalar() or 0,
            avg_confidence=float(day_conf.scalar() or 0),
        ))

    return ReportSummary(
        period_start=start_date,
        period_end=end_date,
        total_issues=total,
        analyzed_issues=analyzed,
        prs_created=prs,
        avg_confidence=avg_confidence,
        avg_analysis_time_hours=None,  # TODO: calculate; see the sketch below
        top_sources=top_sources,
        daily_breakdown=daily_breakdown,
    )
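

# A possible implementation for the ``avg_analysis_time_hours`` TODO above.
# This is a minimal sketch, not wired into the endpoint: it assumes a
# PostgreSQL backend (``extract('epoch', ...)`` on a timestamp difference)
# and that ``Issue.analysis_completed_at`` is set when analysis finishes,
# as the CSV export below suggests.
async def _avg_analysis_time_hours(
    db: AsyncSession, org_id: int, start_date: datetime
) -> Optional[float]:
    """Average hours from issue creation to analysis completion, or None."""
    from sqlalchemy import extract  # kept local so the sketch is self-contained

    result = await db.execute(
        select(
            func.avg(extract("epoch", Issue.analysis_completed_at - Issue.created_at))
        )
        .where(Issue.organization_id == org_id)
        .where(Issue.created_at >= start_date)
        .where(Issue.analysis_completed_at.isnot(None))
    )
    seconds = result.scalar()
    return float(seconds) / 3600 if seconds is not None else None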
@router.get("/export/csv")
async def export_csv(
org_id: int,
days: int = 30,
member: OrganizationMember = Depends(require_role("manager")),
db: AsyncSession = Depends(get_db)
):
"""Export issues as CSV."""
from fastapi.responses import StreamingResponse
import io
import csv
start_date = datetime.utcnow() - timedelta(days=days)

    # Fetch all issues in the window, newest first.
    result = await db.execute(
        select(Issue)
        .where(Issue.organization_id == org_id)
        .where(Issue.created_at >= start_date)
        .order_by(Issue.created_at.desc())
    )
    issues = result.scalars().all()

    # Build the CSV in memory; fine for modest exports.
    output = io.StringIO()
    writer = csv.writer(output)
    writer.writerow([
        "ID", "Key", "Title", "Source", "Status", "Priority",
        "Confidence", "PR URL", "Created At", "Analyzed At",
    ])
    for issue in issues:
        writer.writerow([
            issue.id,
            issue.external_key,
            issue.title,
            issue.source,
            issue.status.value if issue.status else "",
            issue.priority.value if issue.priority else "",
            # ``is not None`` so a legitimate 0% confidence is not dropped.
            f"{issue.confidence:.0%}" if issue.confidence is not None else "",
            issue.pr_url or "",
            issue.created_at.isoformat() if issue.created_at else "",
            issue.analysis_completed_at.isoformat() if issue.analysis_completed_at else "",
        ])

    output.seek(0)
    return StreamingResponse(
        iter([output.getvalue()]),
        media_type="text/csv",
        headers={
            "Content-Disposition": (
                f"attachment; filename=issues-{datetime.utcnow().strftime('%Y%m%d')}.csv"
            )
        },
    )
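

# Sketch: the per-day loop in ``get_report_summary`` issues four queries per
# day (up to ~120 round trips for a 30-day window). The helper below collapses
# the breakdown into one grouped query. It is an illustrative alternative, not
# wired in; it assumes a backend where ``func.date()`` truncates a timestamp
# to a date (PostgreSQL and SQLite both accept it). Note that days with no
# issues produce no row here, whereas the loop emits explicit zeros.
async def _daily_stats_grouped(
    db: AsyncSession, org_id: int, start_date: datetime, end_date: datetime
) -> List[DailyStats]:
    from sqlalchemy import case  # kept local so the sketch is self-contained

    day = func.date(Issue.created_at)
    result = await db.execute(
        select(
            day.label("day"),
            func.count(Issue.id).label("total"),
            func.sum(
                case((Issue.status.in_(ANALYZED_STATUSES), 1), else_=0)
            ).label("analyzed"),
            func.sum(case((Issue.pr_url.isnot(None), 1), else_=0)).label("prs"),
            func.avg(Issue.confidence).label("avg_conf"),
        )
        .where(Issue.organization_id == org_id)
        .where(Issue.created_at >= start_date)
        .where(Issue.created_at < end_date)
        .group_by(day)
        .order_by(day)
    )
    return [
        DailyStats(
            date=str(row.day),
            total=row.total,
            analyzed=row.analyzed or 0,
            prs_created=row.prs or 0,
            avg_confidence=float(row.avg_conf) if row.avg_conf is not None else 0.0,
        )
        for row in result.all()
    ]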