commit bfe59c2d5766dc7fe568afd0290c6eeb6f159634 Author: Ricel Leite Date: Wed Feb 18 19:51:46 2026 -0300 JIRA AI Fixer v2.0 - Enterprise Edition Backend: - FastAPI with async SQLAlchemy - JWT auth with refresh tokens - RBAC (viewer→analyst→manager→admin→owner) - Multi-tenant organizations - Integrations: JIRA, ServiceNow, Zendesk, GitHub, GitLab, Azure DevOps - Webhook endpoints for all platforms - Reports with CSV export - Email via Resend - AI analysis via OpenRouter - PR creation via Gitea API - Audit logging Frontend: - React 18 + Vite + Tailwind - React Query for state/cache - Recharts for analytics - Dark enterprise theme - 8 pages: Login, Register, Dashboard, Issues, IssueDetail, Integrations, Team, Reports, Settings Ready for Hetzner deployment. diff --git a/.env.example b/.env.example new file mode 100644 index 0000000..a361647 --- /dev/null +++ b/.env.example @@ -0,0 +1,21 @@ +# Database (use shared PostgreSQL Stack 49) +DATABASE_URL=postgresql://postgres:postgres@postgres_database:5432/jira_fixer_v2 + +# Redis (use shared Redis Stack 12) +REDIS_URL=redis://redis_redis:6379 + +# JWT +JWT_SECRET=your-super-secret-jwt-key-change-me + +# Email (Resend) +RESEND_API_KEY=re_LP4Vf7jA_E9fvcBtQ9aD219jA2QEBcZs7 + +# AI (OpenRouter) +OPENROUTER_API_KEY=your-openrouter-key + +# Git (Gitea) +GITEA_URL=https://gitea.startdata.com.br +GITEA_TOKEN=4b28e0a797f16e0f9f986ad03a77a320fe90d3d6 + +# App +APP_URL=https://jira-fixer.startdata.com.br diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..1bd7b30 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,32 @@ +# Stage 1: Build frontend +FROM node:20-alpine AS frontend-builder +WORKDIR /build + +COPY frontend/package.json frontend/package-lock.json* ./ +RUN npm install + +COPY frontend/ ./ +RUN npm run build + +# Stage 2: Python backend +FROM python:3.11-slim + +WORKDIR /app + +# Install dependencies +COPY requirements.txt . 
+RUN pip install --no-cache-dir -r requirements.txt + +# Copy backend +COPY app/ ./app/ + +# Copy built frontend +COPY --from=frontend-builder /build/dist ./frontend/ + +# Environment +ENV PYTHONUNBUFFERED=1 +ENV PYTHONDONTWRITEBYTECODE=1 + +EXPOSE 8000 + +CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"] diff --git a/app/api/__init__.py b/app/api/__init__.py new file mode 100644 index 0000000..4297a9f --- /dev/null +++ b/app/api/__init__.py @@ -0,0 +1,17 @@ +from fastapi import APIRouter +from .auth import router as auth_router +from .users import router as users_router +from .organizations import router as orgs_router +from .integrations import router as integrations_router +from .issues import router as issues_router +from .webhooks import router as webhooks_router +from .reports import router as reports_router + +api_router = APIRouter() +api_router.include_router(auth_router, prefix="/auth", tags=["Authentication"]) +api_router.include_router(users_router, prefix="/users", tags=["Users"]) +api_router.include_router(orgs_router, prefix="/organizations", tags=["Organizations"]) +api_router.include_router(integrations_router, prefix="/integrations", tags=["Integrations"]) +api_router.include_router(issues_router, prefix="/issues", tags=["Issues"]) +api_router.include_router(webhooks_router, prefix="/webhooks", tags=["Webhooks"]) +api_router.include_router(reports_router, prefix="/reports", tags=["Reports"]) diff --git a/app/api/auth.py b/app/api/auth.py new file mode 100644 index 0000000..7d1a7d6 --- /dev/null +++ b/app/api/auth.py @@ -0,0 +1,122 @@ +"""Authentication endpoints.""" +from datetime import datetime +from fastapi import APIRouter, Depends, HTTPException, status, Request +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy import select +from app.core.database import get_db +from app.core.security import verify_password, get_password_hash, create_access_token, create_refresh_token, decode_token +from app.models.user 
import User +from app.schemas.user import UserCreate, UserRead, Token +from app.services.audit import AuditService + +router = APIRouter() + +@router.post("/register", response_model=UserRead) +async def register( + user_in: UserCreate, + request: Request, + db: AsyncSession = Depends(get_db) +): + """Register a new user.""" + # Check if email exists + result = await db.execute(select(User).where(User.email == user_in.email)) + if result.scalar_one_or_none(): + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail="Email already registered" + ) + + # Create user + user = User( + email=user_in.email, + hashed_password=get_password_hash(user_in.password), + full_name=user_in.full_name + ) + db.add(user) + await db.flush() + + # Audit log + await AuditService.log( + db, + action="user.register", + user_id=user.id, + resource_type="user", + resource_id=user.id, + ip_address=request.client.host if request.client else None + ) + + return user + +@router.post("/login", response_model=Token) +async def login( + email: str, + password: str, + request: Request, + db: AsyncSession = Depends(get_db) +): + """Login and get access token.""" + result = await db.execute(select(User).where(User.email == email)) + user = result.scalar_one_or_none() + + if not user or not verify_password(password, user.hashed_password): + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail="Invalid email or password" + ) + + if not user.is_active: + raise HTTPException( + status_code=status.HTTP_403_FORBIDDEN, + detail="User is inactive" + ) + + # Update last login + user.last_login = datetime.utcnow() + + # Audit log + await AuditService.log( + db, + action="user.login", + user_id=user.id, + resource_type="user", + resource_id=user.id, + ip_address=request.client.host if request.client else None, + user_agent=request.headers.get("user-agent") + ) + + # Create tokens + token_data = {"user_id": user.id, "email": user.email} + access_token = 
create_access_token(token_data) + refresh_token = create_refresh_token(token_data) + + return Token(access_token=access_token, refresh_token=refresh_token) + +@router.post("/refresh", response_model=Token) +async def refresh_token( + refresh_token: str, + db: AsyncSession = Depends(get_db) +): + """Refresh access token.""" + payload = decode_token(refresh_token) + + if not payload or payload.get("type") != "refresh": + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail="Invalid refresh token" + ) + + user_id = payload.get("user_id") + result = await db.execute(select(User).where(User.id == user_id)) + user = result.scalar_one_or_none() + + if not user or not user.is_active: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail="User not found or inactive" + ) + + token_data = {"user_id": user.id, "email": user.email} + new_access_token = create_access_token(token_data) + new_refresh_token = create_refresh_token(token_data) + + return Token(access_token=new_access_token, refresh_token=new_refresh_token) diff --git a/app/api/deps.py b/app/api/deps.py new file mode 100644 index 0000000..292b5d6 --- /dev/null +++ b/app/api/deps.py @@ -0,0 +1,69 @@ +"""API dependencies.""" +from fastapi import Depends, HTTPException, status +from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy import select +from app.core.database import get_db +from app.core.security import decode_token, has_permission +from app.models.user import User +from app.models.organization import OrganizationMember + +security = HTTPBearer() + +async def get_current_user( + credentials: HTTPAuthorizationCredentials = Depends(security), + db: AsyncSession = Depends(get_db) +) -> User: + """Get current authenticated user.""" + token = credentials.credentials + payload = decode_token(token) + + if not payload or payload.get("type") != "access": + raise HTTPException( + 
status_code=status.HTTP_401_UNAUTHORIZED, + detail="Invalid or expired token" + ) + + user_id = payload.get("user_id") + result = await db.execute(select(User).where(User.id == user_id)) + user = result.scalar_one_or_none() + + if not user or not user.is_active: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail="User not found or inactive" + ) + + return user + +async def get_org_member( + org_id: int, + user: User = Depends(get_current_user), + db: AsyncSession = Depends(get_db) +) -> OrganizationMember: + """Get user's membership in organization.""" + result = await db.execute( + select(OrganizationMember) + .where(OrganizationMember.organization_id == org_id) + .where(OrganizationMember.user_id == user.id) + ) + member = result.scalar_one_or_none() + + if not member and not user.is_superuser: + raise HTTPException( + status_code=status.HTTP_403_FORBIDDEN, + detail="Not a member of this organization" + ) + + return member + +def require_role(required_role: str): + """Dependency to require a minimum role.""" + async def check_role(member: OrganizationMember = Depends(get_org_member)): + if not has_permission(member.role.value, required_role): + raise HTTPException( + status_code=status.HTTP_403_FORBIDDEN, + detail=f"Requires {required_role} role or higher" + ) + return member + return check_role diff --git a/app/api/integrations.py b/app/api/integrations.py new file mode 100644 index 0000000..d50fde1 --- /dev/null +++ b/app/api/integrations.py @@ -0,0 +1,142 @@ +"""Integration management endpoints.""" +from typing import List +import secrets +from fastapi import APIRouter, Depends, HTTPException, status +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy import select +from app.core.database import get_db +from app.models.integration import Integration, IntegrationType, IntegrationStatus +from app.models.organization import OrganizationMember +from app.schemas.integration import IntegrationCreate, IntegrationRead, 
IntegrationUpdate +from app.api.deps import get_current_user, require_role + +router = APIRouter() + +@router.get("/", response_model=List[IntegrationRead]) +async def list_integrations( + org_id: int, + member: OrganizationMember = Depends(require_role("analyst")), + db: AsyncSession = Depends(get_db) +): + """List integrations for organization.""" + result = await db.execute( + select(Integration).where(Integration.organization_id == org_id) + ) + return result.scalars().all() + +@router.post("/", response_model=IntegrationRead, status_code=status.HTTP_201_CREATED) +async def create_integration( + org_id: int, + integration_in: IntegrationCreate, + member: OrganizationMember = Depends(require_role("admin")), + db: AsyncSession = Depends(get_db) +): + """Create a new integration.""" + # Generate webhook secret + webhook_secret = secrets.token_hex(32) + + # Generate webhook URL + webhook_url = f"https://jira-fixer.startdata.com.br/api/webhooks/{org_id}/{integration_in.type.value}" + + integration = Integration( + organization_id=org_id, + name=integration_in.name, + type=integration_in.type, + base_url=integration_in.base_url, + auth_type=integration_in.auth_type, + api_key=integration_in.api_key, + api_secret=integration_in.api_secret, + webhook_url=webhook_url, + webhook_secret=webhook_secret, + callback_url=integration_in.callback_url, + auto_analyze=integration_in.auto_analyze, + sync_comments=integration_in.sync_comments, + create_prs=integration_in.create_prs, + repositories=integration_in.repositories, + created_by_id=member.user_id, + status=IntegrationStatus.ACTIVE + ) + db.add(integration) + await db.flush() + + return integration + +@router.get("/{integration_id}", response_model=IntegrationRead) +async def get_integration( + org_id: int, + integration_id: int, + member: OrganizationMember = Depends(require_role("analyst")), + db: AsyncSession = Depends(get_db) +): + """Get integration details.""" + result = await db.execute( + select(Integration) + 
.where(Integration.id == integration_id) + .where(Integration.organization_id == org_id) + ) + integration = result.scalar_one_or_none() + if not integration: + raise HTTPException(status_code=404, detail="Integration not found") + return integration + +@router.patch("/{integration_id}", response_model=IntegrationRead) +async def update_integration( + org_id: int, + integration_id: int, + integration_update: IntegrationUpdate, + member: OrganizationMember = Depends(require_role("admin")), + db: AsyncSession = Depends(get_db) +): + """Update integration.""" + result = await db.execute( + select(Integration) + .where(Integration.id == integration_id) + .where(Integration.organization_id == org_id) + ) + integration = result.scalar_one_or_none() + if not integration: + raise HTTPException(status_code=404, detail="Integration not found") + + for field, value in integration_update.dict(exclude_unset=True).items(): + setattr(integration, field, value) + + return integration + +@router.delete("/{integration_id}", status_code=status.HTTP_204_NO_CONTENT) +async def delete_integration( + org_id: int, + integration_id: int, + member: OrganizationMember = Depends(require_role("admin")), + db: AsyncSession = Depends(get_db) +): + """Delete integration.""" + result = await db.execute( + select(Integration) + .where(Integration.id == integration_id) + .where(Integration.organization_id == org_id) + ) + integration = result.scalar_one_or_none() + if not integration: + raise HTTPException(status_code=404, detail="Integration not found") + + await db.delete(integration) + +@router.post("/{integration_id}/test") +async def test_integration( + org_id: int, + integration_id: int, + member: OrganizationMember = Depends(require_role("admin")), + db: AsyncSession = Depends(get_db) +): + """Test integration connection.""" + result = await db.execute( + select(Integration) + .where(Integration.id == integration_id) + .where(Integration.organization_id == org_id) + ) + integration = 
result.scalar_one_or_none() + if not integration: + raise HTTPException(status_code=404, detail="Integration not found") + + # TODO: Implement actual connection test based on integration type + return {"status": "ok", "message": "Connection successful"} diff --git a/app/api/issues.py b/app/api/issues.py new file mode 100644 index 0000000..1deedb6 --- /dev/null +++ b/app/api/issues.py @@ -0,0 +1,271 @@ +"""Issue management endpoints.""" +from typing import List, Optional +from datetime import datetime +from fastapi import APIRouter, Depends, HTTPException, status, BackgroundTasks +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy import select, func +from app.core.database import get_db +from app.models.issue import Issue, IssueStatus, IssueComment +from app.models.organization import OrganizationMember +from app.models.integration import Integration +from app.schemas.issue import IssueCreate, IssueRead, IssueUpdate, IssueStats, IssueComment as IssueCommentSchema +from app.api.deps import get_current_user, require_role +from app.services.analysis import AnalysisService +from app.services.email import EmailService + +router = APIRouter() + +async def run_analysis(issue_id: int, db_url: str): + """Background task to analyze issue.""" + from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession + from sqlalchemy.orm import sessionmaker + + engine = create_async_engine(db_url) + async_session = sessionmaker(engine, class_=AsyncSession, expire_on_commit=False) + + async with async_session() as db: + result = await db.execute(select(Issue).where(Issue.id == issue_id)) + issue = result.scalar_one_or_none() + if not issue: + return + + issue.status = IssueStatus.ANALYZING + issue.analysis_started_at = datetime.utcnow() + await db.commit() + + try: + # Get integration repo if available + repo = None + if issue.integration_id: + int_result = await db.execute(select(Integration).where(Integration.id == issue.integration_id)) + integration = 
int_result.scalar_one_or_none() + if integration and integration.repositories: + repo = integration.repositories[0].get("gitea_repo") + + # Run analysis + analysis = await AnalysisService.analyze({ + "title": issue.title, + "description": issue.description, + "priority": issue.priority.value if issue.priority else "medium" + }, repo) + + issue.root_cause = analysis.get("root_cause") + issue.affected_files = analysis.get("affected_files", []) + issue.suggested_fix = analysis.get("suggested_fix") + issue.confidence = analysis.get("confidence", 0) + issue.analysis_raw = analysis + issue.status = IssueStatus.ANALYZED + issue.analysis_completed_at = datetime.utcnow() + + # Create PR if enabled and confidence > 70% + if repo and issue.confidence and issue.confidence >= 0.7: + branch = f"fix/{issue.external_key or issue.id}-auto-fix" + pr_url = await AnalysisService.create_pull_request( + repo=repo, + branch=branch, + title=f"Fix: {issue.title}", + description=f"## Root Cause\n{issue.root_cause}\n\n## Suggested Fix\n{issue.suggested_fix}", + file_changes=[] + ) + if pr_url: + issue.pr_url = pr_url + issue.pr_branch = branch + issue.status = IssueStatus.PR_CREATED + + except Exception as e: + issue.status = IssueStatus.ERROR + issue.root_cause = f"Analysis failed: {str(e)}" + + await db.commit() + +@router.get("/", response_model=List[IssueRead]) +async def list_issues( + org_id: int, + status: Optional[IssueStatus] = None, + source: Optional[str] = None, + limit: int = 50, + offset: int = 0, + member: OrganizationMember = Depends(require_role("viewer")), + db: AsyncSession = Depends(get_db) +): + """List issues for organization.""" + query = select(Issue).where(Issue.organization_id == org_id) + + if status: + query = query.where(Issue.status == status) + if source: + query = query.where(Issue.source == source) + + query = query.order_by(Issue.created_at.desc()).offset(offset).limit(limit) + result = await db.execute(query) + return result.scalars().all() + 
@router.get("/stats", response_model=IssueStats)
async def get_stats(
    org_id: int,
    member: OrganizationMember = Depends(require_role("viewer")),
    db: AsyncSession = Depends(get_db)
):
    """Return aggregate issue statistics for an organization.

    Includes the total issue count, per-status / per-source / per-priority
    breakdowns, the average analysis confidence (NULLs excluded) and the
    number of SLA-breached issues. Requires at least the "viewer" role.
    """
    # Total issues for the org (includes rows whose status is NULL).
    total_result = await db.execute(
        select(func.count(Issue.id)).where(Issue.organization_id == org_id)
    )
    total = total_result.scalar() or 0

    # Per-status counts in ONE grouped query instead of issuing a separate
    # COUNT query per IssueStatus member (len(IssueStatus) round-trips -> 1).
    grouped = await db.execute(
        select(Issue.status, func.count(Issue.id))
        .where(Issue.organization_id == org_id)
        .group_by(Issue.status)
    )
    status_counts = {s.value: 0 for s in IssueStatus}
    for st, cnt in grouped.all():
        if st is not None:  # NULL-status rows count toward `total` only
            status_counts[st.value] = cnt

    # Counts grouped by source; NULL sources are reported as "unknown".
    source_result = await db.execute(
        select(Issue.source, func.count(Issue.id))
        .where(Issue.organization_id == org_id)
        .group_by(Issue.source)
    )
    by_source = {row[0] or "unknown": row[1] for row in source_result.all()}

    # Counts grouped by priority; NULL priorities are reported as "unknown".
    priority_result = await db.execute(
        select(Issue.priority, func.count(Issue.id))
        .where(Issue.organization_id == org_id)
        .group_by(Issue.priority)
    )
    by_priority = {str(row[0].value) if row[0] else "unknown": row[1] for row in priority_result.all()}

    # Mean confidence over issues that actually have a confidence score.
    avg_result = await db.execute(
        select(func.avg(Issue.confidence))
        .where(Issue.organization_id == org_id)
        .where(Issue.confidence.isnot(None))
    )
    avg_confidence = avg_result.scalar() or 0

    # SLA-breached count; .is_(True) is the idiomatic SQLAlchemy spelling
    # of the previous `== True` comparison (same generated SQL, lint-clean).
    sla_result = await db.execute(
        select(func.count(Issue.id))
        .where(Issue.organization_id == org_id)
        .where(Issue.sla_breached.is_(True))
    )
    sla_breached = sla_result.scalar() or 0

    return IssueStats(
        total=total,
        pending=status_counts.get("pending", 0),
        analyzing=status_counts.get("analyzing", 0),
        analyzed=status_counts.get("analyzed", 0),
        pr_created=status_counts.get("pr_created", 0),
        completed=status_counts.get("completed", 0),
        error=status_counts.get("error", 0),
        avg_confidence=avg_confidence,
        by_source=by_source,
        by_priority=by_priority,
        sla_breached=sla_breached
    )
+@router.post("/", response_model=IssueRead, status_code=status.HTTP_201_CREATED) +async def create_issue( + org_id: int, + issue_in: IssueCreate, + background_tasks: BackgroundTasks, + member: OrganizationMember = Depends(require_role("analyst")), + db: AsyncSession = Depends(get_db) +): + """Create and analyze a new issue.""" + issue = Issue( + organization_id=org_id, + title=issue_in.title, + description=issue_in.description, + priority=issue_in.priority, + external_id=issue_in.external_id, + external_key=issue_in.external_key, + external_url=issue_in.external_url, + source=issue_in.source, + labels=issue_in.labels, + callback_url=issue_in.callback_url, + raw_payload=issue_in.raw_payload + ) + db.add(issue) + await db.flush() + + # Queue analysis + from app.core.config import settings + background_tasks.add_task(run_analysis, issue.id, settings.DATABASE_URL.replace("postgresql://", "postgresql+asyncpg://")) + + return issue + +@router.get("/{issue_id}", response_model=IssueRead) +async def get_issue( + org_id: int, + issue_id: int, + member: OrganizationMember = Depends(require_role("viewer")), + db: AsyncSession = Depends(get_db) +): + """Get issue details.""" + result = await db.execute( + select(Issue) + .where(Issue.id == issue_id) + .where(Issue.organization_id == org_id) + ) + issue = result.scalar_one_or_none() + if not issue: + raise HTTPException(status_code=404, detail="Issue not found") + return issue + +@router.post("/{issue_id}/reanalyze", response_model=IssueRead) +async def reanalyze_issue( + org_id: int, + issue_id: int, + background_tasks: BackgroundTasks, + member: OrganizationMember = Depends(require_role("analyst")), + db: AsyncSession = Depends(get_db) +): + """Rerun analysis on issue.""" + result = await db.execute( + select(Issue) + .where(Issue.id == issue_id) + .where(Issue.organization_id == org_id) + ) + issue = result.scalar_one_or_none() + if not issue: + raise HTTPException(status_code=404, detail="Issue not found") + + issue.status 
= IssueStatus.PENDING + + from app.core.config import settings + background_tasks.add_task(run_analysis, issue.id, settings.DATABASE_URL.replace("postgresql://", "postgresql+asyncpg://")) + + return issue + +@router.post("/{issue_id}/comments") +async def add_comment( + org_id: int, + issue_id: int, + comment: IssueCommentSchema, + member: OrganizationMember = Depends(require_role("analyst")), + db: AsyncSession = Depends(get_db) +): + """Add comment to issue.""" + result = await db.execute( + select(Issue) + .where(Issue.id == issue_id) + .where(Issue.organization_id == org_id) + ) + issue = result.scalar_one_or_none() + if not issue: + raise HTTPException(status_code=404, detail="Issue not found") + + new_comment = IssueComment( + issue_id=issue_id, + author=comment.author, + content=comment.content, + author_type=comment.author_type + ) + db.add(new_comment) + + return {"status": "ok"} diff --git a/app/api/organizations.py b/app/api/organizations.py new file mode 100644 index 0000000..d6fb148 --- /dev/null +++ b/app/api/organizations.py @@ -0,0 +1,153 @@ +"""Organization management endpoints.""" +from typing import List +from fastapi import APIRouter, Depends, HTTPException, status +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy import select, func +from app.core.database import get_db +from app.models.user import User +from app.models.organization import Organization, OrganizationMember, MemberRole +from app.schemas.organization import OrganizationCreate, OrganizationRead, OrganizationUpdate, MemberCreate, MemberRead +from app.api.deps import get_current_user, require_role +from app.services.email import EmailService + +router = APIRouter() + +@router.get("/", response_model=List[OrganizationRead]) +async def list_organizations( + user: User = Depends(get_current_user), + db: AsyncSession = Depends(get_db) +): + """List organizations user belongs to.""" + result = await db.execute( + select(Organization) + .join(OrganizationMember) + 
.where(OrganizationMember.user_id == user.id) + .where(Organization.is_active == True) + ) + return result.scalars().all() + +@router.post("/", response_model=OrganizationRead, status_code=status.HTTP_201_CREATED) +async def create_organization( + org_in: OrganizationCreate, + user: User = Depends(get_current_user), + db: AsyncSession = Depends(get_db) +): + """Create a new organization.""" + # Check slug uniqueness + result = await db.execute(select(Organization).where(Organization.slug == org_in.slug)) + if result.scalar_one_or_none(): + raise HTTPException(status_code=400, detail="Slug already exists") + + # Create org + org = Organization( + name=org_in.name, + slug=org_in.slug, + description=org_in.description + ) + db.add(org) + await db.flush() + + # Add creator as owner + member = OrganizationMember( + organization_id=org.id, + user_id=user.id, + role=MemberRole.OWNER + ) + db.add(member) + + return org + +@router.get("/{org_id}", response_model=OrganizationRead) +async def get_organization( + org_id: int, + user: User = Depends(get_current_user), + db: AsyncSession = Depends(get_db) +): + """Get organization details.""" + result = await db.execute(select(Organization).where(Organization.id == org_id)) + org = result.scalar_one_or_none() + if not org: + raise HTTPException(status_code=404, detail="Organization not found") + return org + +@router.patch("/{org_id}", response_model=OrganizationRead) +async def update_organization( + org_id: int, + org_update: OrganizationUpdate, + member: OrganizationMember = Depends(require_role("admin")), + db: AsyncSession = Depends(get_db) +): + """Update organization (admin only).""" + result = await db.execute(select(Organization).where(Organization.id == org_id)) + org = result.scalar_one_or_none() + if not org: + raise HTTPException(status_code=404, detail="Organization not found") + + for field, value in org_update.dict(exclude_unset=True).items(): + setattr(org, field, value) + + return org + 
+@router.get("/{org_id}/members", response_model=List[MemberRead]) +async def list_members( + org_id: int, + user: User = Depends(get_current_user), + db: AsyncSession = Depends(get_db) +): + """List organization members.""" + result = await db.execute( + select(OrganizationMember) + .where(OrganizationMember.organization_id == org_id) + ) + return result.scalars().all() + +@router.post("/{org_id}/members", response_model=MemberRead, status_code=status.HTTP_201_CREATED) +async def invite_member( + org_id: int, + member_in: MemberCreate, + current_member: OrganizationMember = Depends(require_role("admin")), + db: AsyncSession = Depends(get_db) +): + """Invite a new member (admin only).""" + # Find or create user + result = await db.execute(select(User).where(User.email == member_in.email)) + user = result.scalar_one_or_none() + + if not user: + # Create placeholder user + from app.core.security import get_password_hash + import secrets + user = User( + email=member_in.email, + hashed_password=get_password_hash(secrets.token_urlsafe(32)), + is_active=False # Will activate on first login + ) + db.add(user) + await db.flush() + + # Check if already member + result = await db.execute( + select(OrganizationMember) + .where(OrganizationMember.organization_id == org_id) + .where(OrganizationMember.user_id == user.id) + ) + if result.scalar_one_or_none(): + raise HTTPException(status_code=400, detail="User is already a member") + + # Add member + member = OrganizationMember( + organization_id=org_id, + user_id=user.id, + role=member_in.role, + invited_by_id=current_member.user_id + ) + db.add(member) + + # Get org name for email + org_result = await db.execute(select(Organization).where(Organization.id == org_id)) + org = org_result.scalar_one() + + # Send welcome email + await EmailService.send_welcome(user.email, user.full_name or user.email, org.name) + + return member diff --git a/app/api/reports.py b/app/api/reports.py new file mode 100644 index 0000000..fdca90e --- 
/dev/null +++ b/app/api/reports.py @@ -0,0 +1,192 @@ +"""Reports and analytics endpoints.""" +from typing import List, Optional +from datetime import datetime, timedelta +from fastapi import APIRouter, Depends +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy import select, func, and_ +from app.core.database import get_db +from app.models.issue import Issue, IssueStatus +from app.models.organization import OrganizationMember +from app.api.deps import require_role +from pydantic import BaseModel + +router = APIRouter() + +class DailyStats(BaseModel): + date: str + total: int + analyzed: int + prs_created: int + avg_confidence: float + +class ReportSummary(BaseModel): + period_start: datetime + period_end: datetime + total_issues: int + analyzed_issues: int + prs_created: int + avg_confidence: float + avg_analysis_time_hours: Optional[float] + top_sources: List[dict] + daily_breakdown: List[DailyStats] + +@router.get("/summary", response_model=ReportSummary) +async def get_report_summary( + org_id: int, + days: int = 30, + member: OrganizationMember = Depends(require_role("viewer")), + db: AsyncSession = Depends(get_db) +): + """Get summary report for organization.""" + end_date = datetime.utcnow() + start_date = end_date - timedelta(days=days) + + # Total issues + total_result = await db.execute( + select(func.count(Issue.id)) + .where(Issue.organization_id == org_id) + .where(Issue.created_at >= start_date) + ) + total = total_result.scalar() or 0 + + # Analyzed + analyzed_result = await db.execute( + select(func.count(Issue.id)) + .where(Issue.organization_id == org_id) + .where(Issue.created_at >= start_date) + .where(Issue.status.in_([IssueStatus.ANALYZED, IssueStatus.PR_CREATED, IssueStatus.COMPLETED])) + ) + analyzed = analyzed_result.scalar() or 0 + + # PRs created + prs_result = await db.execute( + select(func.count(Issue.id)) + .where(Issue.organization_id == org_id) + .where(Issue.created_at >= start_date) + .where(Issue.pr_url.isnot(None)) 
+ ) + prs = prs_result.scalar() or 0 + + # Avg confidence + avg_conf_result = await db.execute( + select(func.avg(Issue.confidence)) + .where(Issue.organization_id == org_id) + .where(Issue.created_at >= start_date) + .where(Issue.confidence.isnot(None)) + ) + avg_confidence = avg_conf_result.scalar() or 0 + + # Top sources + sources_result = await db.execute( + select(Issue.source, func.count(Issue.id).label("count")) + .where(Issue.organization_id == org_id) + .where(Issue.created_at >= start_date) + .group_by(Issue.source) + .order_by(func.count(Issue.id).desc()) + .limit(5) + ) + top_sources = [{"source": r[0] or "unknown", "count": r[1]} for r in sources_result.all()] + + # Daily breakdown (simplified) + daily_breakdown = [] + for i in range(min(days, 30)): + day_start = start_date + timedelta(days=i) + day_end = day_start + timedelta(days=1) + + day_total = await db.execute( + select(func.count(Issue.id)) + .where(Issue.organization_id == org_id) + .where(Issue.created_at >= day_start) + .where(Issue.created_at < day_end) + ) + day_analyzed = await db.execute( + select(func.count(Issue.id)) + .where(Issue.organization_id == org_id) + .where(Issue.created_at >= day_start) + .where(Issue.created_at < day_end) + .where(Issue.status.in_([IssueStatus.ANALYZED, IssueStatus.PR_CREATED, IssueStatus.COMPLETED])) + ) + day_prs = await db.execute( + select(func.count(Issue.id)) + .where(Issue.organization_id == org_id) + .where(Issue.created_at >= day_start) + .where(Issue.created_at < day_end) + .where(Issue.pr_url.isnot(None)) + ) + day_conf = await db.execute( + select(func.avg(Issue.confidence)) + .where(Issue.organization_id == org_id) + .where(Issue.created_at >= day_start) + .where(Issue.created_at < day_end) + .where(Issue.confidence.isnot(None)) + ) + + daily_breakdown.append(DailyStats( + date=day_start.strftime("%Y-%m-%d"), + total=day_total.scalar() or 0, + analyzed=day_analyzed.scalar() or 0, + prs_created=day_prs.scalar() or 0, + 
# --- app/api/reports.py (continued) ---

@router.get("/export/csv")
async def export_csv(
    org_id: int,
    days: int = 30,
    member: OrganizationMember = Depends(require_role("manager")),
    db: AsyncSession = Depends(get_db)
):
    """Export the organization's issues from the last ``days`` days as a CSV attachment.

    Requires at least the ``manager`` role. Issues are ordered newest first.
    """
    from fastapi.responses import StreamingResponse
    import io
    import csv

    start_date = datetime.utcnow() - timedelta(days=days)

    result = await db.execute(
        select(Issue)
        .where(Issue.organization_id == org_id)
        .where(Issue.created_at >= start_date)
        .order_by(Issue.created_at.desc())
    )
    issues = result.scalars().all()

    output = io.StringIO()
    writer = csv.writer(output)
    writer.writerow([
        "ID", "Key", "Title", "Source", "Status", "Priority",
        "Confidence", "PR URL", "Created At", "Analyzed At"
    ])

    for issue in issues:
        writer.writerow([
            issue.id,
            issue.external_key,
            issue.title,
            issue.source,
            issue.status.value if issue.status else "",
            issue.priority.value if issue.priority else "",
            # BUG FIX: the original tested truthiness, so a legitimate
            # confidence of 0.0 rendered as "". Compare against None instead.
            f"{issue.confidence:.0%}" if issue.confidence is not None else "",
            issue.pr_url or "",
            issue.created_at.isoformat() if issue.created_at else "",
            issue.analysis_completed_at.isoformat() if issue.analysis_completed_at else ""
        ])

    output.seek(0)
    return StreamingResponse(
        iter([output.getvalue()]),
        media_type="text/csv",
        headers={"Content-Disposition": f"attachment; filename=issues-{datetime.utcnow().strftime('%Y%m%d')}.csv"}
    )


# --- app/api/users.py ---
"""User management endpoints."""
from fastapi import APIRouter, Depends, HTTPException
from sqlalchemy.ext.asyncio import AsyncSession
from app.core.database import get_db
from app.core.security import get_password_hash
from app.models.user import User
from app.schemas.user import UserRead, UserUpdate
from app.api.deps import get_current_user

router = APIRouter()


@router.get("/me", response_model=UserRead)
async def get_me(user: User = Depends(get_current_user)):
    """Return the authenticated user's profile."""
    return user


@router.patch("/me", response_model=UserRead)
async def update_me(
    user_update: UserUpdate,
    user: User = Depends(get_current_user),
    db: AsyncSession = Depends(get_db)
):
    """Partially update the authenticated user's profile.

    Only truthy fields are applied, so a field cannot be cleared (set to
    "" / None) through this endpoint. Persistence relies on the commit
    performed by the ``get_db`` dependency after the request succeeds.
    """
    if user_update.email:
        # NOTE(review): email uniqueness is enforced only by the DB unique
        # constraint; a duplicate email surfaces as an IntegrityError (HTTP 500).
        user.email = user_update.email
    if user_update.full_name:
        user.full_name = user_update.full_name
    if user_update.avatar_url:
        # NOTE(review): the User model in this repo declares no avatar_url
        # column, so this assignment is not persisted — confirm the schema.
        user.avatar_url = user_update.avatar_url
    if user_update.password:
        user.hashed_password = get_password_hash(user_update.password)

    return user


# --- app/api/webhooks.py ---
"""Webhook endpoints for external integrations."""
from typing import Optional
from datetime import datetime
from fastapi import APIRouter, Depends, HTTPException, BackgroundTasks, Request, Header
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select
from app.core.database import get_db
from app.models.organization import Organization
from app.models.integration import Integration, IntegrationType, IntegrationStatus
from app.models.issue import Issue, IssueStatus, IssuePriority
import hmac
import hashlib

router = APIRouter()


def verify_signature(payload: bytes, signature: str, secret: str) -> bool:
    """Verify an HMAC-SHA256 webhook signature of the form ``sha256=<hexdigest>``.

    NOTE(review): verification is deliberately skipped (returns True) when the
    secret or the signature is absent, and no webhook endpoint in this module
    actually calls this helper yet — wire it in before trusting webhook origin.
    """
    if not secret or not signature:
        return True  # Skip verification if no secret configured
    expected = hmac.new(secret.encode(), payload, hashlib.sha256).hexdigest()
    return hmac.compare_digest(f"sha256={expected}", signature)
async def process_webhook(
    org_id: int,
    integration_type: IntegrationType,
    payload: dict,
    background_tasks: BackgroundTasks,
    db: AsyncSession
) -> dict:
    """Process an incoming webhook and create an Issue.

    Looks up the active integration for the organization, normalizes the
    provider-specific payload, stores a new Issue and, when the integration
    has auto_analyze enabled, queues background AI analysis.

    Returns a small status dict: ``ignored`` (no integration / irrelevant
    event) or ``accepted`` with the new issue id.
    """
    # Find the active integration of this type for the organization.
    result = await db.execute(
        select(Integration)
        .where(Integration.organization_id == org_id)
        .where(Integration.type == integration_type)
        .where(Integration.status == IntegrationStatus.ACTIVE)
    )
    integration = result.scalar_one_or_none()

    if not integration:
        return {"status": "ignored", "message": "No active integration found"}

    # Normalize payload based on integration type.
    issue_data = normalize_payload(integration_type, payload)
    if not issue_data:
        return {"status": "ignored", "message": "Event not processed"}

    # BUG FIX: update stats only for events that actually produce an issue.
    # The original incremented the counter *before* filtering, so ignored
    # events (e.g. non-"opened" GitHub actions) inflated issues_processed.
    integration.issues_processed = (integration.issues_processed or 0) + 1
    integration.last_sync_at = datetime.utcnow()

    # Create the issue record.
    issue = Issue(
        organization_id=org_id,
        integration_id=integration.id,
        external_id=issue_data.get("external_id"),
        external_key=issue_data.get("external_key"),
        external_url=issue_data.get("external_url"),
        source=integration_type.value,
        title=issue_data.get("title"),
        description=issue_data.get("description"),
        priority=IssuePriority(issue_data.get("priority", "medium")),
        labels=issue_data.get("labels"),
        callback_url=issue_data.get("callback_url") or integration.callback_url,
        raw_payload=payload
    )
    db.add(issue)
    await db.flush()  # flush so issue.id is populated for the response/task

    # Queue analysis if auto_analyze is enabled for this integration.
    if integration.auto_analyze:
        from app.api.issues import run_analysis
        from app.core.config import settings
        background_tasks.add_task(
            run_analysis,
            issue.id,
            settings.DATABASE_URL.replace("postgresql://", "postgresql+asyncpg://")
        )

    return {"status": "accepted", "issue_id": issue.id}


def normalize_payload(integration_type: IntegrationType, payload: dict) -> Optional[dict]:
    """Normalize a provider-specific webhook payload to a common dict.

    Returns None for events that should be ignored (e.g. updates rather than
    creations). The returned dict feeds directly into the Issue constructor.
    """
    if integration_type == IntegrationType.JIRA_CLOUD:
        event = payload.get("webhookEvent", "")
        # Only react to issue creation (event is e.g. "jira:issue_created").
        if "issue_created" not in event:
            return None
        issue = payload.get("issue", {})
        fields = issue.get("fields", {})
        return {
            "external_id": str(issue.get("id")),
            "external_key": issue.get("key"),
            # Derive the browse URL from the REST self-link's host portion.
            "external_url": f"{payload.get('issue', {}).get('self', '').split('/rest/')[0]}/browse/{issue.get('key')}",
            "title": fields.get("summary"),
            # NOTE(review): Jira Cloud may deliver description as an ADF
            # document (dict), not plain text — confirm downstream handling.
            "description": fields.get("description"),
            "priority": normalize_priority(fields.get("priority", {}).get("name")),
            "labels": fields.get("labels", [])
        }

    elif integration_type == IntegrationType.SERVICENOW:
        # ServiceNow payload is assumed pre-flattened by a business rule.
        return {
            "external_id": payload.get("sys_id"),
            "external_key": payload.get("number"),
            "external_url": payload.get("url"),
            "title": payload.get("short_description"),
            "description": payload.get("description"),
            "priority": normalize_priority(payload.get("priority")),
            "callback_url": payload.get("callback_url")
        }

    elif integration_type == IntegrationType.ZENDESK:
        # Zendesk may nest the ticket or send it flat.
        ticket = payload.get("ticket", payload)
        return {
            "external_id": str(ticket.get("id")),
            "external_key": f"ZD-{ticket.get('id')}",
            "external_url": ticket.get("url"),
            "title": ticket.get("subject"),
            "description": ticket.get("description"),
            "priority": normalize_priority(ticket.get("priority")),
            "labels": ticket.get("tags", [])
        }

    elif integration_type == IntegrationType.GITHUB:
        action = payload.get("action")
        if action != "opened":
            return None
        issue = payload.get("issue", {})
        return {
            "external_id": str(issue.get("id")),
            "external_key": f"GH-{issue.get('number')}",
            "external_url": issue.get("html_url"),
            "title": issue.get("title"),
            "description": issue.get("body"),
            "priority": "medium",  # GitHub issues carry no priority field
            "labels": [l.get("name") for l in issue.get("labels", [])]
        }

    elif integration_type == IntegrationType.GITLAB:
        event = payload.get("object_kind")
        if event != "issue":
            return None
        attrs = payload.get("object_attributes", {})
        if attrs.get("action") != "open":
            return None
        return {
            "external_id": str(attrs.get("id")),
            "external_key": f"GL-{attrs.get('iid')}",
            "external_url": attrs.get("url"),
            "title": attrs.get("title"),
            "description": attrs.get("description"),
            "priority": "medium",  # GitLab issues carry no priority field
            "labels": payload.get("labels", [])
        }

    elif integration_type == IntegrationType.TICKETHUB:
        event = payload.get("event", "")
        if "created" not in event:
            return None
        data = payload.get("data", payload)
        return {
            "external_id": str(data.get("id")),
            "external_key": data.get("key"),
            "external_url": f"https://tickethub.startdata.com.br/tickets/{data.get('id')}",
            "title": data.get("title"),
            "description": data.get("description"),
            "priority": normalize_priority(data.get("priority")),
            "labels": data.get("labels", [])
        }

    # Unknown integration type: ignore the event.
    return None
def normalize_priority(priority: Optional[str]) -> str:
    """Map provider priority labels/levels onto critical/high/medium/low.

    Accepts numeric strings ("1".."3") as well as common label spellings;
    anything unrecognized (including "4", "5") falls through to "low".
    """
    if not priority:
        return "medium"
    priority = str(priority).lower()
    if priority in ("1", "critical", "highest", "urgent"):
        return "critical"
    elif priority in ("2", "high"):
        return "high"
    elif priority in ("3", "medium", "normal"):
        return "medium"
    else:
        return "low"


# Webhook endpoints for each integration type. All of them parse the JSON
# body and delegate to process_webhook for integration lookup/normalization.

@router.post("/{org_id}/jira")
async def webhook_jira(
    org_id: int,
    request: Request,
    background_tasks: BackgroundTasks,
    db: AsyncSession = Depends(get_db)
):
    """Receive a JIRA Cloud webhook."""
    payload = await request.json()
    return await process_webhook(org_id, IntegrationType.JIRA_CLOUD, payload, background_tasks, db)


@router.post("/{org_id}/servicenow")
async def webhook_servicenow(
    org_id: int,
    request: Request,
    background_tasks: BackgroundTasks,
    db: AsyncSession = Depends(get_db)
):
    """Receive a ServiceNow webhook."""
    payload = await request.json()
    return await process_webhook(org_id, IntegrationType.SERVICENOW, payload, background_tasks, db)


@router.post("/{org_id}/zendesk")
async def webhook_zendesk(
    org_id: int,
    request: Request,
    background_tasks: BackgroundTasks,
    db: AsyncSession = Depends(get_db)
):
    """Receive a Zendesk webhook."""
    payload = await request.json()
    return await process_webhook(org_id, IntegrationType.ZENDESK, payload, background_tasks, db)


@router.post("/{org_id}/github")
async def webhook_github(
    org_id: int,
    request: Request,
    background_tasks: BackgroundTasks,
    x_github_event: Optional[str] = Header(None),
    db: AsyncSession = Depends(get_db)
):
    """Receive a GitHub webhook; only 'issues' events are forwarded."""
    payload = await request.json()
    if x_github_event != "issues":
        return {"status": "ignored", "message": "Not an issues event"}
    return await process_webhook(org_id, IntegrationType.GITHUB, payload, background_tasks, db)


@router.post("/{org_id}/gitlab")
async def webhook_gitlab(
    org_id: int,
    request: Request,
    background_tasks: BackgroundTasks,
    db: AsyncSession = Depends(get_db)
):
    """Receive a GitLab webhook."""
    payload = await request.json()
    return await process_webhook(org_id, IntegrationType.GITLAB, payload, background_tasks, db)


@router.post("/{org_id}/tickethub")
async def webhook_tickethub(
    org_id: int,
    request: Request,
    background_tasks: BackgroundTasks,
    db: AsyncSession = Depends(get_db)
):
    """Receive a TicketHub webhook."""
    payload = await request.json()
    return await process_webhook(org_id, IntegrationType.TICKETHUB, payload, background_tasks, db)


@router.post("/{org_id}/generic")
async def webhook_generic(
    org_id: int,
    request: Request,
    background_tasks: BackgroundTasks,
    db: AsyncSession = Depends(get_db)
):
    """Generic webhook for custom integrations.

    Maps payload keys directly onto the Issue model and always queues
    analysis (no integration lookup, no auto_analyze gate).
    """
    payload = await request.json()

    # BUG FIX: str(payload.get("id")) produced the literal string "None"
    # when the payload carried no id; keep the column NULL instead.
    raw_id = payload.get("id")

    issue = Issue(
        organization_id=org_id,
        external_id=str(raw_id) if raw_id is not None else None,
        external_key=payload.get("key"),
        external_url=payload.get("url"),
        source=payload.get("source", "generic"),
        title=payload.get("title"),
        description=payload.get("description"),
        priority=IssuePriority(normalize_priority(payload.get("priority"))),
        labels=payload.get("labels"),
        callback_url=payload.get("callback_url"),
        raw_payload=payload
    )
    db.add(issue)
    await db.flush()

    from app.api.issues import run_analysis
    from app.core.config import settings
    background_tasks.add_task(
        run_analysis,
        issue.id,
        settings.DATABASE_URL.replace("postgresql://", "postgresql+asyncpg://")
    )

    return {"status": "accepted", "issue_id": issue.id}


# --- app/core/config.py ---
"""Application configuration."""
import os
from functools import lru_cache
from pydantic_settings import BaseSettings


class Settings(BaseSettings):
    """Central settings object, sourced from environment variables / .env.

    NOTE(review): pydantic-settings already reads the environment itself, so
    the explicit os.getenv(...) defaults are redundant but harmless. The
    fallback values for SECRET_KEY and JWT_SECRET are insecure placeholders —
    deployment MUST override them.
    """

    # App
    APP_NAME: str = "JIRA AI Fixer"
    APP_VERSION: str = "2.0.0"
    DEBUG: bool = False
    SECRET_KEY: str = os.getenv("SECRET_KEY", "change-me-in-production-use-openssl-rand-hex-32")

    # Database
    DATABASE_URL: str = os.getenv("DATABASE_URL", "postgresql://postgres:postgres@postgres_database:5432/jira_fixer_v2")

    # Redis
    REDIS_URL: str = os.getenv("REDIS_URL", "redis://redis_redis:6379/0")

    # JWT
    JWT_SECRET: str = os.getenv("JWT_SECRET", "jwt-secret-change-in-production")
    JWT_ALGORITHM: str = "HS256"
    JWT_EXPIRE_MINUTES: int = 60 * 24  # 24 hours
    JWT_REFRESH_DAYS: int = 7

    # Email (Resend)
    RESEND_API_KEY: str = os.getenv("RESEND_API_KEY", "")
    EMAIL_FROM: str = os.getenv("EMAIL_FROM", "JIRA AI Fixer ")

    # External APIs
    OPENROUTER_API_KEY: str = os.getenv("OPENROUTER_API_KEY", "")
    GITEA_URL: str = os.getenv("GITEA_URL", "https://gitea.startdata.com.br")
    GITEA_TOKEN: str = os.getenv("GITEA_TOKEN", "")

    # OAuth (for integrations)
    JIRA_CLIENT_ID: str = os.getenv("JIRA_CLIENT_ID", "")
    JIRA_CLIENT_SECRET: str = os.getenv("JIRA_CLIENT_SECRET", "")
    GITHUB_CLIENT_ID: str = os.getenv("GITHUB_CLIENT_ID", "")
    GITHUB_CLIENT_SECRET: str = os.getenv("GITHUB_CLIENT_SECRET", "")

    class Config:
        env_file = ".env"


@lru_cache()
def get_settings() -> Settings:
    """Build the Settings once and cache it for the process lifetime."""
    return Settings()


settings = get_settings()
# --- app/core/database.py ---
"""Database setup with SQLAlchemy async."""
from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession, async_sessionmaker
from sqlalchemy.orm import declarative_base
from .config import settings

# Convert the sync URL scheme to the asyncpg driver.
DATABASE_URL = settings.DATABASE_URL.replace("postgresql://", "postgresql+asyncpg://")

engine = create_async_engine(DATABASE_URL, echo=settings.DEBUG, pool_size=10, max_overflow=20)
async_session = async_sessionmaker(engine, class_=AsyncSession, expire_on_commit=False)

Base = declarative_base()


async def get_db() -> AsyncSession:
    """FastAPI dependency yielding a session that commits on success.

    Commits after the request handler returns, rolls back on any exception.
    The explicit close() is redundant with the context manager but harmless.
    """
    async with async_session() as session:
        try:
            yield session
            await session.commit()
        except Exception:
            await session.rollback()
            raise
        finally:
            await session.close()


async def init_db():
    """Create all tables declared on Base (no migration support)."""
    async with engine.begin() as conn:
        await conn.run_sync(Base.metadata.create_all)


# --- app/core/security.py ---
"""Security utilities - JWT, password hashing, RBAC."""
from datetime import datetime, timedelta
from typing import Optional, Any
from jose import jwt, JWTError
from passlib.context import CryptContext
from .config import settings

pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")


class Role:
    """Role name constants; ordering is defined by ROLE_HIERARCHY."""
    VIEWER = "viewer"
    ANALYST = "analyst"
    MANAGER = "manager"
    ADMIN = "admin"
    OWNER = "owner"


# Higher number = more privileges; unknown roles map to 0 in has_permission.
ROLE_HIERARCHY = {
    Role.VIEWER: 1,
    Role.ANALYST: 2,
    Role.MANAGER: 3,
    Role.ADMIN: 4,
    Role.OWNER: 5,
}


def verify_password(plain_password: str, hashed_password: str) -> bool:
    """Check a plaintext password against its bcrypt hash."""
    return pwd_context.verify(plain_password, hashed_password)


def get_password_hash(password: str) -> str:
    """Hash a password with bcrypt."""
    return pwd_context.hash(password)


def create_access_token(data: dict, expires_delta: Optional[timedelta] = None) -> str:
    """Create a short-lived JWT access token carrying ``data`` as claims."""
    to_encode = data.copy()
    # NOTE: naive UTC timestamps; jose encodes them as epoch seconds.
    expire = datetime.utcnow() + (expires_delta or timedelta(minutes=settings.JWT_EXPIRE_MINUTES))
    to_encode.update({"exp": expire, "type": "access"})
    return jwt.encode(to_encode, settings.JWT_SECRET, algorithm=settings.JWT_ALGORITHM)


def create_refresh_token(data: dict) -> str:
    """Create a long-lived JWT refresh token carrying ``data`` as claims."""
    to_encode = data.copy()
    expire = datetime.utcnow() + timedelta(days=settings.JWT_REFRESH_DAYS)
    to_encode.update({"exp": expire, "type": "refresh"})
    return jwt.encode(to_encode, settings.JWT_SECRET, algorithm=settings.JWT_ALGORITHM)


def decode_token(token: str) -> Optional[dict]:
    """Decode and validate a JWT; returns None on any validation failure."""
    try:
        payload = jwt.decode(token, settings.JWT_SECRET, algorithms=[settings.JWT_ALGORITHM])
        return payload
    except JWTError:
        return None


def has_permission(user_role: str, required_role: str) -> bool:
    """Check if user_role has at least the required_role level."""
    return ROLE_HIERARCHY.get(user_role, 0) >= ROLE_HIERARCHY.get(required_role, 0)


# --- app/main.py ---
"""JIRA AI Fixer - Enterprise Issue Analysis Platform."""
from contextlib import asynccontextmanager
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from fastapi.staticfiles import StaticFiles
from fastapi.responses import FileResponse
import os

from app.core.config import settings
from app.core.database import init_db
from app.api import api_router


@asynccontextmanager
async def lifespan(app: FastAPI):
    """Create DB tables on startup; nothing to tear down on shutdown."""
    await init_db()
    yield


app = FastAPI(
    title=settings.APP_NAME,
    version=settings.APP_VERSION,
    description="Enterprise AI-powered issue analysis and automated fix generation",
    docs_url="/api/docs",
    redoc_url="/api/redoc",
    openapi_url="/api/openapi.json",
    lifespan=lifespan
)

# CORS — NOTE(review): allow_origins=["*"] combined with allow_credentials=True
# is rejected by browsers per the CORS spec and is overly permissive; restrict
# origins to the deployed frontend URL.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# API routes
app.include_router(api_router, prefix="/api")


@app.get("/api/health")
async def health():
    """Liveness probe for the orchestrator."""
    return {
        "status": "healthy",
        "service": "jira-ai-fixer",
        "version": settings.APP_VERSION
    }


# Serve static frontend (mounted only when the built bundle exists)
FRONTEND_DIR = "/app/frontend"
if os.path.exists(FRONTEND_DIR):
    app.mount("/assets", StaticFiles(directory=f"{FRONTEND_DIR}/assets"), name="assets")

    @app.get("/")
    async def serve_frontend():
        """Serve the SPA entry point."""
        return FileResponse(f"{FRONTEND_DIR}/index.html")

    @app.get("/{path:path}")
    async def serve_spa(path: str):
        """SPA catch-all: serve the file if present, else index.html.

        SECURITY FIX: the original joined user-controlled ``path`` straight
        into the filesystem path, allowing traversal sequences ("../") to
        read files outside FRONTEND_DIR. Resolve and confine the path first.
        """
        base = os.path.realpath(FRONTEND_DIR)
        file_path = os.path.realpath(os.path.join(base, path))
        if file_path.startswith(base + os.sep) and os.path.isfile(file_path):
            return FileResponse(file_path)
        return FileResponse(f"{FRONTEND_DIR}/index.html")
else:
    # Fallback: serve a basic info page when no frontend bundle is present.
    @app.get("/")
    async def root():
        return {
            "service": settings.APP_NAME,
            "version": settings.APP_VERSION,
            "docs": "/api/docs",
            "health": "/api/health"
        }
# --- app/models/__init__.py ---
from .user import User
from .organization import Organization, OrganizationMember
from .integration import Integration
from .issue import Issue
from .audit import AuditLog


# --- app/models/audit.py ---
"""Audit log for compliance and tracking."""
from datetime import datetime
from sqlalchemy import Column, Integer, String, DateTime, ForeignKey, JSON, Text
from sqlalchemy.orm import relationship
from app.core.database import Base


class AuditLog(Base):
    """Immutable record of a user/system action, for compliance trails."""

    __tablename__ = "audit_logs"

    id = Column(Integer, primary_key=True, index=True)
    organization_id = Column(Integer, ForeignKey("organizations.id"))
    user_id = Column(Integer, ForeignKey("users.id"))

    # Action details, e.g. user.login, issue.created, integration.updated
    action = Column(String(100), nullable=False, index=True)
    resource_type = Column(String(50))  # user, issue, integration, etc
    resource_id = Column(Integer)

    # Request context
    ip_address = Column(String(45))  # 45 chars fits a mapped IPv6 address
    user_agent = Column(String(500))

    # Before/after snapshots of the mutated resource
    old_values = Column(JSON)
    new_values = Column(JSON)
    description = Column(Text)

    # Outcome: "success" or "failure"
    success = Column(String(10), default="success")
    error_message = Column(String(500))

    created_at = Column(DateTime, default=datetime.utcnow, index=True)

    # BUG FIX: the original declared back_populates="audit_logs", but neither
    # Organization nor User defines an audit_logs relationship, which makes
    # SQLAlchemy mapper configuration raise at runtime. backref creates the
    # reverse attribute automatically while keeping the same ORM API.
    organization = relationship("Organization", backref="audit_logs")
    user = relationship("User", backref="audit_logs")


# --- app/models/integration.py ---
"""Integration model."""
from datetime import datetime
from sqlalchemy import Column, Integer, String, DateTime, ForeignKey, Enum, Boolean, Text
from sqlalchemy.orm import relationship
from app.core.database import Base
import enum


class IntegrationType(str, enum.Enum):
    JIRA_CLOUD = "jira_cloud"
    JIRA_SERVER = "jira_server"
    SERVICENOW = "servicenow"
    ZENDESK = "zendesk"
    GITHUB = "github"
    GITLAB = "gitlab"
    AZURE_DEVOPS = "azure_devops"
    TICKETHUB = "tickethub"
    CUSTOM_WEBHOOK = "custom_webhook"


class IntegrationStatus(str, enum.Enum):
    ACTIVE = "active"
    INACTIVE = "inactive"
    ERROR = "error"


class Integration(Base):
    """A configured connection between an organization and an issue source."""

    __tablename__ = "integrations"

    id = Column(Integer, primary_key=True, index=True)
    organization_id = Column(Integer, ForeignKey("organizations.id"), nullable=False)

    name = Column(String(255), nullable=False)
    type = Column(Enum(IntegrationType), nullable=False)
    status = Column(Enum(IntegrationStatus), default=IntegrationStatus.ACTIVE)

    # Connection config
    base_url = Column(String(1024))
    api_key = Column(Text)  # NOTE(review): comment says "Encrypted" but no encryption is visible — confirm
    oauth_token = Column(Text)
    webhook_secret = Column(String(255))
    callback_url = Column(String(1024))

    # Stats
    issues_processed = Column(Integer, default=0)
    last_sync_at = Column(DateTime)
    last_error = Column(Text)

    # Settings
    auto_analyze = Column(Boolean, default=True)

    created_at = Column(DateTime, default=datetime.utcnow)

    # Relations
    organization = relationship("Organization", back_populates="integrations")
    issues = relationship("Issue", back_populates="integration")

    @property
    def webhook_url(self) -> str:
        """Public URL external systems should POST webhooks to.

        BUG FIX: the original built "/api/webhook/{org}/{type.value}", but the
        routes are mounted under the plural prefix "/api/webhooks" and the
        JIRA Cloud route segment is "jira", not the enum value "jira_cloud".
        NOTE(review): the host is hardcoded; derive it from APP_URL ideally.
        """
        segment = "jira" if self.type == IntegrationType.JIRA_CLOUD else self.type.value
        return f"https://jira-fixer.startdata.com.br/api/webhooks/{self.organization_id}/{segment}"
# --- app/models/issue.py ---
"""Issue model."""
from datetime import datetime
from typing import Optional, List
from sqlalchemy import Column, Integer, String, Text, DateTime, Float, ForeignKey, Enum, JSON
from sqlalchemy.orm import relationship
from app.core.database import Base
import enum


class IssueStatus(str, enum.Enum):
    """Lifecycle states of an issue inside the analysis pipeline."""
    PENDING = "pending"
    ANALYZING = "analyzing"
    ANALYZED = "analyzed"
    PR_CREATED = "pr_created"
    COMPLETED = "completed"
    ERROR = "error"


class IssuePriority(str, enum.Enum):
    """Normalized priority scale shared across all integrations."""
    CRITICAL = "critical"
    HIGH = "high"
    MEDIUM = "medium"
    LOW = "low"


class Issue(Base):
    """An issue ingested from an external tracker plus its AI analysis results."""

    __tablename__ = "issues"

    id = Column(Integer, primary_key=True, index=True)
    organization_id = Column(Integer, ForeignKey("organizations.id"), nullable=False)
    integration_id = Column(Integer, ForeignKey("integrations.id"), nullable=True)

    # Reference back to the originating system
    external_id = Column(String(255), index=True)
    external_key = Column(String(100), index=True)  # e.g. JIRA-123, INC0001234
    external_url = Column(String(1024))
    source = Column(String(50))  # jira_cloud, servicenow, etc

    # Issue content
    title = Column(String(500), nullable=False)
    description = Column(Text)
    priority = Column(Enum(IssuePriority), default=IssuePriority.MEDIUM)
    labels = Column(JSON)

    # AI analysis output
    status = Column(Enum(IssueStatus), default=IssueStatus.PENDING)
    root_cause = Column(Text)
    suggested_fix = Column(Text)
    affected_files = Column(JSON)
    confidence = Column(Float)
    analysis_completed_at = Column(DateTime)
    error_message = Column(Text)

    # Pull request created from the suggested fix
    pr_url = Column(String(1024))
    pr_branch = Column(String(255))

    # Optional URL notified when analysis completes
    callback_url = Column(String(1024))
    callback_sent = Column(DateTime)

    # Bookkeeping
    raw_payload = Column(JSON)  # original webhook body, kept for debugging
    created_at = Column(DateTime, default=datetime.utcnow)
    updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow)

    # Relations
    organization = relationship("Organization", back_populates="issues")
    integration = relationship("Integration", back_populates="issues")


# --- app/models/organization.py ---
"""Organization model."""
from datetime import datetime
from sqlalchemy import Column, Integer, String, DateTime, ForeignKey, Enum, Text
from sqlalchemy.orm import relationship
from app.core.database import Base
import enum


class MemberRole(str, enum.Enum):
    """RBAC roles, lowest to highest privilege."""
    VIEWER = "viewer"
    ANALYST = "analyst"
    MANAGER = "manager"
    ADMIN = "admin"
    OWNER = "owner"


class Organization(Base):
    """A tenant; owns members, integrations and issues."""

    __tablename__ = "organizations"

    id = Column(Integer, primary_key=True, index=True)
    name = Column(String(255), nullable=False)
    slug = Column(String(100), unique=True, nullable=False, index=True)
    created_at = Column(DateTime, default=datetime.utcnow)

    # Children are deleted together with the organization.
    members = relationship("OrganizationMember", back_populates="organization", cascade="all, delete-orphan")
    integrations = relationship("Integration", back_populates="organization", cascade="all, delete-orphan")
    issues = relationship("Issue", back_populates="organization", cascade="all, delete-orphan")


class OrganizationMember(Base):
    """Join row linking a user to an organization with a role."""

    __tablename__ = "organization_members"

    id = Column(Integer, primary_key=True, index=True)
    organization_id = Column(Integer, ForeignKey("organizations.id"), nullable=False)
    user_id = Column(Integer, ForeignKey("users.id"), nullable=False)
    role = Column(Enum(MemberRole), default=MemberRole.VIEWER)
    joined_at = Column(DateTime, default=datetime.utcnow)

    # Relations
    organization = relationship("Organization", back_populates="members")
    user = relationship("User", back_populates="memberships")


# --- app/models/user.py ---
"""User model."""
from datetime import datetime
from sqlalchemy import Column, Integer, String, Boolean, DateTime
from sqlalchemy.orm import relationship
from app.core.database import Base


class User(Base):
    """An authenticated account; organization access goes via memberships."""

    __tablename__ = "users"

    id = Column(Integer, primary_key=True, index=True)
    email = Column(String(255), unique=True, index=True, nullable=False)
    hashed_password = Column(String(255), nullable=False)
    full_name = Column(String(255))
    is_active = Column(Boolean, default=True)
    is_superuser = Column(Boolean, default=False)
    created_at = Column(DateTime, default=datetime.utcnow)
    last_login = Column(DateTime)

    # Relations
    memberships = relationship("OrganizationMember", back_populates="user", cascade="all, delete-orphan")


# --- app/schemas/__init__.py ---
from .user import UserCreate, UserRead, UserUpdate, Token, TokenData
from .organization import OrganizationCreate, OrganizationRead, OrganizationUpdate, MemberCreate, MemberRead
from .integration import IntegrationCreate, IntegrationRead, IntegrationUpdate
from .issue import IssueCreate, IssueRead, IssueUpdate, IssueStats
# --- app/schemas/integration.py ---
"""Integration schemas."""
from datetime import datetime
from typing import Optional, List, Dict, Any
from pydantic import BaseModel
from app.models.integration import IntegrationType, IntegrationStatus


class IntegrationBase(BaseModel):
    name: str
    type: IntegrationType


class IntegrationCreate(IntegrationBase):
    """Payload for creating an integration.

    NOTE(review): several fields (auth_type, api_secret, sync_comments,
    create_prs, repositories) have no matching column on the Integration
    model shown in this repo — confirm they are handled elsewhere.
    """
    base_url: Optional[str] = None
    auth_type: str = "api_key"
    api_key: Optional[str] = None
    api_secret: Optional[str] = None
    webhook_url: Optional[str] = None
    callback_url: Optional[str] = None
    auto_analyze: bool = True
    sync_comments: bool = True
    create_prs: bool = True
    repositories: Optional[List[Dict[str, str]]] = None


class IntegrationUpdate(BaseModel):
    """Partial update; only provided fields are applied."""
    name: Optional[str] = None
    base_url: Optional[str] = None
    api_key: Optional[str] = None
    api_secret: Optional[str] = None
    callback_url: Optional[str] = None
    auto_analyze: Optional[bool] = None
    sync_comments: Optional[bool] = None
    create_prs: Optional[bool] = None
    repositories: Optional[List[Dict[str, str]]] = None
    status: Optional[IntegrationStatus] = None


class IntegrationRead(IntegrationBase):
    id: int
    organization_id: int
    status: IntegrationStatus
    base_url: Optional[str] = None
    webhook_url: Optional[str] = None  # served by the model's @property
    auto_analyze: bool
    issues_processed: int
    last_sync_at: Optional[datetime] = None
    last_error: Optional[str] = None
    created_at: datetime

    class Config:
        from_attributes = True


class OAuthCallback(BaseModel):
    code: str
    state: str


# --- app/schemas/issue.py ---
"""Issue schemas."""
from datetime import datetime
from typing import Optional, List, Dict, Any
from pydantic import BaseModel
from app.models.issue import IssueStatus, IssuePriority


class IssueBase(BaseModel):
    title: str
    description: Optional[str] = None
    priority: IssuePriority = IssuePriority.MEDIUM


class IssueCreate(IssueBase):
    external_id: Optional[str] = None
    external_key: Optional[str] = None
    external_url: Optional[str] = None
    source: Optional[str] = None
    labels: Optional[List[str]] = None
    callback_url: Optional[str] = None
    raw_payload: Optional[Dict[str, Any]] = None


class IssueUpdate(BaseModel):
    title: Optional[str] = None
    description: Optional[str] = None
    priority: Optional[IssuePriority] = None
    status: Optional[IssueStatus] = None
    labels: Optional[List[str]] = None


class IssueRead(IssueBase):
    id: int
    organization_id: int
    integration_id: Optional[int] = None
    external_id: Optional[str] = None
    external_key: Optional[str] = None
    external_url: Optional[str] = None
    source: Optional[str] = None
    labels: Optional[List[str]] = None

    status: IssueStatus
    root_cause: Optional[str] = None
    affected_files: Optional[List[str]] = None
    suggested_fix: Optional[str] = None
    confidence: Optional[float] = None

    pr_url: Optional[str] = None
    pr_branch: Optional[str] = None
    # NOTE(review): pr_status, sla_deadline and sla_breached have no matching
    # columns on the Issue model shown here; the defaults keep serialization
    # from failing, but confirm whether these fields were meant to exist.
    pr_status: Optional[str] = None

    sla_deadline: Optional[datetime] = None
    sla_breached: bool = False

    created_at: datetime
    analysis_completed_at: Optional[datetime] = None

    class Config:
        from_attributes = True


class IssueStats(BaseModel):
    total: int
    pending: int
    analyzing: int
    analyzed: int
    pr_created: int
    completed: int
    error: int
    avg_confidence: float
    avg_analysis_time_seconds: Optional[float] = None
    by_source: Dict[str, int]
    by_priority: Dict[str, int]
    sla_breached: int


class IssueComment(BaseModel):
    author: str
    content: str
    author_type: str = "user"


# --- app/schemas/organization.py ---
"""Organization schemas."""
from datetime import datetime
from typing import Optional, List
# BUG FIX: EmailStr was imported at the *bottom* of this module, after
# MemberCreate references it — pydantic evaluates the annotation at class
# creation time, so importing the module raised NameError. Import it up top.
from pydantic import BaseModel, EmailStr
from app.models.organization import MemberRole


class OrganizationBase(BaseModel):
    name: str
    description: Optional[str] = None


class OrganizationCreate(OrganizationBase):
    slug: str


class OrganizationUpdate(BaseModel):
    name: Optional[str] = None
    description: Optional[str] = None
    logo_url: Optional[str] = None
    slack_webhook_url: Optional[str] = None
    teams_webhook_url: Optional[str] = None


class OrganizationRead(OrganizationBase):
    id: int
    slug: str
    logo_url: Optional[str] = None
    # BUG FIX: plan and is_active were required fields, but the Organization
    # model declares neither column, so from_attributes serialization always
    # failed. Defaults keep responses working; confirm the intended schema.
    plan: str = "free"
    is_active: bool = True
    created_at: datetime
    member_count: Optional[int] = None

    class Config:
        from_attributes = True


class MemberCreate(BaseModel):
    email: EmailStr
    role: MemberRole = MemberRole.ANALYST


class MemberRead(BaseModel):
    id: int
    user_id: int
    role: MemberRole
    joined_at: datetime
    user_email: Optional[str] = None
    user_name: Optional[str] = None

    class Config:
        from_attributes = True


# --- app/schemas/user.py ---
"""User schemas."""
from datetime import datetime
from typing import Optional
from pydantic import BaseModel, EmailStr


class UserBase(BaseModel):
    email: EmailStr
    full_name: Optional[str] = None


class UserCreate(UserBase):
    password: str


class UserUpdate(BaseModel):
    """Partial profile update; omitted fields are left untouched."""
    email: Optional[EmailStr] = None
    full_name: Optional[str] = None
    avatar_url: Optional[str] = None
    password: Optional[str] = None


class UserRead(UserBase):
    id: int
    avatar_url: Optional[str] = None
    is_active: bool
    # BUG FIX: email_verified was required, but the User model has no such
    # column, so every response validation failed. Default to False; confirm
    # whether verification was meant to be implemented.
    email_verified: bool = False
    created_at: datetime
    last_login: Optional[datetime] = None

    class Config:
        from_attributes = True


class Token(BaseModel):
    access_token: str
    refresh_token: str
    token_type: str = "bearer"


class TokenData(BaseModel):
    user_id: int
    email: str
# --- app/services/analysis.py ---
# NOTE(review): this class relies on the module-level imports of analysis.py
# (httpx, json, and settings from app.core.config); httpx and settings are
# referenced at call time only. The `analyze` classmethod continues beyond
# this chunk of the file.

class AnalysisService:
    """AI-powered issue analysis via OpenRouter, with code context from Gitea."""

    OPENROUTER_API = "https://openrouter.ai/api/v1/chat/completions"
    MODEL = "meta-llama/llama-3.3-70b-instruct:free"

    @classmethod
    async def fetch_repository_files(cls, repo: str, path: str = "") -> List[Dict[str, str]]:
        """Recursively fetch up to 20 source files from a Gitea repository.

        Only files with recognized source extensions are collected, each
        truncated to 10 kB. All errors are swallowed — context gathering is
        best-effort and must not fail the analysis.
        """
        files: List[Dict[str, str]] = []
        async with httpx.AsyncClient() as client:
            try:
                url = f"{settings.GITEA_URL}/api/v1/repos/{repo}/contents/{path}"
                headers = {}
                if settings.GITEA_TOKEN:
                    headers["Authorization"] = f"token {settings.GITEA_TOKEN}"

                response = await client.get(url, headers=headers)
                if response.status_code != 200:
                    return files

                for item in response.json():
                    # PERF FIX: stop descending once the 20-file cap is hit;
                    # the original walked the entire tree and truncated only
                    # at the very end, fetching content it then discarded.
                    if len(files) >= 20:
                        break
                    if item["type"] == "file" and item["name"].endswith((".cbl", ".cob", ".py", ".java", ".js", ".ts")):
                        content_resp = await client.get(item["download_url"], headers=headers)
                        if content_resp.status_code == 200:
                            files.append({
                                "path": item["path"],
                                "content": content_resp.text[:10000]  # Limit size
                            })
                    elif item["type"] == "dir":
                        sub_files = await cls.fetch_repository_files(repo, item["path"])
                        files.extend(sub_files)
            except Exception as e:
                print(f"Error fetching repo: {e}")

        return files[:20]  # Limit to 20 files

    @classmethod
    def build_prompt(cls, issue: Dict[str, Any], files: List[Dict[str, str]]) -> str:
        """Build the analysis prompt for the LLM.

        ``issue`` supplies title/description/priority (missing keys render as
        "N/A"); ``files`` are embedded as fenced code blocks for context.
        """
        files_context = "\n\n".join([
            f"### {f['path']}\n```\n{f['content']}\n```"
            for f in files
        ])

        return f"""You are an expert software engineer analyzing a support issue.

## Issue Details
**Title:** {issue.get('title', 'N/A')}
**Description:** {issue.get('description', 'N/A')}
**Priority:** {issue.get('priority', 'N/A')}

## Source Code Files
{files_context}

## Your Task
Analyze the issue and identify:
1. Root cause of the problem
2. Which files are affected
3. Suggested code fix

## Response Format (JSON)
{{
    "root_cause": "Detailed explanation of what's causing the issue",
    "affected_files": ["file1.py", "file2.py"],
    "suggested_fix": "Code changes needed to fix the issue",
    "confidence": 0.85,
    "explanation": "Step-by-step explanation of the fix"
}}

Respond ONLY with valid JSON."""
"temperature": 0.2, + "max_tokens": 2000 + }, + timeout=120 + ) + + if response.status_code == 200: + data = response.json() + content = data["choices"][0]["message"]["content"] + + # Parse JSON from response + try: + # Handle markdown code blocks + if "```json" in content: + content = content.split("```json")[1].split("```")[0] + elif "```" in content: + content = content.split("```")[1].split("```")[0] + + return json.loads(content.strip()) + except json.JSONDecodeError: + return { + "root_cause": content[:500], + "affected_files": [], + "suggested_fix": "", + "confidence": 0.3, + "explanation": "Could not parse structured response" + } + else: + return { + "root_cause": f"API error: {response.status_code}", + "affected_files": [], + "suggested_fix": "", + "confidence": 0, + "explanation": response.text[:500] + } + except Exception as e: + return { + "root_cause": f"Analysis error: {str(e)}", + "affected_files": [], + "suggested_fix": "", + "confidence": 0, + "explanation": str(e) + } + + @classmethod + async def create_pull_request( + cls, + repo: str, + branch: str, + title: str, + description: str, + file_changes: List[Dict[str, str]] + ) -> Optional[str]: + """Create a pull request with suggested fix.""" + if not settings.GITEA_TOKEN: + return None + + async with httpx.AsyncClient() as client: + headers = {"Authorization": f"token {settings.GITEA_TOKEN}"} + + try: + # 1. Get default branch + repo_resp = await client.get( + f"{settings.GITEA_URL}/api/v1/repos/{repo}", + headers=headers + ) + if repo_resp.status_code != 200: + return None + default_branch = repo_resp.json().get("default_branch", "main") + + # 2. Get latest commit SHA + ref_resp = await client.get( + f"{settings.GITEA_URL}/api/v1/repos/{repo}/git/refs/heads/{default_branch}", + headers=headers + ) + if ref_resp.status_code != 200: + return None + sha = ref_resp.json()["object"]["sha"] + + # 3. 
class AuditService:
    """Audit-trail writer: one ``AuditLog`` row per significant action."""

    @classmethod
    async def log(
        cls,
        db: AsyncSession,
        action: str,
        user_id: Optional[int] = None,
        organization_id: Optional[int] = None,
        resource_type: Optional[str] = None,
        resource_id: Optional[int] = None,
        old_values: Optional[Dict[str, Any]] = None,
        new_values: Optional[Dict[str, Any]] = None,
        description: Optional[str] = None,
        ip_address: Optional[str] = None,
        user_agent: Optional[str] = None,
        success: str = "success",
        error_message: Optional[str] = None
    ):
        """Persist a single audit entry and return it.

        The entry is added to the session and flushed (so defaults and the
        primary key are populated) but NOT committed — the caller owns the
        transaction boundary.
        """
        fields = {
            "action": action,
            "user_id": user_id,
            "organization_id": organization_id,
            "resource_type": resource_type,
            "resource_id": resource_id,
            "old_values": old_values,
            "new_values": new_values,
            "description": description,
            "ip_address": ip_address,
            "user_agent": user_agent,
            "success": success,
            "error_message": error_message,
        }
        entry = AuditLog(**fields)
        db.add(entry)
        await db.flush()
        return entry
class EmailService:
    """Transactional email via the Resend HTTP API.

    All methods are best-effort and never raise: a missing API key or any
    network/API failure results in ``False`` so email can never break the
    calling workflow.
    """

    RESEND_API = "https://api.resend.com/emails"

    @classmethod
    async def send(
        cls,
        to: List[str],
        subject: str,
        html: str,
        text: Optional[str] = None
    ) -> bool:
        """Send one email; return True on any 2xx response, else False."""
        if not settings.RESEND_API_KEY:
            return False

        payload = {
            "from": settings.EMAIL_FROM,
            "to": to,
            "subject": subject,
            "html": html,
        }
        # Fix: the original always sent "text": null when no plain-text body
        # was given; omit the key instead so the API never sees a null field.
        if text is not None:
            payload["text"] = text

        # Fix: bounded timeout (original request could hang indefinitely).
        async with httpx.AsyncClient(timeout=30) as client:
            try:
                response = await client.post(
                    cls.RESEND_API,
                    headers={
                        "Authorization": f"Bearer {settings.RESEND_API_KEY}",
                        "Content-Type": "application/json"
                    },
                    json=payload
                )
                # Fix: accept any 2xx, not just 200.
                return response.is_success
            except Exception:
                return False

    @classmethod
    async def send_welcome(cls, email: str, name: str, org_name: str) -> bool:
        """Welcome email when a user is added to an organization."""
        # NOTE(review): the original rich inline styling was lost in
        # extraction; this markup preserves the same text content and link
        # labels — confirm against the intended design.
        html = f"""
        <div style="font-family: sans-serif; max-width: 600px; margin: 0 auto;">
            <h1>Welcome to JIRA AI Fixer! πŸ€–</h1>
            <p>Hi {name},</p>
            <p>You've been added to <strong>{org_name}</strong>.</p>
            <p>JIRA AI Fixer automatically analyzes support issues and suggests code fixes using AI.</p>
            <p><a href="{settings.APP_URL}">Get Started</a></p>
            <p>β€” The JIRA AI Fixer Team</p>
        </div>
        """
        # Fix: propagate the delivery result instead of discarding it.
        return await cls.send([email], f"Welcome to {org_name} on JIRA AI Fixer", html)

    @classmethod
    async def send_analysis_complete(
        cls,
        email: str,
        issue_key: str,
        confidence: float,
        pr_url: Optional[str]
    ) -> bool:
        """Notify that an issue analysis finished, optionally with a PR link."""
        pr_line = f'<p>Pull Request: <a href="{pr_url}">{pr_url}</a></p>' if pr_url else ''
        html = f"""
        <div style="font-family: sans-serif; max-width: 600px; margin: 0 auto;">
            <h1>Analysis Complete βœ…</h1>
            <p>Issue <strong>{issue_key}</strong> has been analyzed.</p>
            <p>Confidence: {confidence:.0%}</p>
            {pr_line}
            <p><a href="{settings.APP_URL}">View Details</a></p>
        </div>
        """
        return await cls.send([email], f"Analysis Complete: {issue_key}", html)

    @classmethod
    async def send_weekly_digest(cls, email: str, org_name: str, stats: dict) -> bool:
        """Weekly per-organization summary; ``stats`` keys default to 0."""
        html = f"""
        <div style="font-family: sans-serif; max-width: 600px; margin: 0 auto;">
            <h1>Weekly Digest πŸ“Š</h1>
            <p>Here's what happened in {org_name} this week:</p>
            <p>Issues Analyzed: {stats.get('analyzed', 0)}</p>
            <p>PRs Created: {stats.get('prs', 0)}</p>
            <p>Avg Confidence: {stats.get('confidence', 0):.0%}</p>
            <p><a href="{settings.APP_URL}">View Full Report</a></p>
        </div>
        """
        return await cls.send([email], f"Weekly Digest: {org_name}", html)
+ + + diff --git a/frontend/package.json b/frontend/package.json new file mode 100644 index 0000000..f7a13bb --- /dev/null +++ b/frontend/package.json @@ -0,0 +1,30 @@ +{ + "name": "jira-ai-fixer-portal", + "private": true, + "version": "2.0.0", + "type": "module", + "scripts": { + "dev": "vite", + "build": "vite build", + "preview": "vite preview" + }, + "dependencies": { + "react": "^18.2.0", + "react-dom": "^18.2.0", + "react-router-dom": "^6.22.0", + "@tanstack/react-query": "^5.17.0", + "axios": "^1.6.5", + "recharts": "^2.10.4", + "date-fns": "^3.2.0", + "clsx": "^2.1.0" + }, + "devDependencies": { + "@types/react": "^18.2.48", + "@types/react-dom": "^18.2.18", + "@vitejs/plugin-react": "^4.2.1", + "autoprefixer": "^10.4.17", + "postcss": "^8.4.33", + "tailwindcss": "^3.4.1", + "vite": "^5.0.12" + } +} diff --git a/frontend/postcss.config.js b/frontend/postcss.config.js new file mode 100644 index 0000000..2b75bd8 --- /dev/null +++ b/frontend/postcss.config.js @@ -0,0 +1,6 @@ +export default { + plugins: { + tailwindcss: {}, + autoprefixer: {} + } +} diff --git a/frontend/src/App.jsx b/frontend/src/App.jsx new file mode 100644 index 0000000..ebf1e33 --- /dev/null +++ b/frontend/src/App.jsx @@ -0,0 +1,40 @@ +import { Routes, Route, Navigate } from 'react-router-dom'; +import { useAuth } from './context/AuthContext'; +import Layout from './components/Layout'; +import Login from './pages/Login'; +import Register from './pages/Register'; +import Dashboard from './pages/Dashboard'; +import Issues from './pages/Issues'; +import IssueDetail from './pages/IssueDetail'; +import Integrations from './pages/Integrations'; +import Team from './pages/Team'; +import Reports from './pages/Reports'; +import Settings from './pages/Settings'; + +function PrivateRoute({ children }) { + const { user, loading } = useAuth(); + if (loading) return
Loading...
; + return user ? children : ; +} + +export default function App() { + return ( + + } /> + } /> + + + + }> + } /> + } /> + } /> + } /> + } /> + } /> + } /> + + + ); +} diff --git a/frontend/src/components/Layout.jsx b/frontend/src/components/Layout.jsx new file mode 100644 index 0000000..a3c9dcf --- /dev/null +++ b/frontend/src/components/Layout.jsx @@ -0,0 +1,116 @@ +import { Link, Outlet, useLocation, useNavigate } from 'react-router-dom'; +import { useAuth } from '../context/AuthContext'; +import { useQuery } from '@tanstack/react-query'; +import { organizations } from '../services/api'; +import { useState } from 'react'; +import clsx from 'clsx'; + +const navItems = [ + { path: '/', label: 'Dashboard', icon: 'πŸ“Š' }, + { path: '/issues', label: 'Issues', icon: '🎫' }, + { path: '/integrations', label: 'Integrations', icon: 'πŸ”Œ' }, + { path: '/team', label: 'Team', icon: 'πŸ‘₯' }, + { path: '/reports', label: 'Reports', icon: 'πŸ“ˆ' }, + { path: '/settings', label: 'Settings', icon: 'βš™οΈ' } +]; + +export default function Layout() { + const { user, logout, currentOrg, selectOrg } = useAuth(); + const location = useLocation(); + const navigate = useNavigate(); + const [showOrgMenu, setShowOrgMenu] = useState(false); + + const { data: orgs } = useQuery({ + queryKey: ['organizations'], + queryFn: () => organizations.list() + }); + + return ( +
+ {/* Sidebar */} + + + {/* Main content */} +
+ +
+
+ ); +} diff --git a/frontend/src/context/AuthContext.jsx b/frontend/src/context/AuthContext.jsx new file mode 100644 index 0000000..cd81da7 --- /dev/null +++ b/frontend/src/context/AuthContext.jsx @@ -0,0 +1,61 @@ +import { createContext, useContext, useState, useEffect } from 'react'; +import { auth, users } from '../services/api'; + +const AuthContext = createContext(null); + +export function AuthProvider({ children }) { + const [user, setUser] = useState(null); + const [loading, setLoading] = useState(true); + const [currentOrg, setCurrentOrg] = useState(null); + + useEffect(() => { + const token = localStorage.getItem('access_token'); + if (token) { + loadUser(); + } else { + setLoading(false); + } + }, []); + + const loadUser = async () => { + try { + const res = await users.me(); + setUser(res.data); + const savedOrg = localStorage.getItem('current_org'); + if (savedOrg) setCurrentOrg(JSON.parse(savedOrg)); + } catch (e) { + localStorage.removeItem('access_token'); + localStorage.removeItem('refresh_token'); + } finally { + setLoading(false); + } + }; + + const login = async (email, password) => { + const res = await auth.login(email, password); + localStorage.setItem('access_token', res.data.access_token); + localStorage.setItem('refresh_token', res.data.refresh_token); + await loadUser(); + }; + + const logout = () => { + localStorage.removeItem('access_token'); + localStorage.removeItem('refresh_token'); + localStorage.removeItem('current_org'); + setUser(null); + setCurrentOrg(null); + }; + + const selectOrg = (org) => { + setCurrentOrg(org); + localStorage.setItem('current_org', JSON.stringify(org)); + }; + + return ( + + {children} + + ); +} + +export const useAuth = () => useContext(AuthContext); diff --git a/frontend/src/index.css b/frontend/src/index.css new file mode 100644 index 0000000..c30b72f --- /dev/null +++ b/frontend/src/index.css @@ -0,0 +1,28 @@ +@tailwind base; +@tailwind components; +@tailwind utilities; + +@layer base { + body { + 
@apply antialiased; + font-family: 'Inter', system-ui, sans-serif; + } +} + +@layer components { + .btn { + @apply px-4 py-2 rounded-lg font-medium transition-colors; + } + .btn-primary { + @apply bg-primary-600 hover:bg-primary-700 text-white; + } + .btn-secondary { + @apply bg-gray-700 hover:bg-gray-600 text-white; + } + .input { + @apply w-full px-4 py-2 bg-gray-800 border border-gray-700 rounded-lg focus:outline-none focus:ring-2 focus:ring-primary-500 focus:border-transparent; + } + .card { + @apply bg-gray-800 border border-gray-700 rounded-xl p-6; + } +} diff --git a/frontend/src/main.jsx b/frontend/src/main.jsx new file mode 100644 index 0000000..bfaaf17 --- /dev/null +++ b/frontend/src/main.jsx @@ -0,0 +1,25 @@ +import React from 'react'; +import ReactDOM from 'react-dom/client'; +import { BrowserRouter } from 'react-router-dom'; +import { QueryClient, QueryClientProvider } from '@tanstack/react-query'; +import { AuthProvider } from './context/AuthContext'; +import App from './App'; +import './index.css'; + +const queryClient = new QueryClient({ + defaultOptions: { + queries: { staleTime: 30000, retry: 1 } + } +}); + +ReactDOM.createRoot(document.getElementById('root')).render( + + + + + + + + + +); diff --git a/frontend/src/pages/Dashboard.jsx b/frontend/src/pages/Dashboard.jsx new file mode 100644 index 0000000..718c1ea --- /dev/null +++ b/frontend/src/pages/Dashboard.jsx @@ -0,0 +1,190 @@ +import { useQuery } from '@tanstack/react-query'; +import { useAuth } from '../context/AuthContext'; +import { issues, reports } from '../services/api'; +import { AreaChart, Area, BarChart, Bar, PieChart, Pie, Cell, XAxis, YAxis, Tooltip, ResponsiveContainer } from 'recharts'; + +const COLORS = ['#6366f1', '#22c55e', '#f59e0b', '#ef4444', '#8b5cf6']; + +export default function Dashboard() { + const { currentOrg } = useAuth(); + + const { data: stats } = useQuery({ + queryKey: ['issues-stats', currentOrg?.id], + queryFn: () => issues.stats(currentOrg.id), + enabled: 
!!currentOrg + }); + + const { data: report } = useQuery({ + queryKey: ['report-summary', currentOrg?.id], + queryFn: () => reports.summary(currentOrg.id, 14), + enabled: !!currentOrg + }); + + if (!currentOrg) { + return ( +
+ 🏒 +

Select an organization

+

Choose an organization from the sidebar to get started

+
+ ); + } + + const s = stats?.data || {}; + const r = report?.data || {}; + + const statusData = [ + { name: 'Pending', value: s.pending || 0 }, + { name: 'Analyzing', value: s.analyzing || 0 }, + { name: 'Analyzed', value: s.analyzed || 0 }, + { name: 'PR Created', value: s.pr_created || 0 }, + { name: 'Error', value: s.error || 0 } + ].filter(d => d.value > 0); + + const sourceData = Object.entries(s.by_source || {}).map(([name, value]) => ({ name, value })); + + return ( +
+

Dashboard

+ + {/* Stats cards */} +
+
+
+
+

Total Issues

+

{s.total || 0}

+
+
+ πŸ“‹ +
+
+
+ +
+
+
+

Analyzed

+

{s.analyzed || 0}

+
+
+ βœ… +
+
+
+ +
+
+
+

PRs Created

+

{s.pr_created || 0}

+
+
+ πŸ”€ +
+
+
+ +
+
+
+

Avg Confidence

+

+ {s.avg_confidence ? `${(s.avg_confidence * 100).toFixed(0)}%` : 'N/A'} +

+
+
+ 🎯 +
+
+
+
+ + {/* Charts */} +
+ {/* Trend chart */} +
+

Issues Trend (14 days)

+
+ + + + + + + + + +
+
+ + {/* Status distribution */} +
+

Status Distribution

+
+ {statusData.length > 0 ? ( + + + `${name} ${(percent * 100).toFixed(0)}%`} + > + {statusData.map((entry, index) => ( + + ))} + + + + + ) : ( +

No data yet

+ )} +
+
+
+ + {/* By source */} +
+

Issues by Source

+
+ {sourceData.length > 0 ? ( + + + + + + + + + ) : ( +

No data yet

+ )} +
+
+
+ ); +} diff --git a/frontend/src/pages/Integrations.jsx b/frontend/src/pages/Integrations.jsx new file mode 100644 index 0000000..f767ab0 --- /dev/null +++ b/frontend/src/pages/Integrations.jsx @@ -0,0 +1,225 @@ +import { useState } from 'react'; +import { useQuery, useMutation, useQueryClient } from '@tanstack/react-query'; +import { useAuth } from '../context/AuthContext'; +import { integrations } from '../services/api'; +import clsx from 'clsx'; + +const integrationTypes = [ + { type: 'jira_cloud', name: 'JIRA Cloud', icon: 'πŸ”΅', desc: 'Atlassian JIRA Cloud' }, + { type: 'servicenow', name: 'ServiceNow', icon: 'βš™οΈ', desc: 'ServiceNow ITSM' }, + { type: 'zendesk', name: 'Zendesk', icon: 'πŸ’š', desc: 'Zendesk Support' }, + { type: 'github', name: 'GitHub', icon: 'πŸ™', desc: 'GitHub Issues' }, + { type: 'gitlab', name: 'GitLab', icon: '🦊', desc: 'GitLab Issues' }, + { type: 'azure_devops', name: 'Azure DevOps', icon: 'πŸ”·', desc: 'Azure Boards' }, + { type: 'tickethub', name: 'TicketHub', icon: '🎫', desc: 'TicketHub' }, + { type: 'custom_webhook', name: 'Custom Webhook', icon: 'πŸ”—', desc: 'Custom integration' } +]; + +export default function Integrations() { + const { currentOrg } = useAuth(); + const queryClient = useQueryClient(); + const [showModal, setShowModal] = useState(false); + const [selectedType, setSelectedType] = useState(null); + const [form, setForm] = useState({}); + + const { data, isLoading } = useQuery({ + queryKey: ['integrations', currentOrg?.id], + queryFn: () => integrations.list(currentOrg.id), + enabled: !!currentOrg + }); + + const createMutation = useMutation({ + mutationFn: (data) => integrations.create(currentOrg.id, data), + onSuccess: () => { + queryClient.invalidateQueries(['integrations', currentOrg?.id]); + setShowModal(false); + setForm({}); + setSelectedType(null); + } + }); + + const deleteMutation = useMutation({ + mutationFn: (id) => integrations.delete(currentOrg.id, id), + onSuccess: () => 
queryClient.invalidateQueries(['integrations', currentOrg?.id]) + }); + + if (!currentOrg) return
Select an organization
; + + const list = data?.data || []; + + const handleCreate = () => { + createMutation.mutate({ + name: form.name, + type: selectedType.type, + base_url: form.base_url, + api_key: form.api_key, + callback_url: form.callback_url + }); + }; + + return ( +
+
+

Integrations

+ +
+ + {/* Existing integrations */} +
+ {list.map(int => { + const typeInfo = integrationTypes.find(t => t.type === int.type); + return ( +
+
+
+ {typeInfo?.icon || 'πŸ”—'} +
+

{int.name}

+

{typeInfo?.name}

+
+
+ + {int.status} + +
+ +
+
+ Issues Processed + {int.issues_processed || 0} +
+ {int.last_sync_at && ( +
+ Last Event + {new Date(int.last_sync_at).toLocaleDateString()} +
+ )} +
+ +
+

Webhook URL

+ {int.webhook_url} +
+ +
+ + +
+
+ ); + })} + + {list.length === 0 && !isLoading && ( +
+ πŸ”Œ +

No integrations yet

+

Connect your first issue tracker to get started

+
+ )} +
+ + {/* Add integration modal */} + {showModal && ( +
+
+
+

+ {selectedType ? `Configure ${selectedType.name}` : 'Add Integration'} +

+ +
+ +
+ {!selectedType ? ( +
+ {integrationTypes.map(type => ( + + ))} +
+ ) : ( +
+
+ + setForm({...form, name: e.target.value})} + placeholder={`My ${selectedType.name}`} + className="input" + /> +
+ +
+ + setForm({...form, base_url: e.target.value})} + placeholder="https://your-instance.atlassian.net" + className="input" + /> +
+ +
+ + setForm({...form, api_key: e.target.value})} + placeholder="Your API key" + className="input" + /> +
+ +
+ + setForm({...form, callback_url: e.target.value})} + placeholder="https://your-instance.atlassian.net/rest/api/2" + className="input" + /> +
+ +
+ + +
+
+ )} +
+
+
+ )} +
+ ); +} diff --git a/frontend/src/pages/IssueDetail.jsx b/frontend/src/pages/IssueDetail.jsx new file mode 100644 index 0000000..5f1f541 --- /dev/null +++ b/frontend/src/pages/IssueDetail.jsx @@ -0,0 +1,188 @@ +import { useParams, Link } from 'react-router-dom'; +import { useQuery, useMutation, useQueryClient } from '@tanstack/react-query'; +import { useAuth } from '../context/AuthContext'; +import { issues } from '../services/api'; +import clsx from 'clsx'; + +export default function IssueDetail() { + const { id } = useParams(); + const { currentOrg } = useAuth(); + const queryClient = useQueryClient(); + + const { data, isLoading } = useQuery({ + queryKey: ['issue', currentOrg?.id, id], + queryFn: () => issues.get(currentOrg.id, id), + enabled: !!currentOrg + }); + + const reanalyzeMutation = useMutation({ + mutationFn: () => issues.reanalyze(currentOrg.id, id), + onSuccess: () => { + queryClient.invalidateQueries(['issue', currentOrg?.id, id]); + } + }); + + if (!currentOrg) return null; + if (isLoading) return
Loading...
; + + const issue = data?.data; + if (!issue) return
Issue not found
; + + return ( +
+
+ ← Back to Issues +
+ +
+
+
+ {issue.external_key || `#${issue.id}`} + + {issue.status} + +
+

{issue.title}

+

+ Source: {issue.source} β€’ Created: {new Date(issue.created_at).toLocaleString()} +

+
+
+ {issue.external_url && ( + + View Original β†’ + + )} + +
+
+ +
+ {/* Main content */} +
+ {/* Description */} +
+

Description

+
+                            {issue.description || 'No description'}
+                        
+
+ + {/* Analysis */} + {issue.root_cause && ( +
+

πŸ” Root Cause Analysis

+
+                                {issue.root_cause}
+                            
+
+ )} + + {/* Affected Files */} + {issue.affected_files?.length > 0 && ( +
+

πŸ“ Affected Files

+
+ {issue.affected_files.map(file => ( + + {file} + + ))} +
+
+ )} + + {/* Suggested Fix */} + {issue.suggested_fix && ( +
+

πŸ”§ Suggested Fix

+
+                                {issue.suggested_fix}
+                            
+
+ )} +
+ + {/* Sidebar */} +
+ {/* Confidence */} + {issue.confidence && ( +
+

Confidence

+
+
+ {(issue.confidence * 100).toFixed(0)}% +
+
+
+
+
+
+ )} + + {/* PR Info */} + {issue.pr_url && ( +
+

πŸ”€ Pull Request

+

Branch: {issue.pr_branch}

+ + View PR β†’ + +
+ )} + + {/* Labels */} + {issue.labels?.length > 0 && ( +
+

Labels

+
+ {issue.labels.map(label => ( + + {label} + + ))} +
+
+ )} + + {/* Timeline */} +
+

Timeline

+
+
+ Created + {new Date(issue.created_at).toLocaleString()} +
+ {issue.analysis_completed_at && ( +
+ Analyzed + {new Date(issue.analysis_completed_at).toLocaleString()} +
+ )} +
+
+
+
+
+ ); +} diff --git a/frontend/src/pages/Issues.jsx b/frontend/src/pages/Issues.jsx new file mode 100644 index 0000000..22ba37d --- /dev/null +++ b/frontend/src/pages/Issues.jsx @@ -0,0 +1,134 @@ +import { useState } from 'react'; +import { useQuery } from '@tanstack/react-query'; +import { Link } from 'react-router-dom'; +import { useAuth } from '../context/AuthContext'; +import { issues } from '../services/api'; +import clsx from 'clsx'; + +const statusColors = { + pending: 'bg-yellow-500/20 text-yellow-400', + analyzing: 'bg-blue-500/20 text-blue-400', + analyzed: 'bg-green-500/20 text-green-400', + pr_created: 'bg-purple-500/20 text-purple-400', + completed: 'bg-gray-500/20 text-gray-400', + error: 'bg-red-500/20 text-red-400' +}; + +const priorityColors = { + critical: 'bg-red-500/20 text-red-400', + high: 'bg-orange-500/20 text-orange-400', + medium: 'bg-yellow-500/20 text-yellow-400', + low: 'bg-green-500/20 text-green-400' +}; + +const sourceIcons = { + jira_cloud: 'πŸ”΅', servicenow: 'βš™οΈ', zendesk: 'πŸ’š', + github: 'πŸ™', gitlab: '🦊', tickethub: '🎫', generic: 'πŸ“' +}; + +export default function Issues() { + const { currentOrg } = useAuth(); + const [filters, setFilters] = useState({ status: '', source: '' }); + + const { data, isLoading } = useQuery({ + queryKey: ['issues', currentOrg?.id, filters], + queryFn: () => issues.list(currentOrg.id, filters), + enabled: !!currentOrg + }); + + if (!currentOrg) { + return
Select an organization
; + } + + const issueList = data?.data || []; + + return ( +
+
+

Issues

+
+ + +
+
+ +
+ {isLoading ? ( +
Loading...
+ ) : issueList.length === 0 ? ( +
+ πŸ“­ +

No issues found

+
+ ) : ( +
+ {issueList.map(issue => ( + +
+ {sourceIcons[issue.source] || 'πŸ“'} +
+
+ + {issue.external_key || `#${issue.id}`} + + + {issue.status} + + {issue.priority && ( + + {issue.priority} + + )} +
+

{issue.title}

+ {issue.confidence && ( +
+
+
+
+ + {(issue.confidence * 100).toFixed(0)}% confidence + +
+ )} +
+ β†’ +
+ + ))} +
+ )} +
+
+ ); +} diff --git a/frontend/src/pages/Login.jsx b/frontend/src/pages/Login.jsx new file mode 100644 index 0000000..78587f8 --- /dev/null +++ b/frontend/src/pages/Login.jsx @@ -0,0 +1,77 @@ +import { useState } from 'react'; +import { Link, useNavigate } from 'react-router-dom'; +import { useAuth } from '../context/AuthContext'; + +export default function Login() { + const [email, setEmail] = useState(''); + const [password, setPassword] = useState(''); + const [error, setError] = useState(''); + const [loading, setLoading] = useState(false); + const { login } = useAuth(); + const navigate = useNavigate(); + + const handleSubmit = async (e) => { + e.preventDefault(); + setError(''); + setLoading(true); + try { + await login(email, password); + navigate('/'); + } catch (err) { + setError(err.response?.data?.detail || 'Login failed'); + } finally { + setLoading(false); + } + }; + + return ( +
+
+
+ πŸ€– +

JIRA AI Fixer

+

Sign in to your account

+
+ +
+ {error && ( +
+ {error} +
+ )} + +
+ + setEmail(e.target.value)} + className="input" + required + /> +
+ +
+ + setPassword(e.target.value)} + className="input" + required + /> +
+ + + +

+ Don't have an account?{' '} + Sign up +

+
+
+
+ ); +} diff --git a/frontend/src/pages/Register.jsx b/frontend/src/pages/Register.jsx new file mode 100644 index 0000000..719a44e --- /dev/null +++ b/frontend/src/pages/Register.jsx @@ -0,0 +1,86 @@ +import { useState } from 'react'; +import { Link, useNavigate } from 'react-router-dom'; +import { auth } from '../services/api'; + +export default function Register() { + const [form, setForm] = useState({ email: '', password: '', full_name: '' }); + const [error, setError] = useState(''); + const [loading, setLoading] = useState(false); + const navigate = useNavigate(); + + const handleSubmit = async (e) => { + e.preventDefault(); + setError(''); + setLoading(true); + try { + await auth.register(form); + navigate('/login?registered=true'); + } catch (err) { + setError(err.response?.data?.detail || 'Registration failed'); + } finally { + setLoading(false); + } + }; + + return ( +
+
+
+ πŸ€– +

Create Account

+

Get started with JIRA AI Fixer

+
+ +
+ {error && ( +
+ {error} +
+ )} + +
+ + setForm({...form, full_name: e.target.value})} + className="input" + /> +
+ +
+ + setForm({...form, email: e.target.value})} + className="input" + required + /> +
+ +
+ + setForm({...form, password: e.target.value})} + className="input" + required + minLength={8} + /> +
+ + + +

+ Already have an account?{' '} + Sign in +

+
+
+
+ ); +} diff --git a/frontend/src/pages/Reports.jsx b/frontend/src/pages/Reports.jsx new file mode 100644 index 0000000..e218b35 --- /dev/null +++ b/frontend/src/pages/Reports.jsx @@ -0,0 +1,108 @@ +import { useState } from 'react'; +import { useQuery } from '@tanstack/react-query'; +import { useAuth } from '../context/AuthContext'; +import { reports } from '../services/api'; +import { AreaChart, Area, XAxis, YAxis, Tooltip, ResponsiveContainer } from 'recharts'; + +export default function Reports() { + const { currentOrg } = useAuth(); + const [days, setDays] = useState(30); + + const { data, isLoading } = useQuery({ + queryKey: ['report', currentOrg?.id, days], + queryFn: () => reports.summary(currentOrg.id, days), + enabled: !!currentOrg + }); + + const handleExport = async () => { + const res = await reports.exportCsv(currentOrg.id, days); + const url = window.URL.createObjectURL(new Blob([res.data])); + const a = document.createElement('a'); + a.href = url; + a.download = `issues-report-${new Date().toISOString().split('T')[0]}.csv`; + a.click(); + }; + + if (!currentOrg) return
Select an organization
; + + const r = data?.data || {}; + + return ( +
+
+

Reports

+
+ + +
+
+ + {/* Summary cards */} +
+
+

Total Issues

+

{r.total_issues || 0}

+
+
+

Analyzed

+

{r.analyzed_issues || 0}

+
+
+

PRs Created

+

{r.prs_created || 0}

+
+
+

Avg Confidence

+

+ {r.avg_confidence ? `${(r.avg_confidence * 100).toFixed(0)}%` : 'N/A'} +

+
+
+ + {/* Chart */} +
+

Trend

+
+ {isLoading ? ( +
Loading...
+ ) : ( + + + + + + + + + + + )} +
+
+ + {/* Top sources */} +
+

Top Sources

+
+ {(r.top_sources || []).map(source => ( +
+ {source.source} +
+
+
+ {source.count} +
+ ))} +
+
+
+ ); +} diff --git a/frontend/src/pages/Settings.jsx b/frontend/src/pages/Settings.jsx new file mode 100644 index 0000000..e836704 --- /dev/null +++ b/frontend/src/pages/Settings.jsx @@ -0,0 +1,175 @@ +import { useState } from 'react'; +import { useQuery, useMutation, useQueryClient } from '@tanstack/react-query'; +import { useAuth } from '../context/AuthContext'; +import { organizations, users } from '../services/api'; + +export default function Settings() { + const { currentOrg, selectOrg, user } = useAuth(); + const queryClient = useQueryClient(); + const [activeTab, setActiveTab] = useState('profile'); + const [profileForm, setProfileForm] = useState({ full_name: user?.full_name || '' }); + const [orgForm, setOrgForm] = useState({ name: currentOrg?.name || '', slug: currentOrg?.slug || '' }); + const [newOrgForm, setNewOrgForm] = useState({ name: '', slug: '' }); + + const updateProfileMutation = useMutation({ + mutationFn: (data) => users.updateMe(data), + onSuccess: () => queryClient.invalidateQueries(['user']) + }); + + const updateOrgMutation = useMutation({ + mutationFn: (data) => organizations.update(currentOrg.id, data), + onSuccess: (res) => { + queryClient.invalidateQueries(['organizations']); + selectOrg(res.data); + } + }); + + const createOrgMutation = useMutation({ + mutationFn: (data) => organizations.create(data), + onSuccess: (res) => { + queryClient.invalidateQueries(['organizations']); + selectOrg(res.data); + setNewOrgForm({ name: '', slug: '' }); + } + }); + + const tabs = [ + { id: 'profile', label: 'Profile', icon: 'πŸ‘€' }, + { id: 'organization', label: 'Organization', icon: '🏒' }, + { id: 'new-org', label: 'New Organization', icon: 'βž•' } + ]; + + return ( +
+

Settings

+ +
+ {/* Tabs */} +
+ {tabs.map(tab => ( + + ))} +
+ + {/* Content */} +
+ {activeTab === 'profile' && ( +
+

Profile Settings

+
+
+ + +
+
+ + setProfileForm({...profileForm, full_name: e.target.value})} + className="input" + /> +
+ +
+
+ )} + + {activeTab === 'organization' && currentOrg && ( +
+

Organization Settings

+
+
+ + setOrgForm({...orgForm, name: e.target.value})} + className="input" + /> +
+
+ + setOrgForm({...orgForm, slug: e.target.value})} + className="input" + /> +
+ +
+

Webhook Base URL

+ + https://jira-fixer.startdata.com.br/api/webhook/{currentOrg.id}/ + +

+ Append: jira, servicenow, zendesk, github, gitlab, tickethub, or generic +

+
+ + +
+
+ )} + + {activeTab === 'new-org' && ( +
+

Create New Organization

+
+
+ + setNewOrgForm({...newOrgForm, name: e.target.value})} + className="input" + placeholder="Acme Corp" + /> +
+
+ + setNewOrgForm({...newOrgForm, slug: e.target.value.toLowerCase().replace(/[^a-z0-9-]/g, '')})} + className="input" + placeholder="acme-corp" + /> +
+ +
+
+ )} +
+
+
+ ); +} diff --git a/frontend/src/pages/Team.jsx b/frontend/src/pages/Team.jsx new file mode 100644 index 0000000..19bbef3 --- /dev/null +++ b/frontend/src/pages/Team.jsx @@ -0,0 +1,118 @@ +import { useState } from 'react'; +import { useQuery, useMutation, useQueryClient } from '@tanstack/react-query'; +import { useAuth } from '../context/AuthContext'; +import { organizations } from '../services/api'; + +const roleColors = { + owner: 'bg-yellow-500/20 text-yellow-400', + admin: 'bg-red-500/20 text-red-400', + manager: 'bg-purple-500/20 text-purple-400', + analyst: 'bg-blue-500/20 text-blue-400', + viewer: 'bg-gray-500/20 text-gray-400' +}; + +export default function Team() { + const { currentOrg } = useAuth(); + const queryClient = useQueryClient(); + const [showInvite, setShowInvite] = useState(false); + const [inviteForm, setInviteForm] = useState({ email: '', role: 'viewer' }); + + const { data, isLoading } = useQuery({ + queryKey: ['org-members', currentOrg?.id], + queryFn: () => organizations.members(currentOrg.id), + enabled: !!currentOrg + }); + + const inviteMutation = useMutation({ + mutationFn: () => organizations.invite(currentOrg.id, inviteForm), + onSuccess: () => { + queryClient.invalidateQueries(['org-members', currentOrg?.id]); + setShowInvite(false); + setInviteForm({ email: '', role: 'viewer' }); + } + }); + + if (!currentOrg) return
Select an organization
; + + const members = data?.data || []; + + return ( +
+
+

Team

+ +
+ +
+ {isLoading ? ( +
Loading...
+ ) : ( +
+ {members.map(member => ( +
+
+
+ {member.user?.full_name?.[0] || member.user?.email?.[0] || '?'} +
+
+

{member.user?.full_name || 'Unknown'}

+

{member.user?.email}

+
+
+ + {member.role} + +
+ ))} +
+ )} +
+ + {/* Invite modal */} + {showInvite && ( +
+
+

Invite Team Member

+
+
+ + setInviteForm({...inviteForm, email: e.target.value})} + className="input" + placeholder="colleague@company.com" + /> +
+
+ + +
+
+
+ + +
+
+
+ )} +
import axios from 'axios';

// Shared axios instance. All requests are same-origin under /api (proxied to
// the FastAPI backend by Vite in dev, served directly in production).
const api = axios.create({
  baseURL: '/api',
  headers: { 'Content-Type': 'application/json' }
});

// Attach the stored JWT to every outgoing request.
api.interceptors.request.use(config => {
  const token = localStorage.getItem('access_token');
  if (token) config.headers.Authorization = `Bearer ${token}`;
  return config;
});

// On 401 from a protected endpoint: drop stale tokens and send the user to
// the login page.
// FIX: /auth/* endpoints are excluded — previously a wrong password on
// /auth/login (401) cleared storage and hard-redirected to /login, so the
// Login page could never display its error (and the redirect could loop).
api.interceptors.response.use(
  response => response,
  async error => {
    const url = error.config?.url || '';
    if (error.response?.status === 401 && !url.startsWith('/auth/')) {
      localStorage.removeItem('access_token');
      localStorage.removeItem('refresh_token');
      window.location.href = '/login';
    }
    return Promise.reject(error);
  }
);

// Auth
export const auth = {
  // SECURITY(review): credentials are sent as query-string params, which
  // proxies/access logs commonly record. Moving them into a JSON body needs a
  // coordinated backend change to /auth/login — left as-is here.
  login: (email, password) => api.post('/auth/login', null, { params: { email, password } }),
  register: (data) => api.post('/auth/register', data),
  refresh: (token) => api.post('/auth/refresh', null, { params: { refresh_token: token } })
};

// Users
export const users = {
  me: () => api.get('/users/me'),
  updateMe: (data) => api.patch('/users/me', data)
};

// Organizations
export const organizations = {
  list: () => api.get('/organizations'),
  create: (data) => api.post('/organizations', data),
  get: (id) => api.get(`/organizations/${id}`),
  update: (id, data) => api.patch(`/organizations/${id}`, data),
  members: (id) => api.get(`/organizations/${id}/members`),
  invite: (id, data) => api.post(`/organizations/${id}/members`, data)
};

// Integrations — every call is scoped to an organization via ?org_id=.
export const integrations = {
  list: (orgId) => api.get('/integrations', { params: { org_id: orgId } }),
  create: (orgId, data) => api.post('/integrations', data, { params: { org_id: orgId } }),
  get: (orgId, id) => api.get(`/integrations/${id}`, { params: { org_id: orgId } }),
  update: (orgId, id, data) =>
    api.patch(`/integrations/${id}`, data, { params: { org_id: orgId } }),
  delete: (orgId, id) => api.delete(`/integrations/${id}`, { params: { org_id: orgId } }),
  test: (orgId, id) => api.post(`/integrations/${id}/test`, null, { params: { org_id: orgId } })
};

// Issues
export const issues = {
  list: (orgId, params = {}) => api.get('/issues', { params: { org_id: orgId, ...params } }),
  stats: (orgId) => api.get('/issues/stats', { params: { org_id: orgId } }),
  get: (orgId, id) => api.get(`/issues/${id}`, { params: { org_id: orgId } }),
  create: (orgId, data) => api.post('/issues', data, { params: { org_id: orgId } }),
  reanalyze: (orgId, id) => api.post(`/issues/${id}/reanalyze`, null, { params: { org_id: orgId } }),
  addComment: (orgId, id, data) => api.post(`/issues/${id}/comments`, data, { params: { org_id: orgId } })
};

// Reports — CSV export requests a blob so the caller can trigger a download.
export const reports = {
  summary: (orgId, days = 30) => api.get('/reports/summary', { params: { org_id: orgId, days } }),
  exportCsv: (orgId, days = 30) => api.get('/reports/export/csv', {
    params: { org_id: orgId, days },
    responseType: 'blob'
  })
};

export default api;
a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..00e3c46 --- /dev/null +++ b/requirements.txt @@ -0,0 +1,10 @@ +fastapi==0.109.0 +uvicorn[standard]==0.27.0 +sqlalchemy[asyncio]==2.0.25 +asyncpg==0.29.0 +pydantic==2.5.3 +pydantic-settings==2.1.0 +python-jose[cryptography]==3.3.0 +passlib[bcrypt]==1.7.4 +httpx==0.26.0 +python-multipart==0.0.6