# NOTE: removed non-source scrape residue that was pasted above the imports
# (CI status banner "Some checks failed / Build and Deploy ...", and the
# "490 lines / 17 KiB / Python" web-UI counters). It is not Python code.
from fastapi import FastAPI, Depends, HTTPException, Query
|
||
from fastapi.middleware.cors import CORSMiddleware
|
||
from sqlmodel.ext.asyncio.session import AsyncSession
|
||
from sqlmodel import select, func
|
||
from .database import init_db, get_session
|
||
from .models import ErrorLog, ErrorLogCreate, LogStatus, TaskStatusUpdate, RepairTask, RepairTaskCreate, Project, ProjectUpdate
|
||
from .gitea_client import GiteaClient
|
||
from datetime import datetime, timedelta
|
||
from typing import Optional, List
|
||
from pydantic import BaseModel
|
||
import hashlib
|
||
import json
|
||
|
||
# FastAPI application: log ingestion, agent tasks, repair reports,
# dashboard statistics, project management and PR operations.
app = FastAPI(title="Log Center & AIOps Control Plane")

# CORS for frontend
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],  # In production, restrict to your domain
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
@app.on_event("startup")
async def on_startup():
    # Initialize the database (see .database.init_db) before serving requests.
    await init_db()
def generate_fingerprint(log: ErrorLogCreate) -> str:
    """Return a stable MD5 deduplication key for an incoming error report.

    The key is built from the project id plus a handful of source-specific
    fields (job/step for CI/CD, namespace/deployment for deployments,
    type/file/line otherwise), so repeated occurrences of the same error
    collapse onto one fingerprint.
    """
    err = log.error
    ctx = log.context or {}

    if log.source == "cicd":
        fields = (
            log.project_id, "cicd", err.get("type"),
            ctx.get("job_name", "unknown"), ctx.get("step_name", "unknown"),
        )
    elif log.source == "deployment":
        fields = (
            log.project_id, "deployment", err.get("type"),
            ctx.get("namespace", "default"), ctx.get("deployment_name", "unknown"),
        )
    else:
        # Runtime/application errors key on the error location.
        fields = (
            log.project_id, err.get("type"),
            err.get("file_path"), err.get("line_number"),
        )

    raw = "|".join(str(part) for part in fields)
    return hashlib.md5(raw.encode()).hexdigest()
# ==================== Log Reporting ====================
@app.post("/api/v1/logs/report", tags=["Logs"])
async def report_log(log_data: ErrorLogCreate, session: AsyncSession = Depends(get_session)):
    """Ingest one error report.

    Deduplicates by fingerprint: an already-open duplicate is swallowed, a
    previously-resolved duplicate is reopened as a regression. Otherwise the
    owning Project row is upserted and a fresh ErrorLog row is inserted.
    """
    fp = generate_fingerprint(log_data)

    # Deduplication: look for an existing row with the same fingerprint.
    dup = (await session.exec(
        select(ErrorLog).where(ErrorLog.fingerprint == fp))).first()

    if dup is not None:
        resolved = {LogStatus.DEPLOYED, LogStatus.FIXED, LogStatus.VERIFIED}
        if dup.status not in resolved:
            # Still open -> swallow the duplicate report.
            return {"message": "Log deduplicated", "id": dup.id, "status": dup.status}
        # Resolved but seen again -> regression: reopen as NEW.
        dup.status = LogStatus.NEW
        dup.timestamp = log_data.timestamp or datetime.utcnow()
        dup.retry_count = 0  # fresh occurrence, reset the retry budget
        session.add(dup)
        await session.commit()
        await session.refresh(dup)
        return {"message": "Regression detected, reopened", "id": dup.id}

    # Upsert the Project record that owns this log.
    project = (await session.exec(
        select(Project).where(Project.project_id == log_data.project_id))).first()
    if project is None:
        project = Project(project_id=log_data.project_id)
    if log_data.repo_url:
        project.repo_url = log_data.repo_url
    project.updated_at = datetime.utcnow()
    session.add(project)

    # Insert the new error log row.
    row = ErrorLog(
        project_id=log_data.project_id,
        environment=log_data.environment,
        level=log_data.level,
        source=log_data.source,
        error_type=log_data.error.get("type"),
        error_message=log_data.error.get("message"),
        file_path=log_data.error.get("file_path"),
        line_number=log_data.error.get("line_number"),
        stack_trace=log_data.error.get("stack_trace"),
        context=log_data.context,
        version=log_data.version,
        commit_hash=log_data.commit_hash,
        fingerprint=fp,
        timestamp=log_data.timestamp or datetime.utcnow(),
    )
    session.add(row)
    await session.commit()
    await session.refresh(row)

    return {"message": "Log reported", "id": row.id}
# ==================== Agent Tasks ====================
@app.get("/api/v1/tasks/pending", tags=["Tasks"])
async def get_pending_tasks(
    project_id: Optional[str] = None,
    source: Optional[str] = None,
    session: AsyncSession = Depends(get_session),
):
    """Return all ErrorLog rows in status NEW, optionally filtered.

    Args:
        project_id: restrict to one project when given.
        source: restrict to one log source (e.g. "cicd", "deployment").

    Fix: `project_id` was annotated as plain `str` with a `None` default;
    it is now `Optional[str]` to match `source` and to describe the query
    parameter correctly (same default, callers are unaffected).
    """
    query = select(ErrorLog).where(ErrorLog.status == LogStatus.NEW)
    if project_id:
        query = query.where(ErrorLog.project_id == project_id)
    if source:
        query = query.where(ErrorLog.source == source)

    results = await session.exec(query)
    return results.all()
@app.put("/api/v1/tasks/{task_id}/status", tags=["Tasks"])
async def update_task_status(
    task_id: int,
    status_update: TaskStatusUpdate,
    session: AsyncSession = Depends(get_session)
):
    """Update one task's status; 404 when the task id is unknown."""
    task = (await session.exec(
        select(ErrorLog).where(ErrorLog.id == task_id))).first()

    if task is None:
        raise HTTPException(status_code=404, detail="Task not found")

    task.status = status_update.status
    # Only a failed fix carries a reason worth persisting.
    if status_update.message and status_update.status == LogStatus.FIX_FAILED:
        task.failure_reason = status_update.message

    session.add(task)
    await session.commit()
    await session.refresh(task)

    return {"message": "Status updated", "id": task.id, "status": task.status}
# ==================== Repair Reports ====================
@app.post("/api/v1/repair/reports", tags=["Repair"])
async def create_repair_report(report: RepairTaskCreate, session: AsyncSession = Depends(get_session)):
    """Upload a new repair report and sync the linked ErrorLog.

    Fix: `RepairTask.from_orm(...)` is the deprecated Pydantic v1 API; the
    rest of this module already uses the v2 API (`model_dump` in
    update_project), so use `model_validate` here for consistency.
    """
    # 1. Create the repair task record from the incoming payload.
    repair_task = RepairTask.model_validate(report)
    session.add(repair_task)

    # 2. Propagate a terminal status (and failure reason) to the error log.
    if report.status in [LogStatus.FIXED, LogStatus.FIX_FAILED]:
        results = await session.exec(
            select(ErrorLog).where(ErrorLog.id == report.error_log_id))
        error_log = results.first()
        if error_log:
            error_log.status = report.status
            if report.failure_reason and report.status == LogStatus.FIX_FAILED:
                error_log.failure_reason = report.failure_reason
            session.add(error_log)

    await session.commit()
    await session.refresh(repair_task)
    return {"message": "Report uploaded", "id": repair_task.id}
@app.get("/api/v1/repair/reports", tags=["Repair"])
async def get_repair_reports(
    page: int = Query(1, ge=1),
    page_size: int = Query(20, ge=1, le=100),
    project_id: Optional[str] = None,
    error_log_id: Optional[int] = None,
    session: AsyncSession = Depends(get_session)
):
    """Get repair reports list, optionally filtered by project or bug.

    Fixes:
    - The filter conditions were duplicated between the page query and the
      count query (drift risk); they are now applied by one helper.
    - `if error_log_id:` silently dropped a filter value of 0; use an
      explicit `is not None` check for the integer filter.
    """
    def _filtered(q):
        # Single source of truth for the filters.
        if project_id:
            q = q.where(RepairTask.project_id == project_id)
        if error_log_id is not None:
            q = q.where(RepairTask.error_log_id == error_log_id)
        return q

    query = _filtered(select(RepairTask).order_by(RepairTask.created_at.desc()))
    query = query.offset((page - 1) * page_size).limit(page_size)
    tasks = (await session.exec(query)).all()

    # Total matching rows, for pagination metadata.
    total = (await session.exec(_filtered(select(func.count(RepairTask.id))))).one()

    return {
        "items": tasks,
        "total": total,
        "page": page,
        "page_size": page_size,
        "total_pages": (total + page_size - 1) // page_size
    }
@app.get("/api/v1/repair/reports/{report_id}", tags=["Repair"])
async def get_repair_report_detail(report_id: int, session: AsyncSession = Depends(get_session)):
    """Fetch a single repair report by id; 404 when absent."""
    report = (await session.exec(
        select(RepairTask).where(RepairTask.id == report_id))).first()

    if report is None:
        raise HTTPException(status_code=404, detail="Report not found")

    return report
# ==================== Dashboard APIs ====================
@app.get("/api/v1/dashboard/stats", tags=["Dashboard"])
async def get_dashboard_stats(source: Optional[str] = None, session: AsyncSession = Depends(get_session)):
    """Aggregate bug statistics for the dashboard: totals, today's count,
    fix rate, plus per-status and per-source distributions."""
    midnight = datetime.utcnow().replace(hour=0, minute=0, second=0, microsecond=0)

    def scoped(query):
        # Restrict a query to the requested source, when one was given.
        return query.where(ErrorLog.source == source) if source else query

    async def count_of(query):
        return (await session.exec(query)).one()

    # Total bugs (within the optional source scope).
    total_bugs = await count_of(scoped(select(func.count(ErrorLog.id))))

    # Bugs reported since midnight UTC.
    today_bugs = await count_of(scoped(
        select(func.count(ErrorLog.id)).where(ErrorLog.timestamp >= midnight)))

    # Per-status breakdown (one query per status).
    status_counts = {}
    for st in LogStatus:
        status_counts[st.value] = await count_of(scoped(
            select(func.count(ErrorLog.id)).where(ErrorLog.status == st)))

    # Fix rate = (FIXED + VERIFIED + DEPLOYED + CANNOT_REPRODUCE) / total.
    resolved_count = sum(
        status_counts.get(key, 0)
        for key in ("FIXED", "VERIFIED", "DEPLOYED", "CANNOT_REPRODUCE")
    )
    fix_rate = round((resolved_count / total_bugs * 100), 2) if total_bugs > 0 else 0

    # Per-source breakdown. NOTE: deliberately ignores the `source` filter,
    # matching the original behavior (always shows the full distribution).
    from .models import LogSource
    source_counts = {}
    for src in LogSource:
        source_counts[src.value] = await count_of(
            select(func.count(ErrorLog.id)).where(ErrorLog.source == src.value))

    return {
        "total_bugs": total_bugs,
        "today_bugs": today_bugs,
        "fix_rate": fix_rate,
        "status_distribution": status_counts,
        "source_distribution": source_counts,
    }
@app.get("/api/v1/bugs", tags=["Dashboard"])
async def get_bugs_list(
    page: int = Query(1, ge=1),
    page_size: int = Query(20, ge=1, le=100),
    status: Optional[LogStatus] = None,
    project_id: Optional[str] = None,
    source: Optional[str] = None,
    session: AsyncSession = Depends(get_session)
):
    """Get paginated list of bugs with optional filters.

    Improvement: the three filter conditions were previously duplicated
    between the page query and the count query (easy to let them drift);
    they are now applied by a single helper used for both.
    """
    def _filtered(q):
        if status:
            q = q.where(ErrorLog.status == status)
        if project_id:
            q = q.where(ErrorLog.project_id == project_id)
        if source:
            q = q.where(ErrorLog.source == source)
        return q

    # Page of results, newest first.
    query = _filtered(select(ErrorLog).order_by(ErrorLog.timestamp.desc()))
    query = query.offset((page - 1) * page_size).limit(page_size)
    bugs = (await session.exec(query)).all()

    # Total matching rows, for pagination metadata.
    total = (await session.exec(_filtered(select(func.count(ErrorLog.id))))).one()

    return {
        "items": bugs,
        "total": total,
        "page": page,
        "page_size": page_size,
        "total_pages": (total + page_size - 1) // page_size
    }
@app.get("/api/v1/bugs/{bug_id}", tags=["Dashboard"])
async def get_bug_detail(bug_id: int, session: AsyncSession = Depends(get_session)):
    """Fetch a single bug (ErrorLog row) by id; 404 when absent."""
    bug = (await session.exec(
        select(ErrorLog).where(ErrorLog.id == bug_id))).first()

    if bug is None:
        raise HTTPException(status_code=404, detail="Bug not found")

    return bug
# ==================== Project Management ====================
@app.get("/api/v1/projects", tags=["Projects"])
async def get_projects(session: AsyncSession = Depends(get_session)):
    """List every project, most recently updated first."""
    rows = await session.exec(
        select(Project).order_by(Project.updated_at.desc()))
    return {"projects": rows.all()}
@app.get("/api/v1/projects/{project_id}", tags=["Projects"])
async def get_project_detail(project_id: str, session: AsyncSession = Depends(get_session)):
    """Fetch one project by its string project_id; 404 when absent."""
    project = (await session.exec(
        select(Project).where(Project.project_id == project_id))).first()
    if project is None:
        raise HTTPException(status_code=404, detail="Project not found")
    return project
@app.put("/api/v1/projects/{project_id}", tags=["Projects"])
async def update_project(project_id: str, data: ProjectUpdate, session: AsyncSession = Depends(get_session)):
    """Update project config (repo_url, local_path, name, description)."""
    project = (await session.exec(
        select(Project).where(Project.project_id == project_id))).first()
    if project is None:
        raise HTTPException(status_code=404, detail="Project not found")

    # Apply only the fields the caller actually sent.
    for field, value in data.model_dump(exclude_unset=True).items():
        setattr(project, field, value)
    project.updated_at = datetime.utcnow()

    session.add(project)
    await session.commit()
    await session.refresh(project)
    return project
@app.get("/", tags=["Health"])
async def health_check():
    # Liveness probe: always responds with a static OK payload.
    return {"status": "ok"}
# ==================== PR Operations ====================

class PRRejectRequest(BaseModel):
    """Request body for rejecting a PR."""
    reason: str  # human-readable reason for rejecting the proposed fix
@app.post("/api/v1/bugs/{bug_id}/merge-pr", tags=["PR Operations"])
async def merge_pr(
    bug_id: int,
    session: AsyncSession = Depends(get_session),
):
    """
    Approve and merge the PR attached to a bug.

    Preconditions: the bug exists, is in status PENDING_FIX, and has a
    pr_url. Flow:
    1. Call the Gitea API to merge the PR.
    2. On success, set the bug status to FIXED and stamp merged_at.
       (The original docstring said "MERGED", but the code below sets
       LogStatus.FIXED.)

    Raises: 404 (bug not found), 400 (wrong status / no PR), 500 (merge
    failed on the Gitea side).
    """
    statement = select(ErrorLog).where(ErrorLog.id == bug_id)
    results = await session.exec(statement)
    bug = results.first()

    if not bug:
        raise HTTPException(status_code=404, detail="Bug not found")

    if bug.status != LogStatus.PENDING_FIX:
        raise HTTPException(
            status_code=400,
            detail=f"Bug 状态不是待修复,当前状态: {bug.status}"
        )

    if not bug.pr_url:
        raise HTTPException(status_code=400, detail="Bug 没有关联的 PR")

    # Merge the PR via the Gitea API.
    # NOTE(review): merge_pr_by_url looks like a synchronous call inside an
    # async handler — confirm it does not block the event loop.
    gitea_client = GiteaClient()
    success, message = gitea_client.merge_pr_by_url(bug.pr_url)

    if success:
        # Record the merge on the bug row.
        bug.status = LogStatus.FIXED
        bug.merged_at = datetime.utcnow()
        session.add(bug)
        await session.commit()
        await session.refresh(bug)

        return {
            "message": "PR 已合并",
            "pr_url": bug.pr_url,
            "bug_id": bug.id,
            "new_status": bug.status,
        }
    else:
        raise HTTPException(status_code=500, detail=f"合并失败: {message}")
@app.post("/api/v1/bugs/{bug_id}/close-pr", tags=["PR Operations"])
async def close_pr(
    bug_id: int,
    request: PRRejectRequest,
    session: AsyncSession = Depends(get_session),
):
    """
    Reject a proposed fix and close its PR.

    Flow:
    1. Post the rejection reason to the PR and close it via the Gitea API.
    2. Keep the bug in PENDING_FIX, bump its rejection counter and stamp
       the rejection time.
    3. Persist the rejection details (reason + previous PR info) as JSON
       so the agent can detect the bug and retry the fix automatically.

    Raises: 404 (bug not found), 400 (wrong status / no PR), 500 (Gitea
    close failed).
    """
    bug = (await session.exec(
        select(ErrorLog).where(ErrorLog.id == bug_id))).first()

    if bug is None:
        raise HTTPException(status_code=404, detail="Bug not found")

    if bug.status != LogStatus.PENDING_FIX:
        raise HTTPException(
            status_code=400,
            detail=f"Bug 状态不是待修复,当前状态: {bug.status}"
        )

    if not bug.pr_url:
        raise HTTPException(status_code=400, detail="Bug 没有关联的 PR")

    # Close the PR first (with the rejection reason); abort without
    # touching the bug row if Gitea refuses.
    client = GiteaClient()
    ok, err = client.close_pr_by_url(bug.pr_url, request.reason)
    if not ok:
        raise HTTPException(status_code=500, detail=f"关闭 PR 失败: {err}")

    # Bookkeeping on the bug row.
    bug.status = LogStatus.PENDING_FIX
    bug.rejection_count = (bug.rejection_count or 0) + 1
    bug.last_rejected_at = datetime.utcnow()

    # Structured record of why and what was rejected.
    bug.rejection_reason = json.dumps(
        {
            "rejected_at": datetime.utcnow().isoformat(),
            "reason": request.reason,
            "previous_pr": {
                "pr_number": bug.pr_number,
                "pr_url": bug.pr_url,
                "branch": bug.branch_name,
            },
        },
        ensure_ascii=False,
    )

    session.add(bug)
    await session.commit()
    await session.refresh(bug)

    return {
        "message": "PR 已拒绝,Bug 将重新修复",
        "rejection_count": bug.rejection_count,
        "bug_id": bug.id,
        "new_status": bug.status,
    }