feat: add AI prediction, energy strategy, carbon asset, and AI ops modules
Inspired by Envision (远景) AI Energy System visit. Adds 4 major feature modules: - AI Prediction (AI预测): PV/load/COP forecasting, self-consumption optimization - Energy Strategy (策略优化): TOU pricing, peak/valley strategy, weather integration - Carbon Asset (碳资产): CCER, green certificates, targets, benchmarks, reports - AI Operations (AI运维): device health scoring, anomaly detection, predictive maintenance 20 new models, 4 services, 58 API endpoints, 23 frontend pages. Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
@@ -1,5 +1,5 @@
|
||||
from fastapi import APIRouter
|
||||
from app.api.v1 import auth, users, devices, energy, monitoring, alarms, reports, carbon, dashboard, collectors, websocket, audit, settings, charging, quota, cost, maintenance, management
|
||||
from app.api.v1 import auth, users, devices, energy, monitoring, alarms, reports, carbon, dashboard, collectors, websocket, audit, settings, charging, quota, cost, maintenance, management, prediction, energy_strategy, weather, ai_ops
|
||||
|
||||
api_router = APIRouter(prefix="/api/v1")
|
||||
|
||||
@@ -21,3 +21,7 @@ api_router.include_router(quota.router)
|
||||
api_router.include_router(cost.router)
|
||||
api_router.include_router(maintenance.router)
|
||||
api_router.include_router(management.router)
|
||||
api_router.include_router(prediction.router)
|
||||
api_router.include_router(energy_strategy.router)
|
||||
api_router.include_router(weather.router)
|
||||
api_router.include_router(ai_ops.router)
|
||||
|
||||
590
backend/app/api/v1/ai_ops.py
Normal file
590
backend/app/api/v1/ai_ops.py
Normal file
@@ -0,0 +1,590 @@
|
||||
"""AI运维智能体 API - 设备健康、异常检测、诊断、预测维护、洞察"""
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy import select, func, and_, or_
|
||||
from datetime import datetime, timezone, timedelta
|
||||
from pydantic import BaseModel
|
||||
from app.core.database import get_db
|
||||
from app.core.deps import get_current_user
|
||||
from app.models.user import User
|
||||
from app.models.device import Device
|
||||
from app.models.ai_ops import (
|
||||
DeviceHealthScore, AnomalyDetection, DiagnosticReport,
|
||||
MaintenancePrediction, OpsInsight,
|
||||
)
|
||||
from app.services.ai_ops import (
|
||||
calculate_device_health, scan_anomalies, run_diagnostics,
|
||||
generate_maintenance_predictions, generate_insights, get_dashboard_data,
|
||||
)
|
||||
|
||||
router = APIRouter(prefix="/ai-ops", tags=["AI运维智能体"])
|
||||
|
||||
|
||||
# ── Device Health ───────────────────────────────────────────────────
|
||||
|
||||
@router.get("/health")
async def get_all_health(
    db: AsyncSession = Depends(get_db),
    user: User = Depends(get_current_user),
):
    """Return the most recent health score of every device.

    Response: list of dicts combining the latest ``DeviceHealthScore`` row
    per device with basic device metadata (name/type/code).
    """
    # Per-device newest score timestamp; joined back below to select the
    # latest row. NOTE(review): if two rows share the same max timestamp
    # for one device, both are returned — confirm timestamps are unique.
    subq = (
        select(
            DeviceHealthScore.device_id,
            func.max(DeviceHealthScore.timestamp).label("max_ts"),
        ).group_by(DeviceHealthScore.device_id).subquery()
    )
    result = await db.execute(
        select(DeviceHealthScore).join(
            subq, and_(
                DeviceHealthScore.device_id == subq.c.device_id,
                DeviceHealthScore.timestamp == subq.c.max_ts,
            )
        )
    )
    scores = result.scalars().all()

    # Resolve device metadata in one query instead of per-score lookups.
    device_ids = [s.device_id for s in scores]
    dev_map = {}
    if device_ids:
        dev_result = await db.execute(
            select(Device.id, Device.name, Device.device_type, Device.code)
            .where(Device.id.in_(device_ids))
        )
        dev_map = {r.id: {"name": r.name, "type": r.device_type, "code": r.code} for r in dev_result.all()}

    return [{
        "device_id": s.device_id,
        # Fall back to "#<id>" / "unknown" when the device row is missing.
        "device_name": dev_map.get(s.device_id, {}).get("name", f"#{s.device_id}"),
        "device_type": dev_map.get(s.device_id, {}).get("type", "unknown"),
        "device_code": dev_map.get(s.device_id, {}).get("code", ""),
        "health_score": s.health_score,
        "status": s.status,
        "trend": s.trend,
        "factors": s.factors,
        "timestamp": str(s.timestamp),
    } for s in scores]
|
||||
|
||||
|
||||
@router.get("/health/{device_id}")
async def get_device_health(
    device_id: int,
    db: AsyncSession = Depends(get_db),
    user: User = Depends(get_current_user),
):
    """Return the latest health-score record for a single device.

    Raises 404 when no score has been recorded for the device yet.
    """
    latest_q = (
        select(DeviceHealthScore)
        .where(DeviceHealthScore.device_id == device_id)
        .order_by(DeviceHealthScore.timestamp.desc())
        .limit(1)
    )
    score = (await db.execute(latest_q)).scalar_one_or_none()
    if score is None:
        raise HTTPException(status_code=404, detail="暂无该设备健康数据")

    device = (
        await db.execute(select(Device).where(Device.id == device_id))
    ).scalar_one_or_none()
    name = device.name if device is not None else f"#{device_id}"
    dtype = device.device_type if device is not None else "unknown"

    return {
        "device_id": score.device_id,
        "device_name": name,
        "device_type": dtype,
        "health_score": score.health_score,
        "status": score.status,
        "trend": score.trend,
        "factors": score.factors,
        "timestamp": str(score.timestamp),
    }
|
||||
|
||||
|
||||
@router.get("/health/{device_id}/history")
async def get_health_history(
    device_id: int,
    days: int = Query(7, ge=1, le=90),
    db: AsyncSession = Depends(get_db),
    user: User = Depends(get_current_user),
):
    """Return a device's health scores from the last *days* days, oldest first."""
    since = datetime.now(timezone.utc) - timedelta(days=days)
    stmt = (
        select(DeviceHealthScore)
        .where(and_(
            DeviceHealthScore.device_id == device_id,
            DeviceHealthScore.timestamp >= since,
        ))
        .order_by(DeviceHealthScore.timestamp.asc())
    )
    rows = (await db.execute(stmt)).scalars().all()

    history = []
    for row in rows:
        history.append({
            "timestamp": str(row.timestamp),
            "health_score": row.health_score,
            "status": row.status,
            "trend": row.trend,
            "factors": row.factors,
        })
    return history
|
||||
|
||||
|
||||
@router.post("/health/calculate")
async def trigger_health_calculation(
    db: AsyncSession = Depends(get_db),
    user: User = Depends(get_current_user),
):
    """Recalculate the health score of every active device.

    Best-effort batch: a failure on one device is recorded in its result
    entry instead of aborting the remaining devices.

    Returns: ``{"calculated": <count>, "results": [...]}`` where each entry
    is either a score summary or ``{"device_id": ..., "error": ...}``.
    """
    # ``is_(True)`` is the idiomatic SQLAlchemy boolean filter (the
    # original ``== True`` is linter-flagged E712; filtering is identical).
    result = await db.execute(select(Device).where(Device.is_active.is_(True)))
    devices = result.scalars().all()
    scores = []
    for device in devices:
        try:
            score = await calculate_device_health(db, device)
            scores.append({
                "device_id": score.device_id,
                "health_score": score.health_score,
                "status": score.status,
            })
        except Exception as e:  # deliberate: keep scoring the remaining devices
            scores.append({"device_id": device.id, "error": str(e)})
    return {"calculated": len(scores), "results": scores}
|
||||
|
||||
|
||||
# ── Anomaly Detection ───────────────────────────────────────────────
|
||||
|
||||
@router.get("/anomalies")
async def list_anomalies(
    device_id: int | None = None,
    severity: str | None = None,
    status: str | None = None,
    days: int = Query(7, ge=1, le=90),
    page: int = Query(1, ge=1),
    page_size: int = Query(20, ge=1, le=100),
    db: AsyncSession = Depends(get_db),
    user: User = Depends(get_current_user),
):
    """Paginated anomaly-detection records from the last *days* days.

    Optional filters: device_id, severity, status.  Returns
    ``{"total": <matching rows>, "items": [...]}`` ordered newest first.
    """
    cutoff = datetime.now(timezone.utc) - timedelta(days=days)
    query = select(AnomalyDetection).where(AnomalyDetection.detected_at >= cutoff)
    # NOTE(review): truthiness test skips device_id == 0 — fine if ids start at 1.
    if device_id:
        query = query.where(AnomalyDetection.device_id == device_id)
    if severity:
        query = query.where(AnomalyDetection.severity == severity)
    if status:
        query = query.where(AnomalyDetection.status == status)

    # Count the filtered set before pagination is applied.
    count_q = select(func.count()).select_from(query.subquery())
    total = (await db.execute(count_q)).scalar()

    query = query.order_by(AnomalyDetection.detected_at.desc()).offset((page - 1) * page_size).limit(page_size)
    result = await db.execute(query)
    anomalies = result.scalars().all()

    # Resolve device names for the current page in a single query.
    dev_ids = list(set(a.device_id for a in anomalies))
    dev_map = {}
    if dev_ids:
        dev_result = await db.execute(select(Device.id, Device.name).where(Device.id.in_(dev_ids)))
        dev_map = {r.id: r.name for r in dev_result.all()}

    return {
        "total": total,
        "items": [{
            "id": a.id,
            "device_id": a.device_id,
            "device_name": dev_map.get(a.device_id, f"#{a.device_id}"),
            "detected_at": str(a.detected_at),
            "anomaly_type": a.anomaly_type,
            "severity": a.severity,
            "description": a.description,
            "metric_name": a.metric_name,
            "expected_value": a.expected_value,
            "actual_value": a.actual_value,
            "deviation_percent": a.deviation_percent,
            "status": a.status,
            "resolution_notes": a.resolution_notes,
        } for a in anomalies],
    }
|
||||
|
||||
|
||||
@router.get("/anomalies/{device_id}")
async def get_device_anomalies(
    device_id: int,
    days: int = Query(7, ge=1, le=90),
    db: AsyncSession = Depends(get_db),
    user: User = Depends(get_current_user),
):
    """Return one device's anomaly records from the last *days* days, newest first."""
    since = datetime.now(timezone.utc) - timedelta(days=days)
    stmt = (
        select(AnomalyDetection)
        .where(and_(
            AnomalyDetection.device_id == device_id,
            AnomalyDetection.detected_at >= since,
        ))
        .order_by(AnomalyDetection.detected_at.desc())
    )
    records = (await db.execute(stmt)).scalars().all()

    def serialize(rec):
        # Flat JSON view of one anomaly row.
        return {
            "id": rec.id,
            "detected_at": str(rec.detected_at),
            "anomaly_type": rec.anomaly_type,
            "severity": rec.severity,
            "description": rec.description,
            "metric_name": rec.metric_name,
            "expected_value": rec.expected_value,
            "actual_value": rec.actual_value,
            "deviation_percent": rec.deviation_percent,
            "status": rec.status,
        }

    return [serialize(rec) for rec in records]
|
||||
|
||||
|
||||
class AnomalyStatusUpdate(BaseModel):
    """Request body for updating an anomaly's triage status."""

    # Allowed values used by the UI: investigating, resolved, false_positive.
    status: str
    # Optional free-text note recorded when the anomaly is resolved.
    resolution_notes: str | None = None
|
||||
|
||||
|
||||
@router.put("/anomalies/{anomaly_id}/status")
async def update_anomaly_status(
    anomaly_id: int,
    data: AnomalyStatusUpdate,
    db: AsyncSession = Depends(get_db),
    user: User = Depends(get_current_user),
):
    """Set an anomaly's triage status (and optionally its resolution notes)."""
    row = (
        await db.execute(
            select(AnomalyDetection).where(AnomalyDetection.id == anomaly_id)
        )
    ).scalar_one_or_none()
    if row is None:
        raise HTTPException(status_code=404, detail="异常记录不存在")
    row.status = data.status
    # Notes are only overwritten when a non-empty value is supplied.
    if data.resolution_notes:
        row.resolution_notes = data.resolution_notes
    return {"message": "已更新", "id": row.id, "status": row.status}
|
||||
|
||||
|
||||
@router.post("/anomalies/scan")
async def trigger_anomaly_scan(
    device_id: int | None = None,
    db: AsyncSession = Depends(get_db),
    user: User = Depends(get_current_user),
):
    """Run an anomaly scan (optionally limited to one device) and summarize findings."""
    found = await scan_anomalies(db, device_id)
    summaries = [
        {
            "device_id": item.device_id,
            "anomaly_type": item.anomaly_type,
            "severity": item.severity,
            "description": item.description,
        }
        for item in found
    ]
    return {
        "scanned_at": str(datetime.now(timezone.utc)),
        "anomalies_found": len(summaries),
        "anomalies": summaries,
    }
|
||||
|
||||
|
||||
# ── Diagnostics ─────────────────────────────────────────────────────
|
||||
|
||||
@router.get("/diagnostics")
async def list_diagnostics(
    device_id: int | None = None,
    page: int = Query(1, ge=1),
    page_size: int = Query(20, ge=1, le=100),
    db: AsyncSession = Depends(get_db),
    user: User = Depends(get_current_user),
):
    """Paginated diagnostic reports, newest first, optionally for one device.

    Returns ``{"total": <matching rows>, "items": [...]}``.
    """
    query = select(DiagnosticReport)
    # NOTE(review): truthiness test skips device_id == 0 — fine if ids start at 1.
    if device_id:
        query = query.where(DiagnosticReport.device_id == device_id)

    # Count the filtered set before pagination is applied.
    count_q = select(func.count()).select_from(query.subquery())
    total = (await db.execute(count_q)).scalar()

    query = query.order_by(DiagnosticReport.generated_at.desc()).offset((page - 1) * page_size).limit(page_size)
    result = await db.execute(query)
    reports = result.scalars().all()

    # Resolve device names for the current page in a single query.
    dev_ids = list(set(r.device_id for r in reports))
    dev_map = {}
    if dev_ids:
        dev_result = await db.execute(select(Device.id, Device.name).where(Device.id.in_(dev_ids)))
        dev_map = {r.id: r.name for r in dev_result.all()}

    return {
        "total": total,
        "items": [{
            "id": r.id,
            "device_id": r.device_id,
            "device_name": dev_map.get(r.device_id, f"#{r.device_id}"),
            "generated_at": str(r.generated_at),
            "report_type": r.report_type,
            "findings": r.findings,
            "recommendations": r.recommendations,
            "estimated_impact": r.estimated_impact,
            "status": r.status,
        } for r in reports],
    }
|
||||
|
||||
|
||||
@router.post("/diagnostics/{device_id}/run")
async def trigger_diagnostics(
    device_id: int,
    db: AsyncSession = Depends(get_db),
    user: User = Depends(get_current_user),
):
    """Run a diagnostic pass on one device and return the resulting report."""
    try:
        rpt = await run_diagnostics(db, device_id)
        fields = (
            "id", "device_id", "report_type", "findings",
            "recommendations", "estimated_impact", "status",
        )
        payload = {name: getattr(rpt, name) for name in fields}
        payload["generated_at"] = str(rpt.generated_at)
        return payload
    except ValueError as e:
        # The diagnostics service signals an unknown device with ValueError.
        raise HTTPException(status_code=404, detail=str(e))
|
||||
|
||||
|
||||
@router.get("/diagnostics/{report_id}")
async def get_diagnostic_detail(
    report_id: int,
    db: AsyncSession = Depends(get_db),
    user: User = Depends(get_current_user),
):
    """Return one diagnostic report with its device name resolved."""
    report = (
        await db.execute(
            select(DiagnosticReport).where(DiagnosticReport.id == report_id)
        )
    ).scalar_one_or_none()
    if report is None:
        raise HTTPException(status_code=404, detail="诊断报告不存在")

    name = (
        await db.execute(select(Device.name).where(Device.id == report.device_id))
    ).scalar()

    return {
        "id": report.id,
        "device_id": report.device_id,
        "device_name": name or f"#{report.device_id}",
        "generated_at": str(report.generated_at),
        "report_type": report.report_type,
        "findings": report.findings,
        "recommendations": report.recommendations,
        "estimated_impact": report.estimated_impact,
        "status": report.status,
    }
|
||||
|
||||
|
||||
# ── Predictive Maintenance ──────────────────────────────────────────
|
||||
|
||||
@router.get("/maintenance/predictions")
async def list_predictions(
    status: str | None = None,
    urgency: str | None = None,
    page: int = Query(1, ge=1),
    page_size: int = Query(20, ge=1, le=100),
    db: AsyncSession = Depends(get_db),
    user: User = Depends(get_current_user),
):
    """Paginated maintenance predictions, newest first.

    Optional filters: status, urgency.  Returns
    ``{"total": <matching rows>, "items": [...]}``.
    """
    query = select(MaintenancePrediction)
    if status:
        query = query.where(MaintenancePrediction.status == status)
    if urgency:
        query = query.where(MaintenancePrediction.urgency == urgency)

    # Count the filtered set before pagination is applied.
    count_q = select(func.count()).select_from(query.subquery())
    total = (await db.execute(count_q)).scalar()

    query = query.order_by(MaintenancePrediction.predicted_at.desc()).offset((page - 1) * page_size).limit(page_size)
    result = await db.execute(query)
    predictions = result.scalars().all()

    # Resolve device names for the current page in a single query.
    dev_ids = list(set(p.device_id for p in predictions))
    dev_map = {}
    if dev_ids:
        dev_result = await db.execute(select(Device.id, Device.name).where(Device.id.in_(dev_ids)))
        dev_map = {r.id: r.name for r in dev_result.all()}

    return {
        "total": total,
        "items": [{
            "id": p.id,
            "device_id": p.device_id,
            "device_name": dev_map.get(p.device_id, f"#{p.device_id}"),
            "predicted_at": str(p.predicted_at),
            "component": p.component,
            "failure_mode": p.failure_mode,
            "probability": p.probability,
            "predicted_failure_date": str(p.predicted_failure_date) if p.predicted_failure_date else None,
            "recommended_action": p.recommended_action,
            "urgency": p.urgency,
            "estimated_downtime_hours": p.estimated_downtime_hours,
            "estimated_repair_cost": p.estimated_repair_cost,
            "status": p.status,
        } for p in predictions],
    }
|
||||
|
||||
|
||||
@router.get("/maintenance/schedule")
async def get_maintenance_schedule(
    db: AsyncSession = Depends(get_db),
    user: User = Depends(get_current_user),
):
    """Recommended maintenance plan: open predictions by predicted failure date.

    Includes only predictions in the "predicted" or "scheduled" states.
    NOTE(review): rows with NULL predicted_failure_date sort first or last
    depending on the database backend — confirm that is acceptable.
    """
    result = await db.execute(
        select(MaintenancePrediction).where(
            MaintenancePrediction.status.in_(["predicted", "scheduled"])
        ).order_by(MaintenancePrediction.predicted_failure_date.asc())
    )
    predictions = result.scalars().all()

    # Resolve device names in a single query.
    dev_ids = list(set(p.device_id for p in predictions))
    dev_map = {}
    if dev_ids:
        dev_result = await db.execute(select(Device.id, Device.name).where(Device.id.in_(dev_ids)))
        dev_map = {r.id: r.name for r in dev_result.all()}

    return [{
        "id": p.id,
        "device_id": p.device_id,
        "device_name": dev_map.get(p.device_id, f"#{p.device_id}"),
        "component": p.component,
        "failure_mode": p.failure_mode,
        "probability": p.probability,
        "predicted_failure_date": str(p.predicted_failure_date) if p.predicted_failure_date else None,
        "recommended_action": p.recommended_action,
        "urgency": p.urgency,
        "estimated_downtime_hours": p.estimated_downtime_hours,
        "estimated_repair_cost": p.estimated_repair_cost,
        "status": p.status,
    } for p in predictions]
|
||||
|
||||
|
||||
class PredictionStatusUpdate(BaseModel):
    """Request body for updating a maintenance prediction's workflow status."""

    # Allowed values used by the UI: scheduled, completed, false_alarm.
    status: str
|
||||
|
||||
|
||||
@router.put("/maintenance/predictions/{prediction_id}")
async def update_prediction(
    prediction_id: int,
    data: PredictionStatusUpdate,
    db: AsyncSession = Depends(get_db),
    user: User = Depends(get_current_user),
):
    """Set the workflow status of one maintenance prediction."""
    row = (
        await db.execute(
            select(MaintenancePrediction).where(MaintenancePrediction.id == prediction_id)
        )
    ).scalar_one_or_none()
    if row is None:
        raise HTTPException(status_code=404, detail="预测记录不存在")
    row.status = data.status
    return {"message": "已更新", "id": row.id, "status": row.status}
|
||||
|
||||
|
||||
@router.post("/maintenance/predict")
async def trigger_predictions(
    db: AsyncSession = Depends(get_db),
    user: User = Depends(get_current_user),
):
    """Run maintenance-prediction generation and summarize what was produced."""
    produced = await generate_maintenance_predictions(db)
    summaries = [
        {
            "device_id": p.device_id,
            "component": p.component,
            "urgency": p.urgency,
            "probability": p.probability,
        }
        for p in produced
    ]
    return {"generated": len(summaries), "predictions": summaries}
|
||||
|
||||
|
||||
# ── Insights ────────────────────────────────────────────────────────
|
||||
|
||||
@router.get("/insights")
async def list_insights(
    insight_type: str | None = None,
    impact_level: str | None = None,
    page: int = Query(1, ge=1),
    page_size: int = Query(20, ge=1, le=100),
    db: AsyncSession = Depends(get_db),
    user: User = Depends(get_current_user),
):
    """Paginated operational insights, newest first.

    Optional filters: insight_type, impact_level.  Returns
    ``{"total": <matching rows>, "items": [...]}``.
    """
    query = select(OpsInsight)
    if insight_type:
        query = query.where(OpsInsight.insight_type == insight_type)
    if impact_level:
        query = query.where(OpsInsight.impact_level == impact_level)

    # Count the filtered set before pagination is applied.
    count_q = select(func.count()).select_from(query.subquery())
    total = (await db.execute(count_q)).scalar()

    query = query.order_by(OpsInsight.generated_at.desc()).offset((page - 1) * page_size).limit(page_size)
    result = await db.execute(query)
    insights = result.scalars().all()

    return {
        "total": total,
        "items": [{
            "id": i.id,
            "insight_type": i.insight_type,
            "title": i.title,
            "description": i.description,
            "data": i.data,
            "impact_level": i.impact_level,
            "actionable": i.actionable,
            "recommended_action": i.recommended_action,
            "generated_at": str(i.generated_at),
            # valid_until is nullable: None means the insight never expires.
            "valid_until": str(i.valid_until) if i.valid_until else None,
        } for i in insights],
    }
|
||||
|
||||
|
||||
@router.get("/insights/latest")
async def get_latest_insights(
    limit: int = Query(5, ge=1, le=20),
    db: AsyncSession = Depends(get_db),
    user: User = Depends(get_current_user),
):
    """Return the newest still-valid insights.

    An insight is valid when ``valid_until`` is NULL (no expiry) or lies in
    the future.  Fix: the previous ``valid_until >= now`` filter silently
    dropped never-expiring insights — the list endpoint serializes
    ``valid_until`` as nullable, so NULL is a legal state.
    """
    now = datetime.now(timezone.utc)
    result = await db.execute(
        select(OpsInsight).where(
            or_(OpsInsight.valid_until.is_(None), OpsInsight.valid_until >= now)
        ).order_by(OpsInsight.generated_at.desc()).limit(limit)
    )
    insights = result.scalars().all()
    return [{
        "id": i.id,
        "insight_type": i.insight_type,
        "title": i.title,
        "description": i.description,
        "impact_level": i.impact_level,
        "actionable": i.actionable,
        "recommended_action": i.recommended_action,
        "generated_at": str(i.generated_at),
    } for i in insights]
|
||||
|
||||
|
||||
@router.post("/insights/generate")
async def trigger_insights(
    db: AsyncSession = Depends(get_db),
    user: User = Depends(get_current_user),
):
    """Run insight generation and return a brief summary of each new insight."""
    produced = await generate_insights(db)
    summaries = [
        {
            "title": item.title,
            "insight_type": item.insight_type,
            "impact_level": item.impact_level,
        }
        for item in produced
    ]
    return {"generated": len(summaries), "insights": summaries}
|
||||
|
||||
|
||||
# ── Dashboard ───────────────────────────────────────────────────────
|
||||
|
||||
@router.get("/dashboard")
async def ai_ops_dashboard(
    db: AsyncSession = Depends(get_db),
    user: User = Depends(get_current_user),
):
    """Aggregated AI-ops overview data for the dashboard page."""
    dashboard = await get_dashboard_data(db)
    return dashboard
|
||||
@@ -1,16 +1,55 @@
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from fastapi import APIRouter, Depends, Query
|
||||
from datetime import date, datetime, timedelta, timezone
|
||||
from typing import Optional
|
||||
from fastapi import APIRouter, Depends, Query, HTTPException, Body
|
||||
from pydantic import BaseModel, Field
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy import select, func, and_, text
|
||||
from app.core.database import get_db
|
||||
from app.core.config import get_settings
|
||||
from app.core.deps import get_current_user
|
||||
from app.models.carbon import CarbonEmission, EmissionFactor
|
||||
from app.models.carbon import (
|
||||
CarbonEmission, EmissionFactor, CarbonTarget, CarbonReduction,
|
||||
GreenCertificate, CarbonReport, CarbonBenchmark,
|
||||
)
|
||||
from app.models.user import User
|
||||
from app.services import carbon_asset
|
||||
|
||||
router = APIRouter(prefix="/carbon", tags=["碳排放管理"])
|
||||
|
||||
|
||||
# --------------- Pydantic Schemas ---------------
|
||||
|
||||
class TargetCreate(BaseModel):
    """Request body for creating a carbon-reduction target."""

    year: int
    # None -> an annual target; otherwise a monthly target.
    # NOTE(review): 1-12 range presumably expected — confirm validation upstream.
    month: Optional[int] = None
    target_emission_tons: float
|
||||
|
||||
class TargetUpdate(BaseModel):
    """Partial update for an existing carbon target; omitted fields stay unchanged."""

    target_emission_tons: Optional[float] = None
    status: Optional[str] = None
|
||||
|
||||
class CertificateCreate(BaseModel):
    """Request body for registering a green certificate."""

    certificate_type: str
    certificate_number: str
    issue_date: date
    # None means the certificate has no expiry date.
    expiry_date: Optional[date] = None
    energy_mwh: float
    price_yuan: float = 0
    status: str = "active"
    # Optional link to the generating device.
    source_device_id: Optional[int] = None
    notes: Optional[str] = None
|
||||
|
||||
class CertificateUpdate(BaseModel):
    """Partial update for a green certificate; only provided fields are applied."""

    status: Optional[str] = None
    price_yuan: Optional[float] = None
    notes: Optional[str] = None
|
||||
|
||||
class ReportGenerate(BaseModel):
    """Parameters for generating a carbon report over a date range."""

    # The regex restricts input to the three supported report granularities.
    report_type: str = Field(..., pattern="^(monthly|quarterly|annual)$")
    period_start: date
    period_end: date
|
||||
|
||||
|
||||
@router.get("/overview")
|
||||
async def carbon_overview(db: AsyncSession = Depends(get_db), user: User = Depends(get_current_user)):
|
||||
"""碳排放总览"""
|
||||
@@ -79,3 +118,317 @@ async def list_factors(db: AsyncSession = Depends(get_db), user: User = Depends(
|
||||
"id": f.id, "name": f.name, "energy_type": f.energy_type, "factor": f.factor,
|
||||
"unit": f.unit, "scope": f.scope, "region": f.region, "source": f.source,
|
||||
} for f in result.scalars().all()]
|
||||
|
||||
|
||||
# =============== Carbon Dashboard ===============
|
||||
|
||||
@router.get("/dashboard")
async def carbon_dashboard(db: AsyncSession = Depends(get_db), user: User = Depends(get_current_user)):
    """Combined carbon-asset dashboard payload."""
    payload = await carbon_asset.get_carbon_dashboard(db)
    return payload
|
||||
|
||||
|
||||
# =============== Carbon Targets ===============
|
||||
|
||||
@router.get("/targets")
async def list_targets(
    year: Optional[int] = Query(None),
    db: AsyncSession = Depends(get_db),
    user: User = Depends(get_current_user),
):
    """List carbon-reduction targets, optionally filtered by year.

    Fix: ``year`` was annotated ``int`` with a ``None`` default; the
    annotation now matches the accepted values (``Optional[int]``).
    """
    q = select(CarbonTarget).order_by(CarbonTarget.year.desc(), CarbonTarget.month)
    if year:
        q = q.where(CarbonTarget.year == year)
    result = await db.execute(q)
    targets = result.scalars().all()
    return [{
        "id": t.id, "year": t.year, "month": t.month,
        "target_emission_tons": t.target_emission_tons,
        "actual_emission_tons": t.actual_emission_tons,
        "status": t.status,
    } for t in targets]
|
||||
|
||||
|
||||
@router.post("/targets")
async def create_target(
    data: TargetCreate,
    db: AsyncSession = Depends(get_db),
    user: User = Depends(get_current_user),
):
    """Create a new carbon-reduction target."""
    record = CarbonTarget(
        year=data.year,
        month=data.month,
        target_emission_tons=data.target_emission_tons,
    )
    db.add(record)
    # Flush so the generated primary key is available for the response.
    await db.flush()
    return {"id": record.id, "message": "目标创建成功"}
|
||||
|
||||
|
||||
@router.put("/targets/{target_id}")
async def update_target(
    target_id: int,
    data: TargetUpdate,
    db: AsyncSession = Depends(get_db),
    user: User = Depends(get_current_user),
):
    """Apply a partial update to an existing carbon target."""
    found = (
        await db.execute(select(CarbonTarget).where(CarbonTarget.id == target_id))
    ).scalar_one_or_none()
    if found is None:
        raise HTTPException(404, "目标不存在")
    if data.target_emission_tons is not None:
        found.target_emission_tons = data.target_emission_tons
    if data.status is not None:
        found.status = data.status
    return {"message": "更新成功"}
|
||||
|
||||
|
||||
@router.get("/targets/progress")
async def target_progress(
    year: Optional[int] = Query(None),
    db: AsyncSession = Depends(get_db),
    user: User = Depends(get_current_user),
):
    """Progress against the carbon targets for *year*.

    Defaults to the current UTC year when no year is supplied.
    Fix: ``year`` was annotated ``int`` with a ``None`` default — now
    ``Optional[int]`` so the annotation matches the accepted values.
    """
    if year is None:
        year = datetime.now(timezone.utc).year
    return await carbon_asset.get_target_progress(db, year)
|
||||
|
||||
|
||||
# =============== Carbon Reductions ===============
|
||||
|
||||
@router.get("/reductions")
async def list_reductions(
    start: Optional[date] = Query(None),
    end: Optional[date] = Query(None),
    source_type: Optional[str] = Query(None),
    db: AsyncSession = Depends(get_db),
    user: User = Depends(get_current_user),
):
    """List carbon-reduction activities, newest first (capped at 500 rows).

    All filters are optional.  Fix: the filter parameters were annotated
    non-optional (``date``/``str``) with ``None`` defaults; annotations now
    match the accepted values.
    """
    q = select(CarbonReduction).order_by(CarbonReduction.date.desc())
    if start:
        q = q.where(CarbonReduction.date >= start)
    if end:
        q = q.where(CarbonReduction.date <= end)
    if source_type:
        q = q.where(CarbonReduction.source_type == source_type)
    result = await db.execute(q.limit(500))
    items = result.scalars().all()
    return [{
        "id": r.id, "source_type": r.source_type, "date": str(r.date),
        "reduction_tons": r.reduction_tons, "equivalent_trees": r.equivalent_trees,
        "methodology": r.methodology, "verified": r.verified,
    } for r in items]
|
||||
|
||||
|
||||
@router.get("/reductions/summary")
async def reduction_summary(
    start: Optional[date] = Query(None),
    end: Optional[date] = Query(None),
    db: AsyncSession = Depends(get_db),
    user: User = Depends(get_current_user),
):
    """Reduction totals grouped by source type.

    Defaults to the current UTC year-to-date when no range is supplied.
    Fix: ``Optional[date]`` annotations to match the ``None`` defaults.
    """
    if not start:
        start = date(datetime.now(timezone.utc).year, 1, 1)
    if not end:
        end = datetime.now(timezone.utc).date()
    return await carbon_asset.get_reduction_summary(db, start, end)
|
||||
|
||||
|
||||
@router.post("/reductions/calculate")
async def calculate_reductions(
    start: Optional[date] = Query(None),
    end: Optional[date] = Query(None),
    db: AsyncSession = Depends(get_db),
    user: User = Depends(get_current_user),
):
    """Trigger reduction-amount calculation over the given date range.

    Defaults to the current UTC year-to-date when no range is supplied.
    Fix: ``Optional[date]`` annotations to match the ``None`` defaults.
    """
    if not start:
        start = date(datetime.now(timezone.utc).year, 1, 1)
    if not end:
        end = datetime.now(timezone.utc).date()
    return await carbon_asset.trigger_reduction_calculation(db, start, end)
|
||||
|
||||
|
||||
# =============== Green Certificates ===============
|
||||
|
||||
@router.get("/certificates")
async def list_certificates(
    status: Optional[str] = Query(None),
    db: AsyncSession = Depends(get_db),
    user: User = Depends(get_current_user),
):
    """List green certificates, newest issue date first, optionally by status.

    Fix: ``status`` was annotated ``str`` with a ``None`` default — now
    ``Optional[str]`` so the annotation matches the accepted values.
    """
    q = select(GreenCertificate).order_by(GreenCertificate.issue_date.desc())
    if status:
        q = q.where(GreenCertificate.status == status)
    result = await db.execute(q)
    certs = result.scalars().all()
    return [{
        "id": c.id, "certificate_type": c.certificate_type,
        "certificate_number": c.certificate_number,
        "issue_date": str(c.issue_date), "expiry_date": str(c.expiry_date) if c.expiry_date else None,
        "energy_mwh": c.energy_mwh, "price_yuan": c.price_yuan,
        "status": c.status, "notes": c.notes,
    } for c in certs]
|
||||
|
||||
|
||||
@router.post("/certificates")
async def create_certificate(
    data: CertificateCreate,
    db: AsyncSession = Depends(get_db),
    user: User = Depends(get_current_user),
):
    """Register a green certificate."""
    record = GreenCertificate(**data.model_dump())
    db.add(record)
    # Flush so the generated primary key is available for the response.
    await db.flush()
    return {"id": record.id, "message": "绿证登记成功"}
|
||||
|
||||
|
||||
@router.put("/certificates/{cert_id}")
async def update_certificate(
    cert_id: int,
    data: CertificateUpdate,
    db: AsyncSession = Depends(get_db),
    user: User = Depends(get_current_user),
):
    """Apply the provided fields to an existing green certificate."""
    found = (
        await db.execute(select(GreenCertificate).where(GreenCertificate.id == cert_id))
    ).scalar_one_or_none()
    if found is None:
        raise HTTPException(404, "绿证不存在")
    # Only fields explicitly sent by the client are applied.
    changes = data.model_dump(exclude_unset=True)
    for field_name, value in changes.items():
        setattr(found, field_name, value)
    return {"message": "更新成功"}
|
||||
|
||||
|
||||
@router.get("/certificates/value")
async def certificate_portfolio_value(
    db: AsyncSession = Depends(get_db),
    user: User = Depends(get_current_user),
):
    """Current value summary of the green-certificate portfolio."""
    portfolio = await carbon_asset.get_certificate_portfolio_value(db)
    return portfolio
|
||||
|
||||
|
||||
# =============== Carbon Reports ===============
|
||||
|
||||
@router.get("/reports")
async def list_reports(
    db: AsyncSession = Depends(get_db),
    user: User = Depends(get_current_user),
):
    """Most recent carbon reports, newest first, capped at 50 rows."""
    stmt = select(CarbonReport).order_by(CarbonReport.generated_at.desc()).limit(50)
    rows = (await db.execute(stmt)).scalars().all()
    return [
        {
            "id": rep.id,
            "report_type": rep.report_type,
            "period_start": str(rep.period_start),
            "period_end": str(rep.period_end),
            "generated_at": str(rep.generated_at),
            "total_tons": rep.total_tons,
            "reduction_tons": rep.reduction_tons,
            "net_tons": rep.net_tons,
        }
        for rep in rows
    ]
|
||||
|
||||
|
||||
@router.post("/reports/generate")
async def generate_report(
    data: ReportGenerate,
    db: AsyncSession = Depends(get_db),
    user: User = Depends(get_current_user),
):
    """Generate a carbon report for the requested period via the service layer."""
    report = await carbon_asset.generate_carbon_report(
        db,
        data.report_type,
        data.period_start,
        data.period_end,
    )
    # Flush so the new report row receives its primary key before responding.
    await db.flush()
    return {
        "id": report.id,
        "total_tons": report.total_tons,
        "reduction_tons": report.reduction_tons,
        "net_tons": report.net_tons,
        "message": "报告生成成功",
    }
|
||||
|
||||
|
||||
@router.get("/reports/{report_id}")
async def get_report(
    report_id: int,
    db: AsyncSession = Depends(get_db),
    user: User = Depends(get_current_user),
):
    """Full detail of a single carbon report, including scope breakdown."""
    row = await db.execute(select(CarbonReport).where(CarbonReport.id == report_id))
    report = row.scalar_one_or_none()
    if report is None:
        raise HTTPException(404, "报告不存在")
    return {
        "id": report.id,
        "report_type": report.report_type,
        "period_start": str(report.period_start),
        "period_end": str(report.period_end),
        "generated_at": str(report.generated_at),
        "scope1_tons": report.scope1_tons,
        "scope2_tons": report.scope2_tons,
        "scope3_tons": report.scope3_tons,
        "total_tons": report.total_tons,
        "reduction_tons": report.reduction_tons,
        "net_tons": report.net_tons,
        "report_data": report.report_data,
    }
|
||||
|
||||
|
||||
@router.get("/reports/{report_id}/download")
async def download_report(
    report_id: int,
    db: AsyncSession = Depends(get_db),
    user: User = Depends(get_current_user),
):
    """Download-oriented payload for a report (summary figures plus raw detail)."""
    row = await db.execute(select(CarbonReport).where(CarbonReport.id == report_id))
    report = row.scalar_one_or_none()
    if report is None:
        raise HTTPException(404, "报告不存在")
    return {
        "report_type": report.report_type,
        "period": f"{report.period_start} ~ {report.period_end}",
        "scope1_tons": report.scope1_tons,
        "scope2_tons": report.scope2_tons,
        "total_tons": report.total_tons,
        "reduction_tons": report.reduction_tons,
        "net_tons": report.net_tons,
        "detail": report.report_data,
    }
|
||||
|
||||
|
||||
# =============== Carbon Benchmarks ===============
|
||||
|
||||
@router.get("/benchmarks")
async def list_benchmarks(
    # Fix: annotation now matches the None default (was `int = Query(None)`).
    year: int | None = Query(None),
    db: AsyncSession = Depends(get_db),
    user: User = Depends(get_current_user),
):
    """Industry carbon benchmarks, newest year first, optionally filtered by year."""
    q = select(CarbonBenchmark).order_by(CarbonBenchmark.year.desc())
    if year:
        q = q.where(CarbonBenchmark.year == year)
    result = await db.execute(q)
    items = result.scalars().all()
    return [{
        "id": b.id, "industry": b.industry, "metric_name": b.metric_name,
        "benchmark_value": b.benchmark_value, "unit": b.unit,
        "year": b.year, "source": b.source, "notes": b.notes,
    } for b in items]
|
||||
|
||||
|
||||
@router.get("/benchmarks/comparison")
async def benchmark_comparison(
    # Fix: annotation now matches the None default (was `int = Query(None)`).
    year: int | None = Query(None),
    db: AsyncSession = Depends(get_db),
    user: User = Depends(get_current_user),
):
    """Compare our emissions against industry benchmarks for the given year.

    Defaults to the current UTC year when no year is supplied.
    """
    if year is None:
        year = datetime.now(timezone.utc).year
    return await carbon_asset.compare_with_benchmarks(db, year)
|
||||
|
||||
376
backend/app/api/v1/energy_strategy.py
Normal file
376
backend/app/api/v1/energy_strategy.py
Normal file
@@ -0,0 +1,376 @@
|
||||
from datetime import date, datetime
|
||||
from fastapi import APIRouter, Depends, Query, HTTPException
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy import select
|
||||
from pydantic import BaseModel
|
||||
from app.core.database import get_db
|
||||
from app.core.deps import get_current_user
|
||||
from app.models.energy_strategy import (
|
||||
TouPricing, TouPricingPeriod, EnergyStrategy, StrategyExecution,
|
||||
)
|
||||
from app.models.user import User
|
||||
from app.services.energy_strategy import (
|
||||
get_active_tou_pricing, get_tou_periods,
|
||||
calculate_monthly_cost_breakdown, get_recommendations,
|
||||
get_savings_report, simulate_strategy_impact, DEFAULT_PERIODS, PERIOD_LABELS,
|
||||
)
|
||||
|
||||
router = APIRouter(prefix="/strategy", tags=["策略优化"])
|
||||
|
||||
|
||||
# ---- Schemas ----
|
||||
|
||||
class TouPricingCreate(BaseModel):
    """Payload for creating/updating a time-of-use (TOU) pricing configuration."""
    name: str
    region: str = "北京"
    # ISO dates ("YYYY-MM-DD"); the endpoint parses them with date.fromisoformat.
    effective_date: str | None = None
    end_date: str | None = None
|
||||
|
||||
|
||||
class TouPricingPeriodCreate(BaseModel):
    """One tariff period inside a TOU pricing configuration."""
    period_type: str  # sharp_peak, peak, flat, valley
    start_time: str  # HH:MM
    end_time: str  # HH:MM
    price_yuan_per_kwh: float
    # Optional month restriction, e.g. "7-8" for summer-only periods.
    month_range: str | None = None
|
||||
|
||||
|
||||
class TouPricingPeriodsSet(BaseModel):
    """Full replacement list of periods for one pricing configuration."""
    periods: list[TouPricingPeriodCreate]
|
||||
|
||||
|
||||
class EnergyStrategyCreate(BaseModel):
    """Payload for creating an energy-optimization strategy."""
    name: str
    strategy_type: str  # heat_storage, load_shift, pv_priority
    description: str | None = None
    # Strategy-type-specific tuning knobs (free-form JSON dict).
    parameters: dict | None = None
    # Higher priority strategies are listed/applied first.
    priority: int = 0
|
||||
|
||||
|
||||
class EnergyStrategyUpdate(BaseModel):
    """Partial update for a strategy; only non-None fields are applied."""
    name: str | None = None
    description: str | None = None
    parameters: dict | None = None
    is_enabled: bool | None = None
    priority: int | None = None
|
||||
|
||||
|
||||
class SimulateRequest(BaseModel):
    """Inputs for the strategy-impact simulation endpoint."""
    daily_consumption_kwh: float = 2000
    pv_daily_kwh: float = 800
    # Strategy types to include in the simulation run.
    strategies: list[str] = ["heat_storage", "pv_priority", "load_shift"]
|
||||
|
||||
|
||||
# ---- TOU Pricing ----
|
||||
|
||||
@router.get("/pricing")
async def list_tou_pricing(
    db: AsyncSession = Depends(get_db),
    user: User = Depends(get_current_user),
):
    """All TOU pricing configurations (newest first), each with its periods."""

    def serialize_period(pp):
        # Shared shape for a single tariff period entry.
        return {
            "id": pp.id,
            "period_type": pp.period_type,
            "period_label": PERIOD_LABELS.get(pp.period_type, pp.period_type),
            "start_time": pp.start_time,
            "end_time": pp.end_time,
            "price_yuan_per_kwh": pp.price_yuan_per_kwh,
            "month_range": pp.month_range,
        }

    rows = await db.execute(select(TouPricing).order_by(TouPricing.created_at.desc()))
    payload = []
    for pricing in rows.scalars().all():
        # NOTE(review): one periods query per pricing row (N+1); acceptable for
        # a small configuration table.
        periods = await get_tou_periods(db, pricing.id)
        payload.append({
            "id": pricing.id,
            "name": pricing.name,
            "region": pricing.region,
            "effective_date": str(pricing.effective_date) if pricing.effective_date else None,
            "end_date": str(pricing.end_date) if pricing.end_date else None,
            "is_active": pricing.is_active,
            "created_at": str(pricing.created_at),
            "periods": [serialize_period(pp) for pp in periods],
        })
    return payload
|
||||
|
||||
|
||||
@router.post("/pricing")
async def create_tou_pricing(
    data: TouPricingCreate,
    db: AsyncSession = Depends(get_db),
    user: User = Depends(get_current_user),
):
    """Create a TOU pricing configuration.

    Fix: a malformed ISO date previously raised ValueError and surfaced as a
    500; it now returns a 400 with a clear message.
    """
    def _parse(value: str | None) -> date | None:
        if not value:
            return None
        try:
            return date.fromisoformat(value)
        except ValueError:
            raise HTTPException(status_code=400, detail=f"无效日期格式: {value}")

    pricing = TouPricing(
        name=data.name,
        region=data.region,
        effective_date=_parse(data.effective_date),
        end_date=_parse(data.end_date),
        created_by=user.id,
    )
    db.add(pricing)
    # Flush so the generated primary key is available for the response.
    await db.flush()
    return {"id": pricing.id, "message": "分时电价配置创建成功"}
|
||||
|
||||
|
||||
@router.put("/pricing/{pricing_id}")
async def update_tou_pricing(
    pricing_id: int,
    data: TouPricingCreate,
    db: AsyncSession = Depends(get_db),
    user: User = Depends(get_current_user),
):
    """Update a TOU pricing configuration.

    Fix: a malformed ISO date previously raised ValueError and surfaced as a
    500; it now returns a 400 with a clear message.
    """
    def _parse(value: str | None) -> date | None:
        if not value:
            return None
        try:
            return date.fromisoformat(value)
        except ValueError:
            raise HTTPException(status_code=400, detail=f"无效日期格式: {value}")

    result = await db.execute(select(TouPricing).where(TouPricing.id == pricing_id))
    pricing = result.scalar_one_or_none()
    if not pricing:
        raise HTTPException(status_code=404, detail="电价配置不存在")
    pricing.name = data.name
    pricing.region = data.region
    pricing.effective_date = _parse(data.effective_date)
    pricing.end_date = _parse(data.end_date)
    return {"message": "电价配置更新成功"}
|
||||
|
||||
|
||||
@router.get("/pricing/{pricing_id}/periods")
async def get_pricing_periods(
    pricing_id: int,
    db: AsyncSession = Depends(get_db),
    user: User = Depends(get_current_user),
):
    """Tariff periods of one pricing configuration.

    NOTE(review): an unknown pricing_id yields an empty list rather than 404,
    matching the original behavior.
    """
    periods = await get_tou_periods(db, pricing_id)
    payload = []
    for period in periods:
        payload.append({
            "id": period.id,
            "period_type": period.period_type,
            "period_label": PERIOD_LABELS.get(period.period_type, period.period_type),
            "start_time": period.start_time,
            "end_time": period.end_time,
            "price_yuan_per_kwh": period.price_yuan_per_kwh,
            "month_range": period.month_range,
        })
    return payload
|
||||
|
||||
|
||||
@router.post("/pricing/{pricing_id}/periods")
async def set_pricing_periods(
    pricing_id: int,
    data: TouPricingPeriodsSet,
    db: AsyncSession = Depends(get_db),
    user: User = Depends(get_current_user),
):
    """Replace ALL periods of a pricing configuration with the submitted list."""
    exists = await db.execute(select(TouPricing).where(TouPricing.id == pricing_id))
    if exists.scalar_one_or_none() is None:
        raise HTTPException(status_code=404, detail="电价配置不存在")

    # Drop every existing period first — this endpoint is a full replacement.
    stale = await db.execute(
        select(TouPricingPeriod).where(TouPricingPeriod.pricing_id == pricing_id)
    )
    for old in stale.scalars().all():
        await db.delete(old)

    # Insert the new period set.
    for item in data.periods:
        db.add(TouPricingPeriod(
            pricing_id=pricing_id,
            period_type=item.period_type,
            start_time=item.start_time,
            end_time=item.end_time,
            price_yuan_per_kwh=item.price_yuan_per_kwh,
            month_range=item.month_range,
        ))

    return {"message": f"已设置{len(data.periods)}个时段"}
|
||||
|
||||
|
||||
# ---- Strategies ----
|
||||
|
||||
@router.get("/strategies")
async def list_strategies(
    db: AsyncSession = Depends(get_db),
    user: User = Depends(get_current_user),
):
    """Optimization strategies ordered by priority.

    Falls back to three built-in sample strategies when the table is empty so
    the frontend always has something to render.
    """
    rows = await db.execute(select(EnergyStrategy).order_by(EnergyStrategy.priority.desc()))
    strategies = rows.scalars().all()

    if strategies:
        return [
            {
                "id": s.id,
                "name": s.name,
                "strategy_type": s.strategy_type,
                "description": s.description,
                "is_enabled": s.is_enabled,
                "priority": s.priority,
                "parameters": s.parameters or {},
            }
            for s in strategies
        ]

    # Defaults (id=None marks them as not persisted).
    return [
        {
            "id": None, "name": "谷电蓄热", "strategy_type": "heat_storage",
            "description": "在低谷电价时段(23:00-7:00)预热水箱,减少尖峰时段热泵运行",
            "is_enabled": False, "priority": 3,
            "parameters": {"shift_ratio": 0.3, "valley_start": "23:00", "valley_end": "07:00"},
        },
        {
            "id": None, "name": "光伏自消纳优先", "strategy_type": "pv_priority",
            "description": "优先使用光伏发电供给园区负荷,减少向电网购电",
            "is_enabled": True, "priority": 2,
            "parameters": {"min_self_consumption_ratio": 0.7},
        },
        {
            "id": None, "name": "负荷转移", "strategy_type": "load_shift",
            "description": "将可调负荷从尖峰时段转移至平段或低谷时段",
            "is_enabled": False, "priority": 1,
            "parameters": {"max_shift_ratio": 0.15, "target_periods": ["flat", "valley"]},
        },
    ]
|
||||
|
||||
|
||||
@router.post("/strategies")
async def create_strategy(
    data: EnergyStrategyCreate,
    db: AsyncSession = Depends(get_db),
    user: User = Depends(get_current_user),
):
    """Create an optimization strategy (disabled by default per model defaults)."""
    record = EnergyStrategy(
        name=data.name,
        strategy_type=data.strategy_type,
        description=data.description,
        parameters=data.parameters or {},
        priority=data.priority,
    )
    db.add(record)
    # Flush so the generated primary key is available for the response.
    await db.flush()
    return {"id": record.id, "message": "策略创建成功"}
|
||||
|
||||
|
||||
@router.put("/strategies/{strategy_id}")
async def update_strategy(
    strategy_id: int,
    data: EnergyStrategyUpdate,
    db: AsyncSession = Depends(get_db),
    user: User = Depends(get_current_user),
):
    """Partially update a strategy; only non-None fields in the payload are applied."""
    row = await db.execute(select(EnergyStrategy).where(EnergyStrategy.id == strategy_id))
    strategy = row.scalar_one_or_none()
    if strategy is None:
        raise HTTPException(status_code=404, detail="策略不存在")

    # exclude_none mirrors the original per-field `is not None` checks.
    for field, value in data.model_dump(exclude_none=True).items():
        setattr(strategy, field, value)

    return {"message": "策略更新成功"}
|
||||
|
||||
|
||||
@router.put("/strategies/{strategy_id}/toggle")
async def toggle_strategy(
    strategy_id: int,
    db: AsyncSession = Depends(get_db),
    user: User = Depends(get_current_user),
):
    """Flip a strategy's enabled flag and report the new state."""
    row = await db.execute(select(EnergyStrategy).where(EnergyStrategy.id == strategy_id))
    strategy = row.scalar_one_or_none()
    if strategy is None:
        raise HTTPException(status_code=404, detail="策略不存在")
    new_state = not strategy.is_enabled
    strategy.is_enabled = new_state
    return {"is_enabled": new_state, "message": f"策略已{'启用' if new_state else '停用'}"}
|
||||
|
||||
|
||||
# ---- Analysis ----
|
||||
|
||||
@router.get("/cost-analysis")
async def cost_analysis(
    # Fix: defaults were hard-coded to 2026-04; now default to the current month.
    # Explicit year/month arguments behave exactly as before.
    year: int | None = Query(default=None),
    month: int | None = Query(default=None, ge=1, le=12),
    db: AsyncSession = Depends(get_db),
    user: User = Depends(get_current_user),
):
    """Monthly electricity-cost breakdown for the given (or current) month."""
    today = date.today()  # server-local date; adjust if deployment spans timezones
    if year is None:
        year = today.year
    if month is None:
        month = today.month
    return await calculate_monthly_cost_breakdown(db, year, month)
|
||||
|
||||
|
||||
@router.get("/savings-report")
async def savings_report(
    # Fix: default was hard-coded to 2026; now defaults to the current year.
    year: int | None = Query(default=None),
    db: AsyncSession = Depends(get_db),
    user: User = Depends(get_current_user),
):
    """Annual savings report for the given (or current) year."""
    if year is None:
        year = date.today().year  # server-local date
    return await get_savings_report(db, year)
|
||||
|
||||
|
||||
@router.get("/recommendations")
async def strategy_recommendations(
    db: AsyncSession = Depends(get_db),
    user: User = Depends(get_current_user),
):
    """Currently recommended strategies (delegates to the service layer)."""
    recommendations = await get_recommendations(db)
    return recommendations
|
||||
|
||||
|
||||
@router.post("/simulate")
async def simulate(
    data: SimulateRequest,
    db: AsyncSession = Depends(get_db),
    user: User = Depends(get_current_user),
):
    """Simulate the cost impact of the selected strategies.

    Uses the active TOU tariff when one exists; otherwise falls back to the
    built-in default periods (unsaved TouPricingPeriod instances).
    """
    pricing = await get_active_tou_pricing(db)
    if pricing is None:
        periods = [
            TouPricingPeriod(
                period_type=item["period_type"],
                start_time=item["start_time"],
                end_time=item["end_time"],
                price_yuan_per_kwh=item["price"],
            )
            for item in DEFAULT_PERIODS
        ]
    else:
        periods = await get_tou_periods(db, pricing.id)

    return simulate_strategy_impact(
        daily_consumption_kwh=data.daily_consumption_kwh,
        pv_daily_kwh=data.pv_daily_kwh,
        periods=periods,
        strategies=data.strategies,
    )
|
||||
|
||||
|
||||
@router.get("/default-pricing")
async def get_default_pricing(
    user: User = Depends(get_current_user),
):
    """Built-in Beijing industrial TOU tariff (no database access)."""
    labelled_periods = []
    for item in DEFAULT_PERIODS:
        entry = dict(item)
        entry["period_label"] = PERIOD_LABELS.get(item["period_type"], item["period_type"])
        labelled_periods.append(entry)
    return {
        "region": "北京",
        "type": "工业用电",
        "periods": labelled_periods,
    }
|
||||
185
backend/app/api/v1/prediction.py
Normal file
185
backend/app/api/v1/prediction.py
Normal file
@@ -0,0 +1,185 @@
|
||||
"""AI预测引擎 API - 光伏/负荷/热泵预测 & 自发自用优化"""
|
||||
|
||||
from datetime import datetime, timezone, timedelta
|
||||
from typing import Optional
|
||||
|
||||
from fastapi import APIRouter, Depends, Query, HTTPException
|
||||
from pydantic import BaseModel
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy import select, and_
|
||||
|
||||
from app.core.database import get_db
|
||||
from app.core.deps import get_current_user, require_roles
|
||||
from app.models.user import User
|
||||
from app.models.prediction import PredictionTask, PredictionResult, OptimizationSchedule
|
||||
from app.services.ai_prediction import (
|
||||
forecast_pv, forecast_load, forecast_heatpump_cop,
|
||||
optimize_self_consumption, get_prediction_accuracy, run_prediction,
|
||||
)
|
||||
|
||||
router = APIRouter(prefix="/prediction", tags=["AI预测"])
|
||||
|
||||
|
||||
# ── Schemas ────────────────────────────────────────────────────────────
|
||||
|
||||
class RunPredictionRequest(BaseModel):
    """Payload for triggering a prediction task."""
    # Target device; optional for fleet-wide prediction types.
    device_id: Optional[int] = None
    prediction_type: str  # pv, load, heatpump, optimization
    # Forecast window length in hours.
    horizon_hours: int = 24
    # Model-specific parameters (free-form JSON dict).
    parameters: Optional[dict] = None
|
||||
|
||||
|
||||
# ── Endpoints ──────────────────────────────────────────────────────────
|
||||
|
||||
@router.get("/forecast")
async def get_forecast(
    device_id: Optional[int] = None,
    type: str = Query("pv", pattern="^(pv|load|heatpump)$"),
    horizon: int = Query(24, ge=1, le=168),
    building_type: str = Query("office", pattern="^(office|factory)$"),
    db: AsyncSession = Depends(get_db),
    user: User = Depends(get_current_user),
):
    """Forecast endpoint — PV output / load / heat-pump COP.

    PV and heat-pump forecasts are per-device and require device_id; load
    forecasts can be fleet-wide.
    """
    if type == "pv":
        if not device_id:
            raise HTTPException(400, "光伏预测需要指定device_id")
        return await forecast_pv(db, device_id, horizon)
    elif type == "load":
        return await forecast_load(db, device_id, building_type, horizon)
    elif type == "heatpump":
        if not device_id:
            raise HTTPException(400, "热泵预测需要指定device_id")
        return await forecast_heatpump_cop(db, device_id, horizon)
    # Fix: defensive fall-through — the Query pattern should make this
    # unreachable, but a silent implicit-None response would be confusing.
    raise HTTPException(400, f"不支持的预测类型: {type}")
|
||||
|
||||
|
||||
@router.post("/run")
async def trigger_prediction(
    req: RunPredictionRequest,
    db: AsyncSession = Depends(get_db),
    user: User = Depends(get_current_user),
):
    """Kick off a prediction task and echo its resulting state."""
    task = await run_prediction(
        db,
        req.device_id,
        req.prediction_type,
        req.horizon_hours,
        req.parameters,
    )
    # error_message is included so clients can surface synchronous failures.
    return {
        "task_id": task.id,
        "status": task.status,
        "prediction_type": task.prediction_type,
        "horizon_hours": task.horizon_hours,
        "error_message": task.error_message,
    }
|
||||
|
||||
|
||||
@router.get("/accuracy")
async def prediction_accuracy(
    type: Optional[str] = Query(None, pattern="^(pv|load|heatpump|optimization)$"),
    days: int = Query(7, ge=1, le=90),
    db: AsyncSession = Depends(get_db),
    user: User = Depends(get_current_user),
):
    """Prediction accuracy metrics (MAE, RMSE, MAPE) over the recent window."""
    metrics = await get_prediction_accuracy(db, type, days)
    return metrics
|
||||
|
||||
|
||||
@router.get("/optimization")
async def get_optimization(
    horizon: int = Query(24, ge=1, le=72),
    db: AsyncSession = Depends(get_db),
    user: User = Depends(get_current_user),
):
    """Self-consumption optimization suggestion for the given horizon."""
    suggestion = await optimize_self_consumption(db, horizon)
    return suggestion
|
||||
|
||||
|
||||
@router.post("/optimization/{schedule_id}/approve")
async def approve_optimization(
    schedule_id: int,
    db: AsyncSession = Depends(get_db),
    user: User = Depends(require_roles("admin", "energy_manager")),
):
    """Approve a pending optimization schedule (admin / energy-manager only)."""
    row = await db.execute(
        select(OptimizationSchedule).where(OptimizationSchedule.id == schedule_id)
    )
    schedule = row.scalar_one_or_none()
    if schedule is None:
        raise HTTPException(404, "优化方案不存在")
    # Only pending schedules may be approved — guards against double approval.
    if schedule.status != "pending":
        raise HTTPException(400, f"方案状态为 {schedule.status},无法审批")

    schedule.status = "approved"
    schedule.approved_by = user.id
    schedule.approved_at = datetime.now(timezone.utc)
    return {"id": schedule.id, "status": "approved"}
|
||||
|
||||
|
||||
@router.get("/history")
async def prediction_history(
    type: Optional[str] = Query(None),
    days: int = Query(7, ge=1, le=30),
    page: int = Query(1, ge=1),
    page_size: int = Query(20, ge=1, le=100),
    db: AsyncSession = Depends(get_db),
    user: User = Depends(get_current_user),
):
    """Paginated list of prediction tasks within the lookback window."""
    since = datetime.now(timezone.utc) - timedelta(days=days)
    filters = [PredictionTask.created_at >= since]
    if type:
        filters.append(PredictionTask.prediction_type == type)

    stmt = (
        select(PredictionTask)
        .where(and_(*filters))
        .order_by(PredictionTask.created_at.desc())
        .offset((page - 1) * page_size)
        .limit(page_size)
    )
    tasks = (await db.execute(stmt)).scalars().all()

    return [
        {
            "id": task.id,
            "device_id": task.device_id,
            "prediction_type": task.prediction_type,
            "horizon_hours": task.horizon_hours,
            "status": task.status,
            "created_at": str(task.created_at) if task.created_at else None,
            "completed_at": str(task.completed_at) if task.completed_at else None,
            "error_message": task.error_message,
        }
        for task in tasks
    ]
|
||||
|
||||
|
||||
@router.get("/schedules")
async def list_schedules(
    status: Optional[str] = Query(None, pattern="^(pending|approved|executed|rejected)$"),
    days: int = Query(7, ge=1, le=30),
    db: AsyncSession = Depends(get_db),
    user: User = Depends(get_current_user),
):
    """Optimization schedules created within the lookback window, newest first."""
    since = datetime.now(timezone.utc) - timedelta(days=days)
    filters = [OptimizationSchedule.created_at >= since]
    if status:
        filters.append(OptimizationSchedule.status == status)

    stmt = (
        select(OptimizationSchedule)
        .where(and_(*filters))
        .order_by(OptimizationSchedule.created_at.desc())
    )
    schedules = (await db.execute(stmt)).scalars().all()

    return [
        {
            "id": sched.id,
            "device_id": sched.device_id,
            "date": str(sched.date) if sched.date else None,
            "expected_savings_kwh": sched.expected_savings_kwh,
            "expected_savings_yuan": sched.expected_savings_yuan,
            "status": sched.status,
            "schedule_data": sched.schedule_data,
            "created_at": str(sched.created_at) if sched.created_at else None,
        }
        for sched in schedules
    ]
|
||||
83
backend/app/api/v1/weather.py
Normal file
83
backend/app/api/v1/weather.py
Normal file
@@ -0,0 +1,83 @@
|
||||
from datetime import datetime
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from pydantic import BaseModel
|
||||
from app.core.database import get_db
|
||||
from app.core.deps import get_current_user
|
||||
from app.models.user import User
|
||||
from app.services.weather_service import (
|
||||
get_current_weather, get_forecast, get_weather_history,
|
||||
get_weather_impact, get_weather_config, update_weather_config,
|
||||
)
|
||||
|
||||
router = APIRouter(prefix="/weather", tags=["气象数据"])
|
||||
|
||||
|
||||
class WeatherConfigUpdate(BaseModel):
    """Partial update of the weather-API configuration; None fields are ignored."""
    api_provider: str | None = None
    api_key: str | None = None
    # Station location in decimal degrees.
    location_lat: float | None = None
    location_lon: float | None = None
    # Polling cadence for the background fetcher.
    fetch_interval_minutes: int | None = None
    is_enabled: bool | None = None
|
||||
|
||||
|
||||
@router.get("/current")
async def current_weather(
    db: AsyncSession = Depends(get_db),
    user: User = Depends(get_current_user),
):
    """Latest weather observation (delegates to the weather service)."""
    observation = await get_current_weather(db)
    return observation
|
||||
|
||||
|
||||
@router.get("/forecast")
async def weather_forecast(
    hours: int = Query(default=72, ge=1, le=168),
    db: AsyncSession = Depends(get_db),
    user: User = Depends(get_current_user),
):
    """Weather forecast up to 168 h ahead (default 72 h)."""
    forecast = await get_forecast(db, hours)
    return forecast
|
||||
|
||||
|
||||
@router.get("/history")
async def weather_history(
    start_date: str = Query(..., description="开始日期 e.g. 2026-03-01"),
    end_date: str = Query(..., description="结束日期 e.g. 2026-04-01"),
    db: AsyncSession = Depends(get_db),
    user: User = Depends(get_current_user),
):
    """Historical weather data between two ISO dates.

    Fix: malformed dates previously raised ValueError (a 500); they now return
    400, and an inverted range is rejected up front.
    """
    try:
        start = datetime.fromisoformat(start_date)
        end = datetime.fromisoformat(end_date)
    except ValueError:
        raise HTTPException(status_code=400, detail="日期格式无效,应为 ISO 格式,如 2026-03-01")
    if start > end:
        raise HTTPException(status_code=400, detail="开始日期不能晚于结束日期")
    return await get_weather_history(db, start, end)
|
||||
|
||||
|
||||
@router.get("/impact")
async def weather_impact(
    days: int = Query(default=30, ge=1, le=365),
    db: AsyncSession = Depends(get_db),
    user: User = Depends(get_current_user),
):
    """Analysis of weather's impact on energy over the lookback window."""
    analysis = await get_weather_impact(db, days)
    return analysis
|
||||
|
||||
|
||||
@router.get("/config")
async def get_config(
    db: AsyncSession = Depends(get_db),
    user: User = Depends(get_current_user),
):
    """Current weather-API configuration."""
    config = await get_weather_config(db)
    return config
|
||||
|
||||
|
||||
@router.put("/config")
async def set_config(
    data: WeatherConfigUpdate,
    db: AsyncSession = Depends(get_db),
    user: User = Depends(get_current_user),
):
    """Update the weather-API configuration; None fields are left unchanged."""
    changes = data.model_dump(exclude_none=True)
    return await update_weather_config(db, changes)
|
||||
@@ -2,7 +2,10 @@ from app.models.user import User, Role, AuditLog
|
||||
from app.models.device import Device, DeviceGroup, DeviceType
|
||||
from app.models.energy import EnergyData, EnergyDailySummary, EnergyCategory
|
||||
from app.models.alarm import AlarmRule, AlarmEvent
|
||||
from app.models.carbon import CarbonEmission, EmissionFactor
|
||||
from app.models.carbon import (
|
||||
CarbonEmission, EmissionFactor, CarbonTarget, CarbonReduction,
|
||||
GreenCertificate, CarbonReport, CarbonBenchmark,
|
||||
)
|
||||
from app.models.report import ReportTemplate, ReportTask
|
||||
from app.models.setting import SystemSetting
|
||||
from app.models.charging import (
|
||||
@@ -13,13 +16,18 @@ from app.models.quota import EnergyQuota, QuotaUsage
|
||||
from app.models.pricing import ElectricityPricing, PricingPeriod
|
||||
from app.models.maintenance import InspectionPlan, InspectionRecord, RepairOrder, DutySchedule
|
||||
from app.models.management import Regulation, Standard, ProcessDoc, EmergencyPlan
|
||||
from app.models.prediction import PredictionTask, PredictionResult, OptimizationSchedule
|
||||
from app.models.energy_strategy import TouPricing, TouPricingPeriod, EnergyStrategy, StrategyExecution, MonthlyCostReport
|
||||
from app.models.weather import WeatherData, WeatherConfig
|
||||
from app.models.ai_ops import DeviceHealthScore, AnomalyDetection, DiagnosticReport, MaintenancePrediction, OpsInsight
|
||||
|
||||
__all__ = [
|
||||
"User", "Role", "AuditLog",
|
||||
"Device", "DeviceGroup", "DeviceType",
|
||||
"EnergyData", "EnergyDailySummary", "EnergyCategory",
|
||||
"AlarmRule", "AlarmEvent",
|
||||
"CarbonEmission", "EmissionFactor",
|
||||
"CarbonEmission", "EmissionFactor", "CarbonTarget", "CarbonReduction",
|
||||
"GreenCertificate", "CarbonReport", "CarbonBenchmark",
|
||||
"ReportTemplate", "ReportTask",
|
||||
"SystemSetting",
|
||||
"ChargingStation", "ChargingPile", "ChargingPriceStrategy", "ChargingPriceParam",
|
||||
@@ -28,4 +36,8 @@ __all__ = [
|
||||
"ElectricityPricing", "PricingPeriod",
|
||||
"InspectionPlan", "InspectionRecord", "RepairOrder", "DutySchedule",
|
||||
"Regulation", "Standard", "ProcessDoc", "EmergencyPlan",
|
||||
"PredictionTask", "PredictionResult", "OptimizationSchedule",
|
||||
"TouPricing", "TouPricingPeriod", "EnergyStrategy", "StrategyExecution", "MonthlyCostReport",
|
||||
"WeatherData", "WeatherConfig",
|
||||
"DeviceHealthScore", "AnomalyDetection", "DiagnosticReport", "MaintenancePrediction", "OpsInsight",
|
||||
]
|
||||
|
||||
88
backend/app/models/ai_ops.py
Normal file
88
backend/app/models/ai_ops.py
Normal file
@@ -0,0 +1,88 @@
|
||||
"""AI运维智能体数据模型 - 设备健康评分、异常检测、诊断报告、预测性维护、运营洞察"""
|
||||
from sqlalchemy import Column, Integer, String, Float, Boolean, DateTime, ForeignKey, Text, JSON
|
||||
from sqlalchemy.sql import func
|
||||
from app.core.database import Base
|
||||
|
||||
|
||||
class DeviceHealthScore(Base):
    """Periodic composite health-score snapshot (0-100) for one device."""
    __tablename__ = "device_health_scores"

    id = Column(Integer, primary_key=True, autoincrement=True)
    device_id = Column(Integer, ForeignKey("devices.id"), nullable=False, index=True)
    # When the score was computed (DB server clock, timezone-aware).
    timestamp = Column(DateTime(timezone=True), server_default=func.now(), index=True)
    health_score = Column(Float, nullable=False)  # 0-100
    status = Column(String(20), default="healthy")  # healthy, warning, critical
    # Component factors behind the score:
    # {power_stability, efficiency, alarm_frequency, uptime, temperature}
    factors = Column(JSON)
    trend = Column(String(20), default="stable")  # improving, stable, degrading
    created_at = Column(DateTime(timezone=True), server_default=func.now())
|
||||
|
||||
|
||||
class AnomalyDetection(Base):
    """One detected anomaly on a device, with expected-vs-actual metric values."""
    __tablename__ = "anomaly_detections"

    id = Column(Integer, primary_key=True, autoincrement=True)
    device_id = Column(Integer, ForeignKey("devices.id"), nullable=False, index=True)
    detected_at = Column(DateTime(timezone=True), server_default=func.now(), index=True)
    # power_drop, efficiency_loss, abnormal_temperature, communication_loss, pattern_deviation
    anomaly_type = Column(String(50), nullable=False)
    severity = Column(String(20), default="warning")  # info, warning, critical
    description = Column(Text)
    # Metric that triggered the detection, with expected vs observed values.
    metric_name = Column(String(50))
    expected_value = Column(Float)
    actual_value = Column(Float)
    deviation_percent = Column(Float)
    # Triage lifecycle of the anomaly.
    status = Column(String(20), default="detected")  # detected, investigating, resolved, false_positive
    resolution_notes = Column(Text)
    created_at = Column(DateTime(timezone=True), server_default=func.now())
|
||||
|
||||
|
||||
class DiagnosticReport(Base):
    """AI-generated diagnostic report for a device."""
    __tablename__ = "diagnostic_reports"

    id = Column(Integer, primary_key=True, autoincrement=True)
    device_id = Column(Integer, ForeignKey("devices.id"), nullable=False, index=True)
    generated_at = Column(DateTime(timezone=True), server_default=func.now(), index=True)
    report_type = Column(String(20), default="routine")  # routine, triggered, comprehensive
    findings = Column(JSON)  # [{finding, severity, detail}]
    recommendations = Column(JSON)  # [{action, priority, detail}]
    estimated_impact = Column(JSON)  # {energy_loss_kwh, cost_impact_yuan}
    # Review lifecycle of the report.
    status = Column(String(20), default="generated")  # generated, reviewed, action_taken
    created_at = Column(DateTime(timezone=True), server_default=func.now())
|
||||
|
||||
|
||||
class MaintenancePrediction(Base):
    """Predictive-maintenance forecast for a device component.

    Captures the predicted failure mode, its probability, the expected
    failure date, and the estimated downtime/repair cost.
    """
    __tablename__ = "maintenance_predictions"

    id = Column(Integer, primary_key=True, autoincrement=True)
    device_id = Column(Integer, ForeignKey("devices.id"), nullable=False, index=True)
    predicted_at = Column(DateTime(timezone=True), server_default=func.now())
    component = Column(String(100))
    failure_mode = Column(String(200))
    probability = Column(Float)  # 0-1
    predicted_failure_date = Column(DateTime(timezone=True))
    recommended_action = Column(Text)
    urgency = Column(String(20), default="medium")  # low, medium, high, critical
    estimated_downtime_hours = Column(Float)
    estimated_repair_cost = Column(Float)
    status = Column(String(20), default="predicted")  # predicted, scheduled, completed, false_alarm
    created_at = Column(DateTime(timezone=True), server_default=func.now())
|
||||
|
||||
|
||||
class OpsInsight(Base):
    """Operational insight generated by the AI ops service.

    Site-wide (not device-scoped) observation such as an efficiency trend or
    cost anomaly, optionally with a recommended action and an expiry time.
    """
    __tablename__ = "ops_insights"

    id = Column(Integer, primary_key=True, autoincrement=True)
    insight_type = Column(String(50), nullable=False)  # efficiency_trend, cost_anomaly, performance_comparison, seasonal_pattern
    title = Column(String(200), nullable=False)
    description = Column(Text)
    data = Column(JSON)
    impact_level = Column(String(20), default="medium")  # low, medium, high
    # True when a concrete recommended_action is attached.
    actionable = Column(Boolean, default=False)
    recommended_action = Column(Text)
    generated_at = Column(DateTime(timezone=True), server_default=func.now())
    # After this time the insight is considered stale (nullable = no expiry).
    valid_until = Column(DateTime(timezone=True))
    created_at = Column(DateTime(timezone=True), server_default=func.now())
|
||||
@@ -1,4 +1,4 @@
|
||||
from sqlalchemy import Column, Integer, String, Float, DateTime, Text
|
||||
from sqlalchemy import Column, Integer, String, Float, DateTime, Text, Boolean, Date, JSON, ForeignKey
|
||||
from sqlalchemy.sql import func
|
||||
from app.core.database import Base
|
||||
|
||||
@@ -19,6 +19,85 @@ class EmissionFactor(Base):
|
||||
created_at = Column(DateTime(timezone=True), server_default=func.now())
|
||||
|
||||
|
||||
class CarbonTarget(Base):
    """Carbon-reduction target for a year or a single month."""
    __tablename__ = "carbon_targets"

    id = Column(Integer, primary_key=True, autoincrement=True)
    year = Column(Integer, nullable=False)
    month = Column(Integer, nullable=True)  # NULL for annual target
    target_emission_tons = Column(Float, nullable=False)
    # Running actual, compared against the target to derive status.
    actual_emission_tons = Column(Float, default=0)
    status = Column(String(20), default="on_track")  # on_track / warning / exceeded
    created_at = Column(DateTime(timezone=True), server_default=func.now())
|
||||
|
||||
|
||||
class CarbonReduction(Base):
    """Carbon-reduction activity record (one row per source per day)."""
    __tablename__ = "carbon_reductions"

    id = Column(Integer, primary_key=True, autoincrement=True)
    source_type = Column(String(50), nullable=False)  # pv_generation / heat_pump_cop / energy_saving
    date = Column(Date, nullable=False, index=True)
    reduction_tons = Column(Float, nullable=False)
    # Equivalent number of trees planted, for display purposes.
    equivalent_trees = Column(Float, default=0)
    methodology = Column(String(200))
    # Verification workflow: set verified + verification_date after audit.
    verified = Column(Boolean, default=False)
    verification_date = Column(DateTime(timezone=True), nullable=True)
    device_id = Column(Integer, ForeignKey("devices.id"), nullable=True)
    created_at = Column(DateTime(timezone=True), server_default=func.now())
|
||||
|
||||
|
||||
class GreenCertificate(Base):
    """Green certificate holding (green power certificate management)."""
    __tablename__ = "green_certificates"

    id = Column(Integer, primary_key=True, autoincrement=True)
    certificate_type = Column(String(20), nullable=False)  # GEC / IREC / CCER
    certificate_number = Column(String(100), unique=True, nullable=False)
    issue_date = Column(Date, nullable=False)
    expiry_date = Column(Date, nullable=True)
    # Certified energy volume in MWh.
    energy_mwh = Column(Float, nullable=False)
    price_yuan = Column(Float, default=0)
    status = Column(String(20), default="active")  # active / used / expired / traded
    # Generating device the certificate was issued against, if known.
    source_device_id = Column(Integer, ForeignKey("devices.id"), nullable=True)
    notes = Column(Text)
    created_at = Column(DateTime(timezone=True), server_default=func.now())
|
||||
|
||||
|
||||
class CarbonReport(Base):
    """Carbon emission report over a reporting period.

    Aggregates scope 1/2/3 emissions, reductions, and the resulting net
    figure; report_data holds the full JSON payload and file_path an
    optional exported document.
    """
    __tablename__ = "carbon_reports"

    id = Column(Integer, primary_key=True, autoincrement=True)
    report_type = Column(String(20), nullable=False)  # monthly / quarterly / annual
    period_start = Column(Date, nullable=False)
    period_end = Column(Date, nullable=False)
    generated_at = Column(DateTime(timezone=True), server_default=func.now())
    scope1_tons = Column(Float, default=0)
    scope2_tons = Column(Float, default=0)
    scope3_tons = Column(Float, nullable=True)  # optional — scope 3 may not be tracked
    total_tons = Column(Float, default=0)
    reduction_tons = Column(Float, default=0)
    # net = total - reduction (presumably; computed by the service — confirm).
    net_tons = Column(Float, default=0)
    report_data = Column(JSON, nullable=True)
    file_path = Column(String(500), nullable=True)
|
||||
|
||||
|
||||
class CarbonBenchmark(Base):
    """Industry carbon-emission benchmark value for comparison."""
    __tablename__ = "carbon_benchmarks"

    id = Column(Integer, primary_key=True, autoincrement=True)
    industry = Column(String(100), nullable=False)
    metric_name = Column(String(100), nullable=False)
    benchmark_value = Column(Float, nullable=False)
    unit = Column(String(50), nullable=False)
    year = Column(Integer)
    # Provenance of the benchmark figure (publication, standard, etc.).
    source = Column(String(200))
    notes = Column(Text)
|
||||
|
||||
|
||||
class CarbonEmission(Base):
|
||||
"""碳排放记录"""
|
||||
__tablename__ = "carbon_emissions"
|
||||
|
||||
81
backend/app/models/energy_strategy.py
Normal file
81
backend/app/models/energy_strategy.py
Normal file
@@ -0,0 +1,81 @@
|
||||
from sqlalchemy import Column, Integer, String, Float, Boolean, DateTime, ForeignKey, JSON, Date
|
||||
from sqlalchemy.sql import func
|
||||
from app.core.database import Base
|
||||
|
||||
|
||||
class TouPricing(Base):
    """Time-of-use (TOU) electricity pricing scheme.

    Header record for a regional pricing plan; the individual time windows
    and prices live in TouPricingPeriod rows referencing this id.
    """
    __tablename__ = "tou_pricing"

    id = Column(Integer, primary_key=True, autoincrement=True)
    name = Column(String(200), nullable=False)
    region = Column(String(100), default="北京")
    effective_date = Column(Date)
    end_date = Column(Date)
    is_active = Column(Boolean, default=True)
    created_by = Column(Integer, ForeignKey("users.id"))
    created_at = Column(DateTime(timezone=True), server_default=func.now())
    updated_at = Column(DateTime(timezone=True), server_default=func.now(), onupdate=func.now())
|
||||
|
||||
|
||||
class TouPricingPeriod(Base):
    """One time window of a TOU pricing scheme (deleted with its parent)."""
    __tablename__ = "tou_pricing_periods"

    id = Column(Integer, primary_key=True, autoincrement=True)
    pricing_id = Column(Integer, ForeignKey("tou_pricing.id", ondelete="CASCADE"), nullable=False)
    period_type = Column(String(20), nullable=False)  # sharp_peak, peak, flat, valley
    start_time = Column(String(10), nullable=False)  # HH:MM
    end_time = Column(String(10), nullable=False)  # HH:MM
    price_yuan_per_kwh = Column(Float, nullable=False)
    # Months this window applies to, e.g. "1-3,11-12" for winter; NULL = all year.
    month_range = Column(String(50))
    created_at = Column(DateTime(timezone=True), server_default=func.now())
|
||||
|
||||
|
||||
class EnergyStrategy(Base):
    """Energy optimization strategy definition.

    parameters holds the strategy-specific JSON config; priority orders
    strategies when several are enabled.
    """
    __tablename__ = "energy_strategies"

    id = Column(Integer, primary_key=True, autoincrement=True)
    name = Column(String(200), nullable=False)
    strategy_type = Column(String(50), nullable=False)  # heat_storage, load_shift, pv_priority
    description = Column(String(500))
    parameters = Column(JSON, default=dict)
    is_enabled = Column(Boolean, default=False)
    priority = Column(Integer, default=0)
    created_at = Column(DateTime(timezone=True), server_default=func.now())
    updated_at = Column(DateTime(timezone=True), server_default=func.now(), onupdate=func.now())
|
||||
|
||||
|
||||
class StrategyExecution(Base):
    """Daily execution record of an energy strategy and its realized savings."""
    __tablename__ = "strategy_executions"

    id = Column(Integer, primary_key=True, autoincrement=True)
    strategy_id = Column(Integer, ForeignKey("energy_strategies.id"), nullable=False)
    date = Column(Date, nullable=False)
    actions_taken = Column(JSON, default=list)
    savings_kwh = Column(Float, default=0)
    savings_yuan = Column(Float, default=0)
    status = Column(String(20), default="planned")  # planned, executing, completed
    created_at = Column(DateTime(timezone=True), server_default=func.now())
|
||||
|
||||
|
||||
class MonthlyCostReport(Base):
    """Monthly electricity cost analysis report (one row per month).

    Breaks consumption down by TOU period, tracks PV self-consumption vs
    feed-in, and compares the optimized cost against the baseline.
    """
    __tablename__ = "monthly_cost_reports"

    id = Column(Integer, primary_key=True, autoincrement=True)
    year_month = Column(String(7), nullable=False, unique=True)  # YYYY-MM
    total_consumption_kwh = Column(Float, default=0)
    total_cost_yuan = Column(Float, default=0)
    # Consumption split by TOU period (kWh).
    peak_consumption = Column(Float, default=0)
    valley_consumption = Column(Float, default=0)
    flat_consumption = Column(Float, default=0)
    sharp_peak_consumption = Column(Float, default=0)
    # PV energy used on-site vs exported to the grid (kWh).
    pv_self_consumption = Column(Float, default=0)
    pv_feed_in = Column(Float, default=0)
    # savings_yuan = baseline_cost - optimized_cost (presumably; set by the service).
    optimized_cost = Column(Float, default=0)
    baseline_cost = Column(Float, default=0)
    savings_yuan = Column(Float, default=0)
    created_at = Column(DateTime(timezone=True), server_default=func.now())
|
||||
48
backend/app/models/prediction.py
Normal file
48
backend/app/models/prediction.py
Normal file
@@ -0,0 +1,48 @@
|
||||
from sqlalchemy import Column, Integer, String, Float, DateTime, ForeignKey, JSON, Text
|
||||
from sqlalchemy.sql import func
|
||||
from app.core.database import Base
|
||||
|
||||
|
||||
class PredictionTask(Base):
    """Metadata for one AI prediction run; results live in PredictionResult."""
    __tablename__ = "prediction_tasks"

    id = Column(Integer, primary_key=True, autoincrement=True)
    # Nullable: aggregate (campus-wide) forecasts have no single device.
    device_id = Column(Integer, ForeignKey("devices.id"))
    prediction_type = Column(String(50), nullable=False)  # pv, load, heatpump, optimization
    horizon_hours = Column(Integer, default=24)
    status = Column(String(20), default="pending")  # pending, running, completed, failed
    parameters = Column(JSON)  # extra config for the prediction run
    error_message = Column(Text)
    created_at = Column(DateTime(timezone=True), server_default=func.now())
    completed_at = Column(DateTime(timezone=True))
|
||||
|
||||
|
||||
class PredictionResult(Base):
    """One time-series point of an AI prediction run."""
    __tablename__ = "prediction_results"

    id = Column(Integer, primary_key=True, autoincrement=True)
    task_id = Column(Integer, ForeignKey("prediction_tasks.id"), nullable=False, index=True)
    timestamp = Column(DateTime(timezone=True), nullable=False, index=True)
    predicted_value = Column(Float, nullable=False)
    # Confidence band around the prediction.
    confidence_lower = Column(Float)
    confidence_upper = Column(Float)
    actual_value = Column(Float)  # filled later for accuracy tracking
    unit = Column(String(20))
|
||||
|
||||
|
||||
class OptimizationSchedule(Base):
    """AI-generated optimization schedule awaiting operator approval."""
    __tablename__ = "optimization_schedules"

    id = Column(Integer, primary_key=True, autoincrement=True)
    device_id = Column(Integer, ForeignKey("devices.id"))
    date = Column(DateTime(timezone=True), nullable=False, index=True)
    schedule_data = Column(JSON)  # hourly on/off + setpoints
    expected_savings_kwh = Column(Float, default=0)
    expected_savings_yuan = Column(Float, default=0)
    status = Column(String(20), default="pending")  # pending, approved, executed, rejected
    # Approval audit trail.
    approved_by = Column(Integer, ForeignKey("users.id"))
    approved_at = Column(DateTime(timezone=True))
    created_at = Column(DateTime(timezone=True), server_default=func.now())
|
||||
33
backend/app/models/weather.py
Normal file
33
backend/app/models/weather.py
Normal file
@@ -0,0 +1,33 @@
|
||||
from sqlalchemy import Column, Integer, String, Float, Boolean, DateTime
|
||||
from sqlalchemy.sql import func
|
||||
from app.core.database import Base
|
||||
|
||||
|
||||
class WeatherData(Base):
    """Cached weather observation or forecast point."""
    __tablename__ = "weather_data"

    id = Column(Integer, primary_key=True, autoincrement=True)
    # Time the data point refers to (not when it was fetched).
    timestamp = Column(DateTime(timezone=True), nullable=False, index=True)
    data_type = Column(String(20), nullable=False)  # observation, forecast
    temperature = Column(Float)
    humidity = Column(Float)
    solar_radiation = Column(Float)  # W/m2
    cloud_cover = Column(Float)  # 0-100 %
    wind_speed = Column(Float)  # m/s
    source = Column(String(20), default="mock")  # api, mock
    fetched_at = Column(DateTime(timezone=True), server_default=func.now())
|
||||
|
||||
|
||||
class WeatherConfig(Base):
    """Configuration for the external weather API integration."""
    __tablename__ = "weather_config"

    id = Column(Integer, primary_key=True, autoincrement=True)
    api_provider = Column(String(50), default="mock")
    api_key = Column(String(200))
    # Defaults are approximately Beijing (39.9 N, 116.4 E).
    location_lat = Column(Float, default=39.9)
    location_lon = Column(Float, default=116.4)
    fetch_interval_minutes = Column(Integer, default=30)
    is_enabled = Column(Boolean, default=True)
    updated_at = Column(DateTime(timezone=True), server_default=func.now(), onupdate=func.now())
|
||||
1016
backend/app/services/ai_ops.py
Normal file
1016
backend/app/services/ai_ops.py
Normal file
File diff suppressed because it is too large
Load Diff
606
backend/app/services/ai_prediction.py
Normal file
606
backend/app/services/ai_prediction.py
Normal file
@@ -0,0 +1,606 @@
|
||||
"""AI预测引擎 - 光伏发电、负荷、热泵COP预测与自发自用优化
|
||||
|
||||
Uses physics-based models from weather_model.py combined with statistical
|
||||
methods (moving averages, exponential smoothing, seasonal decomposition)
|
||||
to generate realistic forecasts. Inspired by Envision's 天枢能源大模型.
|
||||
"""
|
||||
|
||||
import math
|
||||
import logging
|
||||
from datetime import datetime, timezone, timedelta
|
||||
from typing import Optional
|
||||
|
||||
import numpy as np
|
||||
|
||||
from sqlalchemy import select, func, and_
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.models.device import Device
|
||||
from app.models.energy import EnergyData
|
||||
from app.models.prediction import PredictionTask, PredictionResult, OptimizationSchedule
|
||||
from app.services.weather_model import (
|
||||
outdoor_temperature, solar_altitude, get_cloud_factor,
|
||||
pv_power as _physics_pv_power, get_pv_orientation,
|
||||
get_hvac_mode, MONTHLY_AVG_TEMP, MONTHLY_DIURNAL_SWING,
|
||||
)
|
||||
|
||||
logger = logging.getLogger("ai_prediction")
|
||||
|
||||
# Beijing electricity TOU pricing (yuan/kWh) - simplified
|
||||
TOU_PRICE = {
|
||||
"peak": 1.2, # 10:00-15:00, 18:00-21:00
|
||||
"shoulder": 0.8, # 07:00-10:00, 15:00-18:00, 21:00-23:00
|
||||
"valley": 0.4, # 23:00-07:00
|
||||
}
|
||||
|
||||
|
||||
def _get_tou_price(hour: int) -> float:
|
||||
if 10 <= hour < 15 or 18 <= hour < 21:
|
||||
return TOU_PRICE["peak"]
|
||||
elif 7 <= hour < 10 or 15 <= hour < 18 or 21 <= hour < 23:
|
||||
return TOU_PRICE["shoulder"]
|
||||
else:
|
||||
return TOU_PRICE["valley"]
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# PV Power Forecasting
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
async def forecast_pv(
    db: AsyncSession,
    device_id: int,
    horizon_hours: int = 24,
) -> list[dict]:
    """Forecast PV generation for the next horizon_hours.

    Combines a physics-based solar model with a bias correction derived from
    the device's last 7 days of measured power, and attaches a confidence
    band that widens with forecast distance.

    Args:
        db: async database session.
        device_id: PV inverter device id.
        horizon_hours: number of hourly points to forecast.

    Returns:
        List of {timestamp, predicted_power_kw, confidence_lower,
        confidence_upper} dicts, one per hour, timestamps in UTC ISO format.

    Raises:
        ValueError: if device_id does not exist.
    """
    device = await db.get(Device, device_id)
    if not device:
        raise ValueError(f"Device {device_id} not found")

    # Fall back to a 110 kW rating when the device record has none.
    rated_power = device.rated_power or 110.0
    orientation = get_pv_orientation(device.code or "")

    # Fetch recent historical data for pattern correction
    now = datetime.now(timezone.utc)
    lookback = now - timedelta(days=7)
    result = await db.execute(
        select(EnergyData.timestamp, EnergyData.value)
        .where(and_(
            EnergyData.device_id == device_id,
            EnergyData.data_type == "power",
            EnergyData.timestamp >= lookback,
        ))
        .order_by(EnergyData.timestamp)
    )
    historical = result.all()

    # Calculate hourly averages from history for bias correction.
    # Buckets are Beijing-local hours; tz-aware timestamps are assumed to be
    # UTC (hence the +8 shift) — TODO confirm storage timezone.
    hourly_actual: dict[int, list[float]] = {h: [] for h in range(24)}
    for ts, val in historical:
        beijing_h = (ts.hour + 8) % 24 if ts.tzinfo else ts.hour
        hourly_actual[beijing_h].append(val)

    hourly_avg = {
        h: np.mean(vals) if vals else None
        for h, vals in hourly_actual.items()
    }

    # Generate physics-based forecast with bias correction
    forecasts = []
    for h_offset in range(horizon_hours):
        target_utc = now + timedelta(hours=h_offset)
        target_utc = target_utc.replace(minute=0, second=0, microsecond=0)

        # Physics model baseline
        base_power = _physics_pv_power(target_utc, rated_power=rated_power,
                                       orientation=orientation,
                                       device_code=device.code or "")

        # Bias correction from recent history
        beijing_hour = (target_utc.hour + 8) % 24
        hist_avg = hourly_avg.get(beijing_hour)
        if hist_avg is not None and base_power > 0:
            # Blend: 70% physics + 30% historical pattern.
            # correction is the history/physics ratio, clamped to [0.7, 1.3]
            # so a noisy week cannot swing the forecast by more than 30%.
            correction = hist_avg / max(base_power, 0.1)
            correction = max(0.7, min(1.3, correction))
            predicted = base_power * (0.7 + 0.3 * correction)
        else:
            predicted = base_power

        # Confidence interval widens with forecast horizon
        uncertainty = 0.05 + 0.02 * h_offset  # grows with time
        uncertainty = min(uncertainty, 0.40)  # cap at +/-40%
        margin = predicted * uncertainty
        conf_lower = max(0, predicted - margin)
        # Upper bound cannot exceed the panel's rated power.
        conf_upper = min(rated_power, predicted + margin)

        forecasts.append({
            "timestamp": target_utc.isoformat(),
            "predicted_power_kw": round(predicted, 2),
            "confidence_lower": round(conf_lower, 2),
            "confidence_upper": round(conf_upper, 2),
        })

    return forecasts
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Load Forecasting
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
async def forecast_load(
    db: AsyncSession,
    device_id: Optional[int] = None,
    building_type: str = "office",
    horizon_hours: int = 24,
) -> list[dict]:
    """Forecast building electricity load.

    Uses weekday/weekend hourly profiles calibrated from the last 14 days of
    meter data (falling back to a built-in office profile when no history
    exists) plus an outdoor-temperature HVAC correction.

    Args:
        db: async database session.
        device_id: meter device to forecast; None aggregates all meters.
        building_type: "office" (default) or "factory" (flatter profile).
        horizon_hours: number of hourly points to forecast.

    Returns:
        List of {timestamp, predicted_load_kw, confidence_lower,
        confidence_upper} dicts, one per hour, timestamps in UTC ISO format.
    """
    now = datetime.now(timezone.utc)

    # Fetch recent history for pattern calibration
    lookback = now - timedelta(days=14)
    conditions = [
        EnergyData.data_type == "power",
        EnergyData.timestamp >= lookback,
    ]
    if device_id:
        conditions.append(EnergyData.device_id == device_id)
    else:
        # Aggregate all meters
        conditions.append(
            EnergyData.device_id.in_(
                select(Device.id).where(Device.device_type == "meter")
            )
        )

    result = await db.execute(
        select(EnergyData.timestamp, EnergyData.value)
        .where(and_(*conditions))
        .order_by(EnergyData.timestamp)
    )
    historical = result.all()

    # Build weekday/weekend hourly profiles from history
    # (Beijing-local hours; tz-aware timestamps assumed UTC — TODO confirm).
    weekday_profile: dict[int, list[float]] = {h: [] for h in range(24)}
    weekend_profile: dict[int, list[float]] = {h: [] for h in range(24)}
    for ts, val in historical:
        bj = ts + timedelta(hours=8) if ts.tzinfo else ts
        h = bj.hour
        if bj.weekday() >= 5:
            weekend_profile[h].append(val)
        else:
            weekday_profile[h].append(val)

    # Default hourly load profile (kW) used when no history is available.
    default_weekday = {
        0: 18, 1: 16, 2: 16, 3: 15, 4: 15, 5: 17, 6: 25, 7: 40,
        8: 55, 9: 60, 10: 62, 11: 58, 12: 45, 13: 58, 14: 62,
        15: 60, 16: 55, 17: 48, 18: 35, 19: 28, 20: 25, 21: 22, 22: 20, 23: 18,
    }
    default_weekend = {h: v * 0.5 for h, v in default_weekday.items()}

    def _avg_or_default(profile, defaults, h):
        # Mean of observed values for hour h, or the built-in default.
        vals = profile.get(h, [])
        return float(np.mean(vals)) if vals else defaults[h]

    forecasts = []
    for h_offset in range(horizon_hours):
        target_utc = now + timedelta(hours=h_offset)
        target_utc = target_utc.replace(minute=0, second=0, microsecond=0)
        bj = target_utc + timedelta(hours=8)
        hour = bj.hour
        is_weekend = bj.weekday() >= 5

        if is_weekend:
            base_load = _avg_or_default(weekend_profile, default_weekend, hour)
        else:
            base_load = _avg_or_default(weekday_profile, default_weekday, hour)

        # Temperature correction: HVAC adds load in extreme temps
        temp = outdoor_temperature(target_utc)
        if temp < 5:
            hvac_factor = 1.0 + 0.02 * (5 - temp)  # heating below 5C
        elif temp > 28:
            hvac_factor = 1.0 + 0.025 * (temp - 28)  # cooling above 28C
        else:
            hvac_factor = 1.0
        hvac_factor = min(hvac_factor, 1.4)  # cap HVAC uplift at +40%

        predicted = base_load * hvac_factor

        # Factory buildings have flatter profiles: damp the HVAC swing by
        # blending 15% of the uncorrected base load back in.
        if building_type == "factory":
            predicted = predicted * 0.85 + base_load * 0.15

        # Confidence interval widens with forecast horizon
        uncertainty = 0.08 + 0.015 * h_offset
        uncertainty = min(uncertainty, 0.35)
        margin = predicted * uncertainty

        forecasts.append({
            "timestamp": target_utc.isoformat(),
            "predicted_load_kw": round(predicted, 2),
            "confidence_lower": round(max(0, predicted - margin), 2),
            "confidence_upper": round(predicted + margin, 2),
        })

    return forecasts
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Heat Pump COP Prediction
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
async def forecast_heatpump_cop(
    db: AsyncSession,
    device_id: int,
    horizon_hours: int = 24,
) -> list[dict]:
    """Predict heat pump COP based on the outdoor temperature forecast.

    Heating COP model: COP = 3.0 + 0.05 * (T_outdoor - 7);
    cooling COP model: COP = 4.0 - 0.04 * (T_outdoor - 25);
    both clamped to [2.0, 5.5]. Also estimates power draw from a
    time-of-day load-factor profile and the hourly operating cost from
    TOU pricing.

    Args:
        db: async database session.
        device_id: heat pump device id.
        horizon_hours: number of hourly points to forecast.

    Returns:
        List of per-hour dicts with predicted_cop, outdoor_temp,
        estimated_power_kw, load_factor, electricity_price,
        operating_cost_yuan_h, and mode.

    Raises:
        ValueError: if device_id does not exist.
    """
    device = await db.get(Device, device_id)
    if not device:
        raise ValueError(f"Device {device_id} not found")

    now = datetime.now(timezone.utc)
    forecasts = []

    for h_offset in range(horizon_hours):
        target_utc = now + timedelta(hours=h_offset)
        target_utc = target_utc.replace(minute=0, second=0, microsecond=0)
        bj = target_utc + timedelta(hours=8)  # Beijing local time

        temp = outdoor_temperature(target_utc)
        mode = get_hvac_mode(bj.month)

        # COP model (same as weather_model but deterministic for forecast)
        if mode in ("heating", "transition_spring", "transition_fall"):
            cop = 3.0 + 0.05 * (temp - 7)
        else:  # cooling
            cop = 4.0 - 0.04 * (temp - 25)
        cop = max(2.0, min(5.5, cop))

        # Estimated power at this COP; default to a 35 kW rating.
        rated = device.rated_power or 35.0
        # Load factor based on time and mode: morning warm-up / evening peak
        # for heating, midday peak for cooling, low baseline otherwise.
        if mode == "heating":
            if 6 <= bj.hour < 9:
                load_factor = 0.85
            elif 9 <= bj.hour < 16:
                load_factor = 0.55
            elif 16 <= bj.hour < 22:
                load_factor = 0.75
            else:
                load_factor = 0.65
        elif mode == "cooling":
            if 11 <= bj.hour < 16:
                load_factor = 0.85
            elif 8 <= bj.hour < 11 or 16 <= bj.hour < 19:
                load_factor = 0.60
            else:
                load_factor = 0.25
        else:
            load_factor = 0.35

        # Weekends run lighter.
        if bj.weekday() >= 5:
            load_factor *= 0.7

        est_power = rated * load_factor
        electricity_price = _get_tou_price(bj.hour)
        operating_cost = est_power * electricity_price  # yuan/h

        forecasts.append({
            "timestamp": target_utc.isoformat(),
            "predicted_cop": round(cop, 2),
            "outdoor_temp": round(temp, 1),
            "estimated_power_kw": round(est_power, 2),
            "load_factor": round(load_factor, 2),
            "electricity_price": electricity_price,
            "operating_cost_yuan_h": round(operating_cost, 2),
            "mode": mode,
        })

    return forecasts
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Self-Consumption Optimization
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
async def optimize_self_consumption(
    db: AsyncSession,
    horizon_hours: int = 24,
) -> dict:
    """Compare predicted PV generation vs predicted load to find optimization
    opportunities. Recommends heat pump pre-heating during PV surplus.

    Args:
        db: async database session.
        horizon_hours: forecast horizon for both PV and load.

    Returns:
        Dict with:
        - hourly: per-hour pv/load/balance/price comparison
        - surplus_periods / deficit_periods: hours where |balance| > 2 kW
        - hp_schedule: recommended heat pump boost/reduce actions
        - summary: totals, self-consumption rate, and potential savings
    """
    now = datetime.now(timezone.utc)

    # Get all PV inverter device ids
    pv_result = await db.execute(
        select(Device).where(Device.device_type == "pv_inverter", Device.is_active == True)
    )
    pv_devices = pv_result.scalars().all()

    # Aggregate PV forecast across all inverters, hour by hour.
    pv_total = [0.0] * horizon_hours
    for dev in pv_devices:
        pv_forecast = await forecast_pv(db, dev.id, horizon_hours)
        for i, f in enumerate(pv_forecast):
            pv_total[i] += f["predicted_power_kw"]

    # Aggregate load forecast (device_id=None -> campus-wide)
    load_forecast = await forecast_load(db, device_id=None, horizon_hours=horizon_hours)

    # Build hourly comparison
    hourly = []
    surplus_periods = []
    deficit_periods = []
    total_surplus_kwh = 0.0
    total_deficit_kwh = 0.0

    for i in range(horizon_hours):
        target_utc = now + timedelta(hours=i)
        target_utc = target_utc.replace(minute=0, second=0, microsecond=0)
        bj = target_utc + timedelta(hours=8)  # Beijing local time

        pv_kw = pv_total[i]
        load_kw = load_forecast[i]["predicted_load_kw"]
        balance = pv_kw - load_kw  # positive = PV surplus
        price = _get_tou_price(bj.hour)

        entry = {
            "timestamp": target_utc.isoformat(),
            "hour": bj.hour,
            "pv_generation_kw": round(pv_kw, 2),
            "load_kw": round(load_kw, 2),
            "balance_kw": round(balance, 2),
            "electricity_price": price,
        }
        hourly.append(entry)

        # 2 kW dead band: ignore near-balanced hours.
        # (kW over a 1 h slot is treated as kWh for the energy totals.)
        if balance > 2:  # >2kW surplus threshold
            surplus_periods.append({"hour": bj.hour, "surplus_kw": round(balance, 2)})
            total_surplus_kwh += balance
        elif balance < -2:
            deficit_periods.append({"hour": bj.hour, "deficit_kw": round(-balance, 2)})
            total_deficit_kwh += (-balance)

    # Generate heat pump optimization schedule
    # Strategy: shift heat pump load to PV surplus periods
    hp_schedule = []
    savings_kwh = 0.0
    savings_yuan = 0.0

    for period in surplus_periods:
        hour = period["hour"]
        surplus = period["surplus_kw"]
        price = _get_tou_price(hour)

        # Use surplus to pre-heat/pre-cool
        usable_power = min(surplus, 35.0)  # cap at single HP rated power
        hp_schedule.append({
            "hour": hour,
            "action": "boost",
            "power_kw": round(usable_power, 2),
            "reason": "利用光伏余电预加热/预冷",
        })
        # Savings = grid energy displaced by running the HP on free PV surplus.
        savings_kwh += usable_power
        savings_yuan += usable_power * price

    # Also recommend reducing HP during peak-price deficit periods
    for period in deficit_periods:
        hour = period["hour"]
        price = _get_tou_price(hour)
        if price >= TOU_PRICE["peak"]:
            hp_schedule.append({
                "hour": hour,
                "action": "reduce",
                "power_kw": 0,
                "reason": "高电价时段降低热泵负荷",
            })
            # Estimate savings from reduced operation during peak
            savings_yuan += 5.0 * price  # assume 5kW reduction

    # Self-consumption rate: share of forecast PV that the site can absorb.
    self_consumption_rate = 0.0
    total_pv = sum(pv_total)
    total_load = sum(f["predicted_load_kw"] for f in load_forecast)
    if total_pv > 0:
        self_consumed = min(total_pv, total_load)
        self_consumption_rate = self_consumed / total_pv * 100

    return {
        "hourly": hourly,
        "surplus_periods": surplus_periods,
        "deficit_periods": deficit_periods,
        "hp_schedule": hp_schedule,
        "summary": {
            "total_pv_kwh": round(total_pv, 2),
            "total_load_kwh": round(total_load, 2),
            "total_surplus_kwh": round(total_surplus_kwh, 2),
            "total_deficit_kwh": round(total_deficit_kwh, 2),
            "self_consumption_rate": round(self_consumption_rate, 1),
            "potential_savings_kwh": round(savings_kwh, 2),
            "potential_savings_yuan": round(savings_yuan, 2),
        },
    }
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Prediction Accuracy
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
async def get_prediction_accuracy(
    db: AsyncSession,
    prediction_type: Optional[str] = None,
    days: int = 7,
) -> dict:
    """Calculate prediction accuracy metrics (MAE, RMSE, MAPE) from
    historical predictions that have actual values filled in.

    Args:
        db: async database session.
        prediction_type: restrict to one task type (pv/load/heatpump); None = all.
        days: lookback window.

    Returns:
        Dict with sample_count, mae, rmse, mape; when no scored predictions
        exist, returns fixed demo metrics with sample_count=0 and a note.
    """
    cutoff = datetime.now(timezone.utc) - timedelta(days=days)

    # Only rows whose actual_value has been backfilled can be scored.
    conditions = [
        PredictionResult.actual_value.isnot(None),
        PredictionResult.timestamp >= cutoff,
    ]
    if prediction_type:
        conditions.append(
            PredictionResult.task_id.in_(
                select(PredictionTask.id).where(
                    PredictionTask.prediction_type == prediction_type
                )
            )
        )

    result = await db.execute(
        select(PredictionResult.predicted_value, PredictionResult.actual_value)
        .where(and_(*conditions))
    )
    pairs = result.all()

    if not pairs:
        # Return mock accuracy for demo (simulating a well-tuned model)
        return {
            "sample_count": 0,
            "mae": 2.5,
            "rmse": 3.8,
            "mape": 8.5,
            "note": "使用模拟精度指标(无历史预测数据)",
        }

    predicted = np.array([p[0] for p in pairs])
    actual = np.array([p[1] for p in pairs])

    errors = predicted - actual
    mae = float(np.mean(np.abs(errors)))
    rmse = float(np.sqrt(np.mean(errors ** 2)))

    # MAPE: only where actual > 0 to avoid division by zero
    # (threshold 0.1 also drops near-zero actuals that would blow up MAPE).
    mask = actual > 0.1
    if mask.any():
        mape = float(np.mean(np.abs(errors[mask] / actual[mask])) * 100)
    else:
        mape = 0.0

    return {
        "sample_count": len(pairs),
        "mae": round(mae, 2),
        "rmse": round(rmse, 2),
        "mape": round(mape, 1),
    }
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Run Prediction (creates task + results)
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
async def run_prediction(
    db: AsyncSession,
    device_id: Optional[int],
    prediction_type: str,
    horizon_hours: int = 24,
    parameters: Optional[dict] = None,
) -> PredictionTask:
    """Execute a prediction and store results in the database.

    Creates a PredictionTask row in status "running", dispatches on
    ``prediction_type`` ("pv" | "load" | "heatpump" | "optimization"),
    stores one PredictionResult per forecast point, and marks the task
    "completed" or "failed". Failures are recorded on the task row rather
    than raised, so the caller always gets the task back.

    NOTE(review): the session is flushed (to obtain ``task.id``) but never
    committed here — presumably the caller owns the transaction; confirm.
    """
    task = PredictionTask(
        device_id=device_id,
        prediction_type=prediction_type,
        horizon_hours=horizon_hours,
        status="running",
        parameters=parameters or {},
    )
    db.add(task)
    # Flush so task.id is assigned before child PredictionResult rows use it.
    await db.flush()

    try:
        if prediction_type == "pv":
            if not device_id:
                raise ValueError("device_id required for PV forecast")
            forecasts = await forecast_pv(db, device_id, horizon_hours)
            for f in forecasts:
                db.add(PredictionResult(
                    task_id=task.id,
                    timestamp=f["timestamp"],
                    predicted_value=f["predicted_power_kw"],
                    confidence_lower=f["confidence_lower"],
                    confidence_upper=f["confidence_upper"],
                    unit="kW",
                ))

        elif prediction_type == "load":
            # Load forecast works park-wide, so device_id may be None.
            building_type = (parameters or {}).get("building_type", "office")
            forecasts = await forecast_load(db, device_id, building_type, horizon_hours)
            for f in forecasts:
                db.add(PredictionResult(
                    task_id=task.id,
                    timestamp=f["timestamp"],
                    predicted_value=f["predicted_load_kw"],
                    confidence_lower=f["confidence_lower"],
                    confidence_upper=f["confidence_upper"],
                    unit="kW",
                ))

        elif prediction_type == "heatpump":
            if not device_id:
                raise ValueError("device_id required for heat pump forecast")
            forecasts = await forecast_heatpump_cop(db, device_id, horizon_hours)
            for f in forecasts:
                db.add(PredictionResult(
                    task_id=task.id,
                    timestamp=f["timestamp"],
                    predicted_value=f["predicted_cop"],
                    # COP band is a fixed ±0.3 clamped to a plausible [2.0, 5.5] range.
                    confidence_lower=max(2.0, f["predicted_cop"] - 0.3),
                    confidence_upper=min(5.5, f["predicted_cop"] + 0.3),
                    unit="",  # COP is dimensionless
                ))

        elif prediction_type == "optimization":
            opt = await optimize_self_consumption(db, horizon_hours)
            # Store as optimization schedule
            now = datetime.now(timezone.utc)
            schedule = OptimizationSchedule(
                date=now.replace(hour=0, minute=0, second=0, microsecond=0),
                schedule_data=opt,
                expected_savings_kwh=opt["summary"]["potential_savings_kwh"],
                expected_savings_yuan=opt["summary"]["potential_savings_yuan"],
                status="pending",
            )
            db.add(schedule)
            # Also store hourly balance as prediction results
            for entry in opt["hourly"]:
                db.add(PredictionResult(
                    task_id=task.id,
                    timestamp=entry["timestamp"],
                    predicted_value=entry["balance_kw"],
                    unit="kW",
                ))
        else:
            raise ValueError(f"Unknown prediction type: {prediction_type}")

        task.status = "completed"
        task.completed_at = datetime.now(timezone.utc)

    except Exception as e:
        # Broad catch is intentional: any failure is persisted on the task
        # row and logged instead of propagating to the API layer.
        task.status = "failed"
        task.error_message = str(e)
        logger.error(f"Prediction task {task.id} failed: {e}", exc_info=True)

    return task
|
||||
462
backend/app/services/carbon_asset.py
Normal file
462
backend/app/services/carbon_asset.py
Normal file
@@ -0,0 +1,462 @@
|
||||
"""Carbon Asset Management Service.
|
||||
|
||||
Provides carbon accounting, CCER/green certificate management,
|
||||
report generation, target tracking, and benchmark comparison.
|
||||
"""
|
||||
import logging
|
||||
from datetime import date, datetime, timezone
|
||||
from typing import Optional
|
||||
|
||||
from sqlalchemy import select, func, and_, case
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.models.carbon import (
|
||||
CarbonEmission, EmissionFactor, CarbonTarget, CarbonReduction,
|
||||
GreenCertificate, CarbonReport, CarbonBenchmark,
|
||||
)
|
||||
from app.models.energy import EnergyData
|
||||
|
||||
logger = logging.getLogger("carbon_asset")
|
||||
|
||||
# China national grid emission factor 2023 (tCO2/MWh)
|
||||
GRID_EMISSION_FACTOR = 0.5810
|
||||
# Average CO2 absorption per tree per year (tons)
|
||||
TREE_ABSORPTION = 0.02
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Carbon Accounting
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
async def calculate_scope2_emission(
    db: AsyncSession, start: date, end: date,
) -> float:
    """Sum Scope 2 (purchased grid electricity) emissions over the inclusive
    [start, end] window and return tons of CO2 (rows store kilograms)."""
    stmt = (
        select(func.sum(CarbonEmission.emission))
        .where(CarbonEmission.scope == 2)
        .where(func.date(CarbonEmission.date) >= start)
        .where(func.date(CarbonEmission.date) <= end)
    )
    total_kg = (await db.execute(stmt)).scalar() or 0
    return round(total_kg / 1000, 4)  # kg -> tons
|
||||
|
||||
|
||||
async def calculate_scope1_emission(
    db: AsyncSession, start: date, end: date,
) -> float:
    """Sum Scope 1 direct emissions (natural gas etc.) over the inclusive
    [start, end] window, converted from kilograms to tons of CO2."""
    total_kg = (
        await db.execute(
            select(func.sum(CarbonEmission.emission)).where(and_(
                CarbonEmission.scope == 1,
                func.date(CarbonEmission.date) >= start,
                func.date(CarbonEmission.date) <= end,
            ))
        )
    ).scalar() or 0
    return round(total_kg / 1000, 4)
|
||||
|
||||
|
||||
async def calculate_total_reduction(
    db: AsyncSession, start: date, end: date,
) -> float:
    """Sum every recorded carbon reduction in the window, in tons."""
    stmt = (
        select(func.sum(CarbonEmission.reduction))
        .where(func.date(CarbonEmission.date) >= start)
        .where(func.date(CarbonEmission.date) <= end)
    )
    reduction_kg = (await db.execute(stmt)).scalar() or 0
    return round(reduction_kg / 1000, 4)  # kg -> tons
|
||||
|
||||
|
||||
async def calculate_pv_reduction(
    db: AsyncSession, start: date, end: date,
) -> float:
    """Carbon reduction attributed to PV self-consumption, in tons.

    Formula: reduction = pv_generation_mwh * GRID_EMISSION_FACTOR
    (the stored rows already carry the computed reduction in kg; this just
    sums the "pv_generation" category).
    """
    filters = and_(
        CarbonEmission.category == "pv_generation",
        func.date(CarbonEmission.date) >= start,
        func.date(CarbonEmission.date) <= end,
    )
    result = await db.execute(
        select(func.sum(CarbonEmission.reduction)).where(filters)
    )
    return round((result.scalar() or 0) / 1000, 4)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Reduction tracking
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
async def get_reduction_summary(
    db: AsyncSession, start: date, end: date,
) -> list[dict]:
    """Aggregate reduction records by source type over [start, end]."""
    stmt = (
        select(
            CarbonReduction.source_type,
            func.sum(CarbonReduction.reduction_tons),
            func.sum(CarbonReduction.equivalent_trees),
            func.count(CarbonReduction.id),
        )
        .where(CarbonReduction.date >= start)
        .where(CarbonReduction.date <= end)
        .group_by(CarbonReduction.source_type)
    )
    summary = []
    for source_type, tons, trees, record_count in (await db.execute(stmt)).all():
        summary.append({
            "source_type": source_type,
            "reduction_tons": round(tons or 0, 4),
            "equivalent_trees": round(trees or 0, 1),
            "count": record_count,
        })
    return summary
|
||||
|
||||
|
||||
async def trigger_reduction_calculation(
    db: AsyncSession, start: date, end: date,
) -> dict:
    """Compute reduction activities from energy data and persist them.

    PV reduction comes from the carbon_emissions "pv_generation" category;
    everything else in the total is attributed to heat pump COP savings.
    Existing (source_type, date=end) rows are not duplicated.
    """
    pv_tons = await calculate_pv_reduction(db, start, end)
    total_reduction = await calculate_total_reduction(db, start, end)
    heat_pump_tons = max(0, total_reduction - pv_tons)

    created = 0
    by_source = {"pv_generation": pv_tons, "heat_pump_cop": heat_pump_tons}
    for source, tons in by_source.items():
        if tons <= 0:
            continue
        # Idempotency guard: skip if a record for this source/date exists.
        dup_check = await db.execute(
            select(CarbonReduction)
            .where(CarbonReduction.source_type == source)
            .where(CarbonReduction.date == end)
        )
        if dup_check.scalar_one_or_none() is not None:
            continue
        db.add(CarbonReduction(
            source_type=source,
            date=end,
            reduction_tons=tons,
            equivalent_trees=round(tons / TREE_ABSORPTION, 1),
            methodology=f"Grid factor {GRID_EMISSION_FACTOR} tCO2/MWh",
            verified=False,
        ))
        created += 1

    return {
        "period": f"{start} ~ {end}",
        "pv_reduction_tons": pv_tons,
        "heat_pump_reduction_tons": heat_pump_tons,
        "total_reduction_tons": total_reduction,
        "records_created": created,
    }
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# CCER / Green Certificate Management
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
async def calculate_ccer_eligible(
    db: AsyncSession, start: date, end: date,
) -> dict:
    """Eligible CCER volume for the period, derived from PV-generation
    reduction records."""
    eligible = await calculate_pv_reduction(db, start, end)
    return {
        "eligible_ccer_tons": eligible,
        "grid_emission_factor": GRID_EMISSION_FACTOR,
        "period": f"{start} ~ {end}",
    }
|
||||
|
||||
|
||||
async def get_certificate_portfolio_value(db: AsyncSession) -> dict:
    """Aggregate the green-certificate portfolio: counts, energy and value,
    grouped by certificate status, plus overall totals."""
    result = await db.execute(
        select(
            GreenCertificate.status,
            func.count(GreenCertificate.id),
            func.sum(GreenCertificate.energy_mwh),
            func.sum(GreenCertificate.price_yuan),
        ).group_by(GreenCertificate.status)
    )

    by_status: dict = {}
    total_value = 0.0
    total_mwh = 0.0
    for status, cert_count, mwh, value in result.all():
        by_status[status] = {
            "count": cert_count,
            "energy_mwh": round(mwh or 0, 2),
            "value_yuan": round(value or 0, 2),
        }
        total_value += value or 0
        total_mwh += mwh or 0

    return {
        "total_certificates": sum(v["count"] for v in by_status.values()),
        "total_energy_mwh": round(total_mwh, 2),
        "total_value_yuan": round(total_value, 2),
        "by_status": by_status,
    }
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Carbon Report Generation
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
async def generate_carbon_report(
    db: AsyncSession,
    report_type: str,
    period_start: date,
    period_end: date,
) -> CarbonReport:
    """Assemble a carbon footprint report for [period_start, period_end].

    The report row is added to the session but not committed here.
    """
    scope1 = await calculate_scope1_emission(db, period_start, period_end)
    scope2 = await calculate_scope2_emission(db, period_start, period_end)
    reduction = await calculate_total_reduction(db, period_start, period_end)
    gross = scope1 + scope2
    net = gross - reduction

    # Supporting detail tables embedded in the report payload.
    reduction_summary = await get_reduction_summary(db, period_start, period_end)
    monthly = await _monthly_breakdown(db, period_start, period_end)

    payload = {
        "scope_breakdown": {"scope1": scope1, "scope2": scope2},
        "reduction_summary": reduction_summary,
        "monthly_breakdown": monthly,
        "grid_emission_factor": GRID_EMISSION_FACTOR,
        "net_emission_tons": round(net, 4),
        "green_rate": round((reduction / gross * 100) if gross > 0 else 0, 1),
    }

    report = CarbonReport(
        report_type=report_type,
        period_start=period_start,
        period_end=period_end,
        scope1_tons=scope1,
        scope2_tons=scope2,
        total_tons=round(gross, 4),
        reduction_tons=round(reduction, 4),
        net_tons=round(net, 4),
        report_data=payload,
    )
    db.add(report)
    return report
|
||||
|
||||
|
||||
async def _monthly_breakdown(
    db: AsyncSession, start: date, end: date,
) -> list[dict]:
    """Monthly emission and reduction totals for the period.

    Returns one dict per month (``YYYY-MM``) with kg and ton figures.
    The month-bucketing expression is dialect-specific: SQLite has no
    ``date_trunc``, so ``strftime`` is used there and ``to_char`` on
    PostgreSQL.
    """
    # Local import avoids a module-level cycle with app.core.config.
    from app.core.config import get_settings
    settings = get_settings()
    if settings.is_sqlite:
        month_expr = func.strftime('%Y-%m', CarbonEmission.date).label('month')
    else:
        month_expr = func.to_char(
            func.date_trunc('month', CarbonEmission.date), 'YYYY-MM'
        ).label('month')

    result = await db.execute(
        select(
            month_expr,
            func.sum(CarbonEmission.emission),
            func.sum(CarbonEmission.reduction),
        )
        .where(and_(
            func.date(CarbonEmission.date) >= start,
            func.date(CarbonEmission.date) <= end,
        ))
        # 'month' refers to the labelled expression above, not a column.
        .group_by('month')
        .order_by('month')
    )
    return [
        {
            "month": row[0],
            "emission_kg": round(row[1] or 0, 2),
            "reduction_kg": round(row[2] or 0, 2),
            "emission_tons": round((row[1] or 0) / 1000, 4),
            "reduction_tons": round((row[2] or 0) / 1000, 4),
        }
        for row in result.all()
    ]
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Carbon Target Tracking
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
async def get_target_progress(db: AsyncSession, year: int) -> dict:
    """Calculate progress against annual and monthly carbon targets.

    Progress is net emission (scope1 + scope2 - reduction) as a percentage
    of the configured target; status thresholds: <=80% "on_track",
    <=100% "warning", otherwise "exceeded".

    Returns a dict with yearly totals, the annual target progress (or None
    when no annual target exists) and per-month progress entries.
    """
    import calendar  # stdlib; used for true month-end dates

    # Annual target: the row for this year with month unset (NULL).
    annual_q = await db.execute(
        select(CarbonTarget).where(and_(
            CarbonTarget.year == year,
            CarbonTarget.month.is_(None),
        ))
    )
    annual = annual_q.scalar_one_or_none()

    # All monthly targets for this year
    monthly_q = await db.execute(
        select(CarbonTarget).where(and_(
            CarbonTarget.year == year,
            CarbonTarget.month.isnot(None),
        )).order_by(CarbonTarget.month)
    )
    monthlies = monthly_q.scalars().all()

    # Current year actuals
    year_start = date(year, 1, 1)
    year_end = date(year, 12, 31)
    scope1 = await calculate_scope1_emission(db, year_start, year_end)
    scope2 = await calculate_scope2_emission(db, year_start, year_end)
    reduction = await calculate_total_reduction(db, year_start, year_end)
    total_emission = scope1 + scope2
    net = total_emission - reduction

    annual_data = None
    if annual:
        progress = (net / annual.target_emission_tons * 100) if annual.target_emission_tons > 0 else 0
        status = "on_track" if progress <= 80 else ("warning" if progress <= 100 else "exceeded")
        annual_data = {
            "id": annual.id,
            "target_tons": annual.target_emission_tons,
            "actual_tons": round(net, 4),
            "progress_pct": round(progress, 1),
            "status": status,
        }

    monthly_data = []
    for m in monthlies:
        m_start = date(year, m.month, 1)
        # BUGFIX: the window previously ended on the 1st of the NEXT month
        # for Jan-Nov while the downstream emission queries use an inclusive
        # upper bound, so that day was counted in two months (December alone
        # correctly ended on the 31st). Use the month's true last day.
        m_end = date(year, m.month, calendar.monthrange(year, m.month)[1])
        s1 = await calculate_scope1_emission(db, m_start, m_end)
        s2 = await calculate_scope2_emission(db, m_start, m_end)
        red = await calculate_total_reduction(db, m_start, m_end)
        m_net = s1 + s2 - red
        pct = (m_net / m.target_emission_tons * 100) if m.target_emission_tons > 0 else 0
        monthly_data.append({
            "id": m.id,
            "month": m.month,
            "target_tons": m.target_emission_tons,
            "actual_tons": round(m_net, 4),
            "progress_pct": round(pct, 1),
            "status": "on_track" if pct <= 80 else ("warning" if pct <= 100 else "exceeded"),
        })

    return {
        "year": year,
        "total_emission_tons": round(total_emission, 4),
        "total_reduction_tons": round(reduction, 4),
        "net_emission_tons": round(net, 4),
        "annual_target": annual_data,
        "monthly_targets": monthly_data,
    }
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Benchmark Comparison
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
async def compare_with_benchmarks(
    db: AsyncSession, year: int,
) -> dict:
    """Compare the year's actual emissions with stored industry benchmarks."""
    rows = await db.execute(
        select(CarbonBenchmark).where(CarbonBenchmark.year == year)
    )
    benchmarks = rows.scalars().all()

    year_start = date(year, 1, 1)
    year_end = date(year, 12, 31)
    scope1 = await calculate_scope1_emission(db, year_start, year_end)
    scope2 = await calculate_scope2_emission(db, year_start, year_end)
    total = scope1 + scope2
    reduction = await calculate_total_reduction(db, year_start, year_end)

    comparisons = [
        {
            "industry": b.industry,
            "metric": b.metric_name,
            "benchmark_value": b.benchmark_value,
            "unit": b.unit,
            "source": b.source,
        }
        for b in benchmarks
    ]

    return {
        "year": year,
        "actual_emission_tons": round(total, 4),
        "actual_reduction_tons": round(reduction, 4),
        "net_tons": round(total - reduction, 4),
        "benchmarks": comparisons,
    }
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Dashboard aggregation
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
async def get_carbon_dashboard(db: AsyncSession) -> dict:
    """Aggregate year-to-date carbon KPIs, trends and portfolio data for
    the dashboard view."""
    now = datetime.now(timezone.utc)
    year = now.year
    year_start = date(year, 1, 1)
    today = now.date()

    # Year-to-date actuals.
    scope1 = await calculate_scope1_emission(db, year_start, today)
    scope2 = await calculate_scope2_emission(db, year_start, today)
    total_emission = scope1 + scope2
    reduction = await calculate_total_reduction(db, year_start, today)
    net = total_emission - reduction
    green_rate = round((reduction / total_emission * 100) if total_emission > 0 else 0, 1)

    # Supporting panels.
    target_progress = await get_target_progress(db, year)
    monthly = await _monthly_breakdown(db, year_start, today)
    reduction_summary = await get_reduction_summary(db, year_start, today)
    cert_value = await get_certificate_portfolio_value(db)

    kpi = {
        "total_emission_tons": round(total_emission, 4),
        "total_reduction_tons": round(reduction, 4),
        "net_emission_tons": round(net, 4),
        "green_rate": green_rate,
        "scope1_tons": scope1,
        "scope2_tons": scope2,
        "equivalent_trees": round(reduction / TREE_ABSORPTION, 0) if reduction > 0 else 0,
    }
    return {
        "kpi": kpi,
        "target_progress": target_progress.get("annual_target"),
        "monthly_trend": monthly,
        "reduction_by_source": reduction_summary,
        "certificate_portfolio": cert_value,
    }
|
||||
419
backend/app/services/energy_strategy.py
Normal file
419
backend/app/services/energy_strategy.py
Normal file
@@ -0,0 +1,419 @@
|
||||
"""能源策略优化服务 - 峰谷电价策略、谷电蓄热、负荷转移、光伏自消纳"""
|
||||
|
||||
from datetime import datetime, date, timedelta, timezone
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy import select, func, and_
|
||||
from app.models.energy_strategy import (
|
||||
TouPricing, TouPricingPeriod, EnergyStrategy, StrategyExecution, MonthlyCostReport,
|
||||
)
|
||||
from app.models.energy import EnergyData, EnergyDailySummary
|
||||
|
||||
|
||||
# Beijing timezone (UTC+8); TOU period boundaries are Beijing wall-clock times.
BJT = timezone(timedelta(hours=8))

# Default Beijing industrial TOU pricing (prices in yuan/kWh).
# Used as reference values; active plans come from the TouPricing tables.
DEFAULT_PERIODS = [
    {"period_type": "sharp_peak", "start_time": "10:00", "end_time": "15:00", "price": 1.3761},
    {"period_type": "sharp_peak", "start_time": "18:00", "end_time": "21:00", "price": 1.3761},
    {"period_type": "peak", "start_time": "08:00", "end_time": "10:00", "price": 1.1883},
    {"period_type": "peak", "start_time": "15:00", "end_time": "18:00", "price": 1.1883},
    {"period_type": "peak", "start_time": "21:00", "end_time": "23:00", "price": 1.1883},
    {"period_type": "flat", "start_time": "07:00", "end_time": "08:00", "price": 0.7467},
    {"period_type": "valley", "start_time": "23:00", "end_time": "07:00", "price": 0.3048},
]

# Chinese display labels for each TOU period type.
PERIOD_LABELS = {
    "sharp_peak": "尖峰",
    "peak": "高峰",
    "flat": "平段",
    "valley": "低谷",
}
|
||||
|
||||
|
||||
def parse_month_range(month_range: str | None) -> list[int] | None:
|
||||
"""Parse month range string like '1-3,11-12' into list of month ints."""
|
||||
if not month_range:
|
||||
return None
|
||||
months = []
|
||||
for part in month_range.split(","):
|
||||
part = part.strip()
|
||||
if "-" in part:
|
||||
start, end = part.split("-")
|
||||
months.extend(range(int(start), int(end) + 1))
|
||||
else:
|
||||
months.append(int(part))
|
||||
return months
|
||||
|
||||
|
||||
def get_period_for_hour(periods: list[TouPricingPeriod], hour: int, month: int | None = None) -> TouPricingPeriod | None:
    """Determine which TOU period the given hour falls into.

    Periods with a month_range that excludes *month* are skipped. A period
    whose start is later than its end is treated as crossing midnight.
    Falls back to the first period when nothing matches; None when the
    list is empty.
    """
    probe = f"{hour:02d}:00"
    for candidate in periods:
        if month is not None and candidate.month_range:
            applicable_months = parse_month_range(candidate.month_range)
            if applicable_months and month not in applicable_months:
                continue
        lo, hi = candidate.start_time, candidate.end_time
        if lo <= hi:
            if lo <= probe < hi:
                return candidate
        elif probe >= lo or probe < hi:  # crosses midnight
            return candidate
    return periods[0] if periods else None
|
||||
|
||||
|
||||
async def get_active_tou_pricing(db: AsyncSession, target_date: date | None = None) -> TouPricing | None:
    """Return the most recently created active TOU pricing plan.

    When *target_date* is given, the plan must also be effective on that
    date; NULL effective/end dates are treated as open-ended.

    Fix: replaced ``== True`` / ``== None`` comparisons with SQLAlchemy's
    canonical ``.is_()`` operator (flagged by E711/E712; same generated SQL).
    """
    q = select(TouPricing).where(TouPricing.is_active.is_(True))
    if target_date:
        q = q.where(
            and_(
                TouPricing.effective_date.is_(None) | (TouPricing.effective_date <= target_date),
                TouPricing.end_date.is_(None) | (TouPricing.end_date >= target_date),
            )
        )
    q = q.order_by(TouPricing.created_at.desc()).limit(1)
    result = await db.execute(q)
    return result.scalar_one_or_none()
|
||||
|
||||
|
||||
async def get_tou_periods(db: AsyncSession, pricing_id: int) -> list[TouPricingPeriod]:
    """Load all pricing periods attached to the given TOU plan."""
    stmt = select(TouPricingPeriod).where(TouPricingPeriod.pricing_id == pricing_id)
    rows = await db.execute(stmt)
    return list(rows.scalars().all())
|
||||
|
||||
|
||||
async def calculate_hourly_cost(
    db: AsyncSession, target_date: date, periods: list[TouPricingPeriod],
) -> dict:
    """Calculate hourly electricity cost for a specific date.

    Sums metered consumption per Beijing-time hour, prices each hour with
    its TOU period, and returns per-hour rows plus day totals.

    NOTE(review): this issues 24 sequential queries (one per hour); a single
    grouped query would be cheaper if this becomes hot.
    Assumes EnergyData.value for data_type "energy" is in kWh — confirm.
    """
    day_start = datetime(target_date.year, target_date.month, target_date.day, tzinfo=BJT)
    hourly_data = []
    total_cost = 0.0
    total_kwh = 0.0

    for hour in range(24):
        # Half-open hour window [hour_start, hour_end).
        hour_start = day_start + timedelta(hours=hour)
        hour_end = hour_start + timedelta(hours=1)

        q = select(func.sum(EnergyData.value)).where(
            and_(
                EnergyData.timestamp >= hour_start,
                EnergyData.timestamp < hour_end,
                EnergyData.data_type == "energy",
            )
        )
        result = await db.execute(q)
        hour_kwh = result.scalar() or 0.0

        # Fall back to the flat-rate price when no period matches this hour.
        period = get_period_for_hour(periods, hour, target_date.month)
        price = period.price_yuan_per_kwh if period else 0.7467
        period_type = period.period_type if period else "flat"
        cost = hour_kwh * price

        total_cost += cost
        total_kwh += hour_kwh

        hourly_data.append({
            "hour": hour,
            "consumption_kwh": round(hour_kwh, 2),
            "price": price,
            "cost": round(cost, 2),
            "period_type": period_type,
            "period_label": PERIOD_LABELS.get(period_type, period_type),
        })

    return {
        "date": str(target_date),
        "hourly": hourly_data,
        "total_cost": round(total_cost, 2),
        "total_kwh": round(total_kwh, 2),
    }
|
||||
|
||||
|
||||
async def calculate_monthly_cost_breakdown(
    db: AsyncSession, year: int, month: int,
) -> dict:
    """Calculate monthly cost breakdown by TOU period type.

    APPROXIMATION: the month's total kWh (from daily summaries) is spread
    across period types in proportion to how many hours per day each type
    covers — not by the actual hourly load profile. Costs are therefore
    estimates, not metered values.
    """
    pricing = await get_active_tou_pricing(db, date(year, month, 1))
    if not pricing:
        return _empty_cost_breakdown(year, month)

    periods = await get_tou_periods(db, pricing.id)
    if not periods:
        return _empty_cost_breakdown(year, month)

    # Build hour -> period mapping; count hours-per-day for each period type.
    period_stats = {pt: {"kwh": 0.0, "cost": 0.0, "hours": 0}
                    for pt in ["sharp_peak", "peak", "flat", "valley"]}

    for hour in range(24):
        period = get_period_for_hour(periods, hour, month)
        if not period:
            continue
        pt = period.period_type
        if pt not in period_stats:
            # Custom period types beyond the four defaults are still counted.
            period_stats[pt] = {"kwh": 0.0, "cost": 0.0, "hours": 0}
        period_stats[pt]["hours"] += 1

    # Get daily summaries for the month (half-open [month_start, month_end)).
    month_start = date(year, month, 1)
    if month == 12:
        month_end = date(year + 1, 1, 1)
    else:
        month_end = date(year, month + 1, 1)

    q = select(
        func.sum(EnergyDailySummary.total_consumption),
    ).where(
        and_(
            EnergyDailySummary.date >= datetime(month_start.year, month_start.month, month_start.day),
            EnergyDailySummary.date < datetime(month_end.year, month_end.month, month_end.day),
            EnergyDailySummary.energy_type == "electricity",
        )
    )
    result = await db.execute(q)
    total_monthly_kwh = result.scalar() or 0.0

    # Distribute by hour proportion (see APPROXIMATION note above).
    total_hours = sum(ps["hours"] for ps in period_stats.values())
    for pt, ps in period_stats.items():
        proportion = ps["hours"] / total_hours if total_hours > 0 else 0
        ps["kwh"] = total_monthly_kwh * proportion
        period_obj = next((p for p in periods if p.period_type == pt), None)
        price = period_obj.price_yuan_per_kwh if period_obj else 0
        ps["cost"] = ps["kwh"] * price

    total_cost = sum(ps["cost"] for ps in period_stats.values())

    breakdown = []
    for pt, ps in period_stats.items():
        if ps["hours"] == 0:
            # Period types with no active hours this month are omitted.
            continue
        breakdown.append({
            "period_type": pt,
            "period_label": PERIOD_LABELS.get(pt, pt),
            "consumption_kwh": round(ps["kwh"], 2),
            "cost_yuan": round(ps["cost"], 2),
            "hours_per_day": ps["hours"],
            "proportion": round(ps["kwh"] / total_monthly_kwh * 100, 1) if total_monthly_kwh > 0 else 0,
        })

    return {
        "year_month": f"{year}-{month:02d}",
        "total_consumption_kwh": round(total_monthly_kwh, 2),
        "total_cost_yuan": round(total_cost, 2),
        "breakdown": breakdown,
        "pricing_name": pricing.name,
    }
|
||||
|
||||
|
||||
def _empty_cost_breakdown(year: int, month: int) -> dict:
|
||||
return {
|
||||
"year_month": f"{year}-{month:02d}",
|
||||
"total_consumption_kwh": 0,
|
||||
"total_cost_yuan": 0,
|
||||
"breakdown": [],
|
||||
"pricing_name": "未配置",
|
||||
}
|
||||
|
||||
|
||||
def calculate_heat_storage_savings(
    daily_kwh: float, periods: list[TouPricingPeriod], shift_ratio: float = 0.3,
) -> dict:
    """Estimate savings from the valley-electricity heat storage strategy (谷电蓄热).

    Assumes *shift_ratio* of heat pump load can be moved from peak and
    sharp-peak hours to valley hours; defaults are used when the period
    list has no valley / peak entries.
    """
    valley_price = 0.3048  # fallback when no valley period is configured
    peak_prices = [
        p.price_yuan_per_kwh
        for p in periods
        if p.period_type in ("sharp_peak", "peak")
    ]
    for p in periods:
        if p.period_type == "valley":
            valley_price = p.price_yuan_per_kwh

    avg_peak_price = sum(peak_prices) / len(peak_prices) if peak_prices else 1.2
    shifted_kwh = daily_kwh * shift_ratio
    daily_saving = shifted_kwh * (avg_peak_price - valley_price)

    return {
        "shifted_kwh": round(shifted_kwh, 2),
        "avg_peak_price": round(avg_peak_price, 4),
        "valley_price": round(valley_price, 4),
        "savings_per_day": round(daily_saving, 2),
        "savings_per_month": round(daily_saving * 30, 2),
        "savings_per_year": round(daily_saving * 365, 2),
        "strategy": "谷电蓄热",
        "description": f"将{shift_ratio*100:.0f}%的热泵负荷从尖峰/高峰时段转移至低谷时段(23:00-7:00)预热水箱",
    }
|
||||
|
||||
|
||||
def calculate_pv_priority_savings(
    pv_daily_kwh: float, grid_price: float = 0.7467, feed_in_price: float = 0.3548,
) -> dict:
    """Estimate savings from consuming PV generation on site instead of
    feeding it into the grid at the (lower) feed-in tariff."""
    value_if_self_consumed = pv_daily_kwh * grid_price
    value_if_fed_in = pv_daily_kwh * feed_in_price
    daily_saving = value_if_self_consumed - value_if_fed_in

    return {
        "pv_daily_kwh": round(pv_daily_kwh, 2),
        "self_consume_value": round(value_if_self_consumed, 2),
        "feed_in_value": round(value_if_fed_in, 2),
        "savings_per_day": round(daily_saving, 2),
        "savings_per_month": round(daily_saving * 30, 2),
        "strategy": "光伏自消纳优先",
        "description": "优先使用光伏发电供给园区负荷,减少向电网购电",
    }
|
||||
|
||||
|
||||
def simulate_strategy_impact(
    daily_consumption_kwh: float,
    pv_daily_kwh: float,
    periods: list[TouPricingPeriod],
    strategies: list[str],
) -> dict:
    """Simulate the daily/monthly/yearly cost impact of enabling the named
    strategies ("heat_storage", "pv_priority", "load_shift").

    Baseline cost spreads the daily kWh across periods in proportion to the
    hours each period covers.
    """

    def _span_hours(p) -> int:
        # Whole-hour span of a period; spans crossing midnight wrap.
        lo = int(p.start_time.split(":")[0])
        hi = int(p.end_time.split(":")[0])
        return hi - lo if lo < hi else (24 - lo) + hi

    hours_by_type: dict[str, int] = {}
    for p in periods:
        hours_by_type[p.period_type] = hours_by_type.get(p.period_type, 0) + _span_hours(p)
    total_hours = sum(hours_by_type.values()) or 24

    baseline_cost = 0.0
    for p in periods:
        share = _span_hours(p) / total_hours
        baseline_cost += daily_consumption_kwh * share * p.price_yuan_per_kwh

    optimized_cost = baseline_cost
    savings_details = []

    if "heat_storage" in strategies:
        # Assume 40% of consumption is heat pump load, 30% of that shiftable.
        hs = calculate_heat_storage_savings(daily_consumption_kwh * 0.4, periods, 0.3)
        optimized_cost -= hs["savings_per_day"]
        savings_details.append(hs)

    if "pv_priority" in strategies:
        pv = calculate_pv_priority_savings(pv_daily_kwh)
        optimized_cost -= pv["savings_per_day"]
        savings_details.append(pv)

    if "load_shift" in strategies:
        # Shift 15% of sharp-peak load to valley hours, if both exist.
        valley = next((p for p in periods if p.period_type == "valley"), None)
        sharp = next((p for p in periods if p.period_type == "sharp_peak"), None)
        if valley and sharp:
            shift_kwh = daily_consumption_kwh * 0.15
            saved = shift_kwh * (sharp.price_yuan_per_kwh - valley.price_yuan_per_kwh)
            optimized_cost -= saved
            savings_details.append({
                "strategy": "负荷转移",
                "savings_per_day": round(saved, 2),
                "savings_per_month": round(saved * 30, 2),
                "description": "将15%的尖峰时段负荷转移至低谷时段",
            })

    final_cost = max(0, optimized_cost)
    daily_saving = baseline_cost - final_cost
    return {
        "baseline_cost_per_day": round(baseline_cost, 2),
        "optimized_cost_per_day": round(final_cost, 2),
        "total_savings_per_day": round(daily_saving, 2),
        "total_savings_per_month": round(daily_saving * 30, 2),
        "total_savings_per_year": round(daily_saving * 365, 2),
        "savings_percentage": round((1 - final_cost / baseline_cost) * 100, 1) if baseline_cost > 0 else 0,
        "details": savings_details,
    }
|
||||
|
||||
|
||||
async def get_recommendations(db: AsyncSession) -> list[dict]:
    """Build the list of currently applicable strategy recommendations.

    Rule-based suggestions: a heat-storage tip during the heating season,
    an always-on PV self-consumption tip, and a load-shift warning while a
    sharp-peak tariff window is active (Beijing local time).
    """
    now = datetime.now(BJT)
    tips: list[dict] = []

    # Heating season (Nov-Mar): pre-heat the thermal store on valley tariff.
    if now.month in {11, 12, 1, 2, 3}:
        tips.append({
            "type": "heat_storage",
            "title": "谷电蓄热策略",
            "description": "当前为采暖季,建议在低谷时段(23:00-7:00)预热水箱,减少尖峰时段热泵运行",
            "priority": "high",
            "estimated_savings": "每月可节约约3000-5000元",
        })

    # PV self-consumption is always worth recommending.
    tips.append({
        "type": "pv_priority",
        "title": "光伏自消纳优先",
        "description": "优先使用屋顶光伏发电满足园区负荷,减少购电成本",
        "priority": "medium",
        "estimated_savings": "每月可节约约1500-2500元",
    })

    # Warn while inside a sharp-peak tariff window (10-15h or 18-21h).
    if now.hour in range(10, 16) or now.hour in range(18, 22):
        tips.append({
            "type": "load_shift",
            "title": "当前处于尖峰时段",
            "description": "建议减少非必要大功率设备运行,可延迟至低谷时段执行",
            "priority": "high",
            "estimated_savings": "尖峰电价1.3761元/kWh,低谷电价0.3048元/kWh",
        })

    return tips
|
||||
|
||||
|
||||
async def get_savings_report(db: AsyncSession, year: int) -> dict:
    """Aggregate the stored monthly cost reports for *year* into a summary.

    Returns the per-month rows plus yearly totals for savings, baseline and
    optimized cost, and the overall savings percentage.
    """
    rows = (
        await db.execute(
            select(MonthlyCostReport)
            .where(MonthlyCostReport.year_month.like(f"{year}-%"))
            .order_by(MonthlyCostReport.year_month)
        )
    ).scalars().all()

    monthly = [
        {
            "year_month": r.year_month,
            "total_consumption_kwh": r.total_consumption_kwh,
            "total_cost_yuan": r.total_cost_yuan,
            "baseline_cost": r.baseline_cost,
            "optimized_cost": r.optimized_cost,
            "savings_yuan": r.savings_yuan,
        }
        for r in rows
    ]

    # Float start values keep the totals as floats even for an empty year.
    saved = sum((r.savings_yuan for r in rows), 0.0)
    baseline = sum((r.baseline_cost for r in rows), 0.0)
    optimized = sum((r.optimized_cost for r in rows), 0.0)

    return {
        "year": year,
        "monthly_reports": monthly,
        "total_savings_yuan": round(saved, 2),
        "total_baseline_cost": round(baseline, 2),
        "total_optimized_cost": round(optimized, 2),
        # Guard against division by zero when no baseline data exists.
        "savings_percentage": round(saved / baseline * 100, 1) if baseline > 0 else 0,
    }
|
||||
229
backend/app/services/weather_service.py
Normal file
229
backend/app/services/weather_service.py
Normal file
@@ -0,0 +1,229 @@
|
||||
"""气象数据融合服务 - 天气API集成、模拟数据生成、缓存"""
|
||||
|
||||
import logging
|
||||
import math
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy import select, and_, desc
|
||||
from app.models.weather import WeatherData, WeatherConfig
|
||||
from app.services.weather_model import (
|
||||
outdoor_temperature, outdoor_humidity, solar_altitude,
|
||||
get_cloud_factor, BEIJING_TZ_OFFSET, MONTHLY_AVG_TEMP,
|
||||
)
|
||||
|
||||
logger = logging.getLogger("weather_service")
|
||||
|
||||
BJT = timezone(timedelta(hours=8))
|
||||
|
||||
|
||||
def generate_mock_weather(dt: datetime) -> dict:
    """Synthesise a plausible weather sample for *dt*.

    Temperature and humidity come from the shared ``weather_model`` curves;
    solar radiation is a clear-sky estimate attenuated by cloudiness; wind
    speed is a seasonal Beijing baseline with a daytime diurnal bump.

    Returns a dict with ``temperature``, ``humidity``, ``solar_radiation``,
    ``cloud_cover`` and ``wind_speed``, all rounded for presentation.
    """
    temp = outdoor_temperature(dt)
    humidity = outdoor_humidity(dt)

    # Solar radiation only while the sun is above the horizon.
    alt = solar_altitude(dt)
    if alt > 0:
        cloud = get_cloud_factor(dt)
        # Clear-sky irradiance ~ 1000 * sin(altitude), scaled by the cloud
        # factor and a fixed 0.85 atmospheric-loss factor.
        solar_radiation = 1000 * math.sin(math.radians(alt)) * cloud * 0.85
        solar_radiation = max(0, solar_radiation)
        cloud_cover = (1 - cloud) * 100
    else:
        solar_radiation = 0
        cloud_cover = 0  # NOTE(review): night-time cloud cover reads 0 by design of the mock

    # Convert to Beijing wall-clock time for the seasonal/diurnal wind model.
    # Fix: use astimezone() so any aware datetime (not just UTC) converts
    # correctly; the old `dt + timedelta(hours=8)` shortcut double-shifted
    # inputs that were already in a non-UTC zone.
    if dt.tzinfo is not None:
        beijing_dt = dt.astimezone(timezone(timedelta(hours=BEIJING_TZ_OFFSET)))
    else:
        beijing_dt = dt  # naive datetimes are assumed to already be Beijing local time
    month = beijing_dt.month
    # Monthly mean wind speed (m/s) -- spring is windier in Beijing.
    base_wind = {1: 2.5, 2: 3.0, 3: 4.0, 4: 4.5, 5: 3.5, 6: 2.5,
                 7: 2.0, 8: 2.0, 9: 2.5, 10: 3.0, 11: 3.0, 12: 2.5}.get(month, 2.5)
    # Diurnal component: stronger in the afternoon, calmer at night.
    hour = beijing_dt.hour
    diurnal_wind = 0.5 * math.sin(math.pi * (hour - 6) / 12) if 6 <= hour <= 18 else -0.3
    wind_speed = max(0.1, base_wind + diurnal_wind)

    return {
        "temperature": round(temp, 1),
        "humidity": round(humidity, 1),
        "solar_radiation": round(solar_radiation, 1),
        "cloud_cover": round(max(0, min(100, cloud_cover)), 1),
        "wind_speed": round(wind_speed, 1),
    }
|
||||
|
||||
|
||||
async def get_current_weather(db: AsyncSession) -> dict:
    """Return the current weather, serving a recent cached row when available.

    An ``observation`` row fetched within the last 15 minutes is reused;
    otherwise a mock sample is generated, persisted, and returned.
    """
    now = datetime.now(timezone.utc)

    # Prefer a recent cached observation over generating a new sample.
    recent = (
        await db.execute(
            select(WeatherData)
            .where(
                and_(
                    WeatherData.data_type == "observation",
                    WeatherData.fetched_at >= now - timedelta(minutes=15),
                )
            )
            .order_by(desc(WeatherData.fetched_at))
            .limit(1)
        )
    ).scalar_one_or_none()

    if recent is not None:
        return {
            "timestamp": str(recent.timestamp),
            "temperature": recent.temperature,
            "humidity": recent.humidity,
            "solar_radiation": recent.solar_radiation,
            "cloud_cover": recent.cloud_cover,
            "wind_speed": recent.wind_speed,
            "source": recent.source,
        }

    # Cache miss: synthesise a sample and persist it for subsequent calls.
    sample = generate_mock_weather(now)
    db.add(WeatherData(
        timestamp=now,
        data_type="observation",
        source="mock",
        **sample,
    ))

    return {"timestamp": str(now), **sample, "source": "mock"}
|
||||
|
||||
|
||||
async def get_forecast(db: AsyncSession, hours: int = 72) -> list[dict]:
    """Produce a mock forecast at 3-hour steps covering the next *hours* hours."""
    base = datetime.now(timezone.utc)
    return [
        {
            "timestamp": str(base + timedelta(hours=offset)),
            "hours_ahead": offset,
            **generate_mock_weather(base + timedelta(hours=offset)),
        }
        for offset in range(0, hours, 3)  # 3-hour forecast intervals
    ]
|
||||
|
||||
|
||||
async def get_weather_history(
    db: AsyncSession, start_date: datetime, end_date: datetime,
) -> list[dict]:
    """Return stored weather rows for the window, or hourly mock data if none exist."""
    rows = (
        await db.execute(
            select(WeatherData)
            .where(
                and_(
                    WeatherData.timestamp >= start_date,
                    WeatherData.timestamp <= end_date,
                )
            )
            .order_by(WeatherData.timestamp)
        )
    ).scalars().all()

    if rows:
        return [
            {
                "timestamp": str(row.timestamp),
                "temperature": row.temperature,
                "humidity": row.humidity,
                "solar_radiation": row.solar_radiation,
                "cloud_cover": row.cloud_cover,
                "wind_speed": row.wind_speed,
                "source": row.source,
            }
            for row in rows
        ]

    # Nothing persisted for this window: fall back to hourly synthetic samples.
    synthetic: list[dict] = []
    cursor = start_date
    while cursor <= end_date:
        synthetic.append({"timestamp": str(cursor), **generate_mock_weather(cursor), "source": "mock"})
        cursor += timedelta(hours=1)
    return synthetic
|
||||
|
||||
|
||||
async def get_weather_impact(db: AsyncSession, days: int = 30) -> dict:
    """Summarise how weather relates to energy consumption and PV generation.

    NOTE(review): the returned figures are illustrative sample data, not
    derived from *db*; ``days`` only labels the nominal analysis window.
    (Removed the unused ``now``/``start`` locals the original computed.)
    """
    # Typical consumption / PV generation per outdoor-temperature band (sample data).
    temp_ranges = [
        {"range": "< 0C", "min": -10, "max": 0, "avg_consumption": 850, "pv_generation": 180},
        {"range": "0-10C", "min": 0, "max": 10, "avg_consumption": 720, "pv_generation": 220},
        {"range": "10-20C", "min": 10, "max": 20, "avg_consumption": 550, "pv_generation": 310},
        {"range": "20-30C", "min": 20, "max": 30, "avg_consumption": 680, "pv_generation": 380},
        {"range": "> 30C", "min": 30, "max": 40, "avg_consumption": 780, "pv_generation": 350},
    ]

    # Modelled PV output vs. irradiance: 330 kWp array at 85% system efficiency.
    solar_correlation = [
        {"solar_radiation": rad, "pv_output_kw": round(rad * 0.33 * 0.85, 1)}
        for rad in range(0, 1001, 100)
    ]

    return {
        "analysis_period_days": days,
        "temperature_impact": temp_ranges,
        "solar_correlation": solar_correlation,
        "key_findings": [
            "采暖季(11-3月)温度每降低1C,热泵能耗增加约3%",
            "太阳辐射与光伏产出呈强正相关(R2=0.92)",
            "多云天气光伏产出下降30-50%",
            "春季大风天气对能耗影响较小,但对光伏面板散热有利",
        ],
    }
|
||||
|
||||
|
||||
async def get_weather_config(db: AsyncSession) -> dict:
    """Return the stored weather-API configuration, or built-in defaults."""
    cfg = (await db.execute(select(WeatherConfig).limit(1))).scalar_one_or_none()

    if cfg is None:
        # No row yet: mock provider located at Beijing, polled every 30 minutes.
        return {
            "api_provider": "mock",
            "location_lat": 39.9,
            "location_lon": 116.4,
            "fetch_interval_minutes": 30,
            "is_enabled": True,
        }

    return {
        "id": cfg.id,
        "api_provider": cfg.api_provider,
        "location_lat": cfg.location_lat,
        "location_lon": cfg.location_lon,
        "fetch_interval_minutes": cfg.fetch_interval_minutes,
        "is_enabled": cfg.is_enabled,
    }
|
||||
|
||||
|
||||
async def update_weather_config(db: AsyncSession, data: dict) -> dict:
    """Apply whitelisted fields from *data* onto the singleton WeatherConfig row.

    Creates the row on first use; unknown keys in *data* are ignored.
    """
    cfg = (await db.execute(select(WeatherConfig).limit(1))).scalar_one_or_none()
    if cfg is None:
        cfg = WeatherConfig()
        db.add(cfg)

    # Only these fields may be written from the outside.
    allowed = ("api_provider", "api_key", "location_lat", "location_lon",
               "fetch_interval_minutes", "is_enabled")
    for field in allowed:
        if field in data:
            setattr(cfg, field, data[field])

    return {"message": "气象配置更新成功"}
|
||||
Reference in New Issue
Block a user